Merge branch 'master' into box-alloc

Tim Diekmann 2020-10-16 08:54:38 +02:00 committed by GitHub
commit 955b37b305
895 changed files with 16039 additions and 11283 deletions

.github/ISSUE_TEMPLATE/regression.md (new file, 68 lines)

@@ -0,0 +1,68 @@
---
name: Regression
about: Report something that unexpectedly changed between Rust versions.
labels: C-bug regression-untriaged
---
<!--
Thank you for filing a regression report! 🐛 A regression is something that changed between versions of Rust but was not supposed to.
Please provide a short summary of the regression, along with any information you feel is relevant to replicate it.
-->
### Code
I tried this code:
```rust
<code>
```
I expected to see this happen: *explanation*
Instead, this happened: *explanation*
### Version it worked on
<!--
Provide the most recent version this worked on, for example:
It most recently worked on: Rust 1.47
-->
It most recently worked on: <!-- version -->
### Version with regression
<!--
Provide the version you are using that has the regression.
-->
`rustc --version --verbose`:
```
<version>
```
<!--
Did the compiler crash? If so, please provide a backtrace.
-->
### Backtrace
<!--
Include a backtrace in the code block by setting `RUST_BACKTRACE=1` in your
environment. E.g. `RUST_BACKTRACE=1 cargo build`.
-->
<details><summary>Backtrace</summary>
<p>
```
<backtrace>
```
</p>
</details>
<!--
If you know when this regression occurred, please add a line like below, replacing `{channel}` with one of stable, beta, or nightly.
@rustbot modify labels: +regression-from-stable-to-{channel} -regression-untriaged
-->


@@ -89,6 +89,9 @@ jobs:
 - name: install sccache
 run: src/ci/scripts/install-sccache.sh
 if: success() && !env.SKIP_JOB
+- name: select Xcode
+run: src/ci/scripts/select-xcode.sh
+if: success() && !env.SKIP_JOB
 - name: install clang
 run: src/ci/scripts/install-clang.sh
 if: success() && !env.SKIP_JOB
@@ -300,6 +303,20 @@ jobs:
 NO_LLVM_ASSERTIONS: 1
 NO_DEBUG_ASSERTIONS: 1
 os: macos-latest
+- name: dist-aarch64-apple
+env:
+SCRIPT: "./x.py dist --stage 2"
+RUST_CONFIGURE_ARGS: "--build=x86_64-apple-darwin --host=aarch64-apple-darwin --target=aarch64-apple-darwin --enable-full-tools --enable-sanitizers --enable-profiler --set rust.jemalloc --set llvm.ninja=false"
+RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
+SELECT_XCODE: /Applications/Xcode_12.2.app
+USE_XCODE_CLANG: 1
+MACOSX_DEPLOYMENT_TARGET: 11.0
+MACOSX_STD_DEPLOYMENT_TARGET: 11.0
+NO_LLVM_ASSERTIONS: 1
+NO_DEBUG_ASSERTIONS: 1
+DIST_REQUIRE_ALL_TOOLS: 1
+JEMALLOC_SYS_WITH_LG_PAGE: 14
+os: macos-latest
 - name: x86_64-msvc-1
 env:
 RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --enable-profiler"
@@ -369,7 +386,7 @@
 os: windows-latest-xl
 - name: dist-x86_64-msvc
 env:
-RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --host=x86_64-pc-windows-msvc --target=x86_64-pc-windows-msvc,aarch64-pc-windows-msvc --enable-full-tools --enable-profiler"
+RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --host=x86_64-pc-windows-msvc --target=x86_64-pc-windows-msvc --enable-full-tools --enable-profiler"
 SCRIPT: python x.py dist
 DIST_REQUIRE_ALL_TOOLS: 1
 os: windows-latest-xl
@@ -379,6 +396,12 @@
 SCRIPT: python x.py dist
 DIST_REQUIRE_ALL_TOOLS: 1
 os: windows-latest-xl
+- name: dist-aarch64-msvc
+env:
+RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --host=aarch64-pc-windows-msvc --enable-full-tools --enable-profiler"
+SCRIPT: python x.py dist
+DIST_REQUIRE_ALL_TOOLS: 0
+os: windows-latest-xl
 - name: dist-i686-mingw
 env:
 RUST_CONFIGURE_ARGS: "--build=i686-pc-windows-gnu --enable-full-tools --enable-profiler"
@@ -437,6 +460,9 @@
 - name: install sccache
 run: src/ci/scripts/install-sccache.sh
 if: success() && !env.SKIP_JOB
+- name: select Xcode
+run: src/ci/scripts/select-xcode.sh
+if: success() && !env.SKIP_JOB
 - name: install clang
 run: src/ci/scripts/install-clang.sh
 if: success() && !env.SKIP_JOB
@@ -544,6 +570,9 @@
 - name: install sccache
 run: src/ci/scripts/install-sccache.sh
 if: success() && !env.SKIP_JOB
+- name: select Xcode
+run: src/ci/scripts/select-xcode.sh
+if: success() && !env.SKIP_JOB
 - name: install clang
 run: src/ci/scripts/install-clang.sh
 if: success() && !env.SKIP_JOB
@@ -648,6 +677,9 @@
 - name: install sccache
 run: src/ci/scripts/install-sccache.sh
 if: success() && !env.SKIP_JOB
+- name: select Xcode
+run: src/ci/scripts/select-xcode.sh
+if: success() && !env.SKIP_JOB
 - name: install clang
 run: src/ci/scripts/install-clang.sh
 if: success() && !env.SKIP_JOB

.gitmodules (2 lines changed)

@@ -37,7 +37,7 @@
 [submodule "src/llvm-project"]
 path = src/llvm-project
 url = https://github.com/rust-lang/llvm-project.git
-branch = rustc/11.0-2020-09-22
+branch = rustc/11.0-2020-10-12
 [submodule "src/doc/embedded-book"]
 path = src/doc/embedded-book
 url = https://github.com/rust-embedded/book.git


@@ -132,13 +132,13 @@ checksum = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
 [[package]]
 name = "backtrace"
-version = "0.3.50"
+version = "0.3.53"
 dependencies = [
 "addr2line",
-"cfg-if",
+"cfg-if 1.0.0",
 "libc",
 "miniz_oxide",
-"object",
+"object 0.21.1",
 "rustc-demangle",
 ]
@@ -243,6 +243,7 @@ dependencies = [
 "anyhow",
 "flate2",
 "hex 0.4.2",
+"num_cpus",
 "rayon",
 "serde",
 "serde_json",
@@ -294,7 +295,7 @@ checksum = "81a18687293a1546b67c246452202bbbf143d239cb43494cc163da14979082da"
 [[package]]
 name = "cargo"
-version = "0.49.0"
+version = "0.50.0"
 dependencies = [
 "anyhow",
 "atty",
@@ -305,7 +306,7 @@ dependencies = [
 "clap",
 "core-foundation",
 "crates-io",
-"crossbeam-utils 0.7.2",
+"crossbeam-utils 0.8.0",
 "crypto-hash",
 "curl",
 "curl-sys",
@@ -441,10 +442,16 @@ dependencies = [
 ]
 [[package]]
-name = "chalk-derive"
-version = "0.31.0"
+name = "cfg-if"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca40c97e20f43e4aac2282d342103d45fafad74ad9bfcbaaf0b5d386f9ce1f39"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+[[package]]
+name = "chalk-derive"
+version = "0.32.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d072b2ba723f0bada7c515d8b3725224bc4f5052d2a92dcbeb0b118ff37084a"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -454,9 +461,9 @@ dependencies = [
 [[package]]
 name = "chalk-engine"
-version = "0.31.0"
+version = "0.32.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88e3d545394fbc4f7d8fe203c6a78d2b73f82bb119f21af98de1f924c2518e34"
+checksum = "6fb5475f6083d6d6c509e1c335c4f69ad04144ac090faa1afb134a53c3695841"
 dependencies = [
 "chalk-derive",
 "chalk-ir",
@@ -467,9 +474,9 @@ dependencies = [
 [[package]]
 name = "chalk-ir"
-version = "0.31.0"
+version = "0.32.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c4183955e084fcc387b515f867ed0e17e9e7301f5eee29c0338d5e63315bb41"
+checksum = "f60cdb0e18c5455cb6a85e8464aad3622b70476018edfa8845691df66f7e9a05"
 dependencies = [
 "chalk-derive",
 "lazy_static",
@@ -477,9 +484,9 @@ dependencies = [
 [[package]]
 name = "chalk-solve"
-version = "0.31.0"
+version = "0.32.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50e8407bba06d9e760011a28060e8f3b3f87b82ea53fb8bfaa43614c19c14dcc"
+checksum = "981534d499a8476ecc0b520be4d3864757f96211826a75360fbf2cb6fae362ab"
 dependencies = [
 "chalk-derive",
 "chalk-ir",
@@ -548,7 +555,6 @@ dependencies = [
 "cargo_metadata 0.11.1",
 "if_chain",
 "itertools 0.9.0",
-"lazy_static",
 "pulldown-cmark 0.8.0",
 "quine-mc_cluskey",
 "quote",
@@ -670,6 +676,12 @@ dependencies = [
 "winapi 0.3.9",
 ]
+[[package]]
+name = "const_fn"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce90df4c658c62f12d78f7508cf92f9173e5184a539c10bfe54a3107b3ffd0f2"
 [[package]]
 name = "constant_time_eq"
 version = "0.1.5"
@@ -723,17 +735,17 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 ]
 [[package]]
 name = "crossbeam-channel"
-version = "0.4.3"
+version = "0.4.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09ee0cc8804d5393478d743b035099520087a5186f3b93fa58cec08fa62407b6"
+checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87"
 dependencies = [
-"cfg-if",
 "crossbeam-utils 0.7.2",
+"maybe-uninit",
 ]
 [[package]]
@@ -754,7 +766,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace"
 dependencies = [
 "autocfg",
-"cfg-if",
+"cfg-if 0.1.10",
 "crossbeam-utils 0.7.2",
 "lazy_static",
 "maybe-uninit",
@@ -777,7 +789,7 @@ version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "crossbeam-utils 0.7.2",
 "maybe-uninit",
 ]
@@ -788,7 +800,7 @@ version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "lazy_static",
 ]
@@ -799,7 +811,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
 dependencies = [
 "autocfg",
-"cfg-if",
+"cfg-if 0.1.10",
+"lazy_static",
+]
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec91540d98355f690a86367e566ecad2e9e579f230230eb7c21398372be73ea5"
+dependencies = [
+"autocfg",
+"cfg-if 1.0.0",
+"const_fn",
 "lazy_static",
 ]
@@ -929,7 +953,7 @@ version = "2.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "dirs-sys",
 ]
@@ -1071,7 +1095,7 @@ version = "0.2.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3ed85775dcc68644b5c950ac06a2b23768d3bc9390464151aaf27136998dcf9e"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "libc",
 "redox_syscall",
 "winapi 0.3.9",
@@ -1089,7 +1113,7 @@ version = "1.0.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "68c90b0fc46cf89d227cc78b40e494ff81287a92dd07631e5af0d06fe3cf885e"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "crc32fast",
 "libc",
 "libz-sys",
@@ -1220,7 +1244,7 @@ version = "0.1.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "libc",
 "wasi",
 ]
@@ -1231,7 +1255,7 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ee8025cf36f917e6a52cce185b7c7177689b838b7ec138364e50cc2277a56cf4"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "libc",
 "wasi",
 ]
@@ -1249,9 +1273,9 @@ dependencies = [
 [[package]]
 name = "git2"
-version = "0.13.8"
+version = "0.13.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6ac22e49b7d886b6802c66662b12609452248b1bc9e87d6d83ecea3db96f557"
+checksum = "ca6f1a0238d7f8f8fd5ee642f4ebac4dbc03e03d1f78fbe7a3ede35dcf7e2224"
 dependencies = [
 "bitflags",
 "libc",
@@ -1669,9 +1693,9 @@ dependencies = [
 [[package]]
 name = "libgit2-sys"
-version = "0.12.9+1.0.1"
+version = "0.12.14+1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b33bf3d9d4c45b48ae1ea7c334be69994624dc0a69f833d5d9f7605f24b552b"
+checksum = "8f25af58e6495f7caf2919d08f212de550cfa3ed2f5e744988938ea292b9f549"
 dependencies = [
 "cc",
 "libc",
@@ -1693,9 +1717,9 @@ dependencies = [
 [[package]]
 name = "libssh2-sys"
-version = "0.2.18"
+version = "0.2.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eafa907407504b0e683786d4aba47acf250f114d37357d56608333fd167dd0fc"
+checksum = "ca46220853ba1c512fc82826d0834d87b06bcd3c2a42241b7de72f3d2fe17056"
 dependencies = [
 "cc",
 "libc",
@@ -1760,7 +1784,7 @@ version = "0.4.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 ]
 [[package]]
@@ -1990,7 +2014,7 @@ version = "0.6.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fce347092656428bc8eaf6201042cb551b8d67855af7374542a92a0fbfcac430"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "fuchsia-zircon",
 "fuchsia-zircon-sys",
 "iovec",
@@ -2071,7 +2095,7 @@ version = "0.2.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2ba7c918ac76704fb42afcbbb43891e72731f3dcca3bef2a19786297baf14af7"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "libc",
 "winapi 0.3.9",
 ]
@@ -2122,6 +2146,12 @@ dependencies = [
 "rustc-std-workspace-core",
 ]
+[[package]]
+name = "object"
+version = "0.21.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37fd5004feb2ce328a52b0b3d01dbf4ffff72583493900ed15f22d4111c51693"
 [[package]]
 name = "once_cell"
 version = "1.4.1"
@@ -2165,7 +2195,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8d575eff3665419f9b83678ff2815858ad9d11567e082f5ac1814baba4e2bcb4"
 dependencies = [
 "bitflags",
-"cfg-if",
+"cfg-if 0.1.10",
 "foreign-types",
 "lazy_static",
 "libc",
@@ -2180,9 +2210,9 @@ checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de"
 [[package]]
 name = "openssl-src"
-version = "111.10.2+1.1.1g"
+version = "111.12.0+1.1.1h"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a287fdb22e32b5b60624d4a5a7a02dbe82777f730ec0dbc42a0554326fef5a70"
+checksum = "858a4132194f8570a7ee9eb8629e85b23cbc4565f2d4a162e87556e5956abf61"
 dependencies = [
 "cc",
 ]
@@ -2222,14 +2252,14 @@ version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a85ea9fc0d4ac0deb6fe7911d38786b32fc11119afd9e9d38b84ff691ce64220"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 ]
 [[package]]
 name = "panic_abort"
 version = "0.0.0"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "compiler_builtins",
 "core",
 "libc",
@@ -2240,7 +2270,7 @@ name = "panic_unwind"
 version = "0.0.0"
 dependencies = [
 "alloc",
-"cfg-if",
+"cfg-if 0.1.10",
 "compiler_builtins",
 "core",
 "libc",
@@ -2303,7 +2333,7 @@ version = "0.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b876b1b9e7ac6e1a74a6da34d25c42e17e8862aa409cbbbdcfc8d86c6f3bc62b"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "cloudabi 0.0.3",
 "libc",
 "redox_syscall",
@@ -2318,7 +2348,7 @@ version = "0.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "cloudabi 0.0.3",
 "libc",
 "redox_syscall",
@@ -2332,7 +2362,7 @@ version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c361aa727dd08437f2f1447be8b59a33b0edd15e0fcee698f935613d9efbca9b"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "cloudabi 0.1.0",
 "instant",
 "libc",
@@ -3036,7 +3066,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "14ffd17a37e00d77926a0713f191c59ff3aeb2b551a024c7cfffce14bab79be8"
 dependencies = [
 "bitflags",
-"cfg-if",
+"cfg-if 0.1.10",
 "crossbeam-utils 0.7.2",
 "ena",
 "indexmap",
@@ -3212,7 +3242,7 @@ version = "679.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1c267f15c3cfc82a8a441d2bf86bcccf299d1eb625822468e3d8ee6f7c5a1c89"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "md-5",
 "rustc-ap-rustc_arena",
 "rustc-ap-rustc_data_structures",
@@ -3338,7 +3368,6 @@ dependencies = [
 name = "rustc_arena"
 version = "0.0.0"
 dependencies = [
-"rustc_data_structures",
 "smallvec 1.4.2",
 ]
@@ -3505,7 +3534,7 @@ version = "0.0.0"
 dependencies = [
 "arrayvec",
 "bitflags",
-"cfg-if",
+"cfg-if 0.1.10",
 "crossbeam-utils 0.7.2",
 "ena",
 "indexmap",
@@ -4056,7 +4085,7 @@ dependencies = [
 name = "rustc_span"
 version = "0.0.0"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "md-5",
 "rustc_arena",
 "rustc_data_structures",
@@ -4417,7 +4446,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2933378ddfeda7ea26f48c555bdad8bb446bf8a3d17832dc83e380d444cfb8c1"
 dependencies = [
 "block-buffer 0.9.0",
-"cfg-if",
+"cfg-if 0.1.10",
 "cpuid-bool",
 "digest 0.9.0",
 "opaque-debug 0.3.0",
@@ -4503,7 +4532,7 @@ version = "0.3.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "03088793f677dce356f3ccc2edb1b314ad191ab702a5de3faf49304f7e104918"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "libc",
 "redox_syscall",
 "winapi 0.3.9",
@@ -4522,7 +4551,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "21ccb4c06ec57bc82d0f610f1a2963d7648700e43a6f513e564b9c89f7991786"
 dependencies = [
 "cc",
-"cfg-if",
+"cfg-if 0.1.10",
 "libc",
 "psm",
 "winapi 0.3.9",
@@ -4534,7 +4563,7 @@ version = "0.0.0"
 dependencies = [
 "addr2line",
 "alloc",
-"cfg-if",
+"cfg-if 0.1.10",
 "compiler_builtins",
 "core",
 "dlmalloc",
@@ -4543,7 +4572,7 @@ dependencies = [
 "hermit-abi",
 "libc",
 "miniz_oxide",
-"object",
+"object 0.20.0",
 "panic_abort",
 "panic_unwind",
 "profiler_builtins",
@@ -4676,7 +4705,7 @@ version = "3.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "libc",
 "rand",
 "redox_syscall",
@@ -4736,7 +4765,7 @@ dependencies = [
 name = "test"
 version = "0.0.0"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "core",
 "getopts",
 "libc",
@@ -5068,9 +5097,9 @@ dependencies = [
 [[package]]
 name = "toml"
-version = "0.5.6"
+version = "0.5.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffc92d160b1eef40665be3a05630d003936a3bc7da7421277846c2613e92c71a"
+checksum = "75cf45bb0bef80604d001caaec0d09da99611b3c0fd39d3080468875cdb65645"
 dependencies = [
 "serde",
 ]
@@ -5081,7 +5110,7 @@ version = "0.1.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6d79ca061b032d6ce30c660fded31189ca0b9922bf483cd70759f13a2d86786c"
 dependencies = [
-"cfg-if",
+"cfg-if 0.1.10",
 "tracing-attributes",
 "tracing-core",
 ]
@@ -5099,9 +5128,9 @@ dependencies = [
 [[package]]
 name = "tracing-core"
-version = "0.1.15"
+version = "0.1.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f0e00789804e99b20f12bc7003ca416309d28a6f495d6af58d1e2c2842461b5"
+checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f"
 dependencies = [
 "lazy_static",
 ]
@@ -5129,9 +5158,9 @@ dependencies = [
 [[package]]
 name = "tracing-subscriber"
-version = "0.2.11"
+version = "0.2.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abd165311cc4d7a555ad11cc77a37756df836182db0d81aac908c8184c584f40"
+checksum = "4ef0a5e15477aa303afbfac3a44cba9b6430fdaad52423b1e6c0dbbe28c3eedd"
 dependencies = [
 "ansi_term 0.12.1",
 "chrono",
@@ -5144,6 +5173,7 @@ dependencies = [
 "sharded-slab",
 "smallvec 1.4.2",
 "thread_local",
+"tracing",
 "tracing-core",
 "tracing-log",
 "tracing-serde",
@@ -5277,7 +5307,7 @@ name = "unwind"
 version = "0.0.0"
 dependencies = [
 "cc",
-"cfg-if",
+"cfg-if 0.1.10",
 "compiler_builtins",
 "core",
 "libc",


@@ -5,5 +5,4 @@ version = "0.0.0"
 edition = "2018"
 [dependencies]
-rustc_data_structures = { path = "../rustc_data_structures" }
 smallvec = { version = "1.0", features = ["union", "may_dangle"] }


@@ -16,7 +16,6 @@
 #![feature(maybe_uninit_slice)]
 #![cfg_attr(test, feature(test))]
-use rustc_data_structures::cold_path;
 use smallvec::SmallVec;
 use std::alloc::Layout;
@@ -27,6 +26,12 @@ use std::mem::{self, MaybeUninit};
 use std::ptr;
 use std::slice;
+#[inline(never)]
+#[cold]
+pub fn cold_path<F: FnOnce() -> R, R>(f: F) -> R {
+f()
+}
 /// An arena that can hold objects of only one type.
 pub struct TypedArena<T> {
 /// A pointer to the next object to be allocated.


@@ -27,6 +27,7 @@ use crate::token::{self, CommentKind, DelimToken};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::thin_vec::ThinVec;
 use rustc_macros::HashStable_Generic;
@@ -166,13 +167,6 @@ pub enum GenericArgs {
 }
 impl GenericArgs {
-pub fn is_parenthesized(&self) -> bool {
-match *self {
-Parenthesized(..) => true,
-_ => false,
-}
-}
 pub fn is_angle_bracketed(&self) -> bool {
 match *self {
 AngleBracketed(..) => true,
@@ -856,13 +850,6 @@ impl BinOpKind {
 }
 }
-pub fn is_shift(&self) -> bool {
-match *self {
-BinOpKind::Shl | BinOpKind::Shr => true,
-_ => false,
-}
-}
 pub fn is_comparison(&self) -> bool {
 use BinOpKind::*;
 // Note for developers: please keep this as is;
@@ -872,11 +859,6 @@ impl BinOpKind {
 And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false,
 }
 }
-/// Returns `true` if the binary operator takes its arguments by value
-pub fn is_by_value(&self) -> bool {
-!self.is_comparison()
-}
 }
 pub type BinOp = Spanned<BinOpKind>;
@@ -895,14 +877,6 @@ pub enum UnOp {
 }
 impl UnOp {
-/// Returns `true` if the unary operator takes its argument by value
-pub fn is_by_value(u: UnOp) -> bool {
-match u {
-UnOp::Neg | UnOp::Not => true,
-_ => false,
-}
-}
 pub fn to_string(op: UnOp) -> &'static str {
 match op {
 UnOp::Deref => "*",
@@ -1752,13 +1726,6 @@ impl IntTy {
 }
 }
-pub fn val_to_string(&self, val: i128) -> String {
-// Cast to a `u128` so we can correctly print `INT128_MIN`. All integral types
-// are parsed as `u128`, so we wouldn't want to print an extra negative
-// sign.
-format!("{}{}", val as u128, self.name_str())
-}
 pub fn bit_width(&self) -> Option<u64> {
 Some(match *self {
 IntTy::Isize => return None,
@@ -1817,10 +1784,6 @@ impl UintTy {
 }
 }
-pub fn val_to_string(&self, val: u128) -> String {
-format!("{}{}", val, self.name_str())
-}
 pub fn bit_width(&self) -> Option<u64> {
 Some(match *self {
 UintTy::Usize => return None,
@@ -1864,7 +1827,7 @@ pub enum AssocTyConstraintKind {
 Bound { bounds: GenericBounds },
 }
-#[derive(Clone, Encodable, Decodable, Debug)]
+#[derive(Encodable, Decodable, Debug)]
 pub struct Ty {
 pub id: NodeId,
 pub kind: TyKind,
@@ -1872,6 +1835,27 @@ pub struct Ty {
 pub tokens: Option<TokenStream>,
 }
+impl Clone for Ty {
+fn clone(&self) -> Self {
+ensure_sufficient_stack(|| Self {
+id: self.id,
+kind: self.kind.clone(),
+span: self.span,
+tokens: self.tokens.clone(),
+})
+}
+}
+impl Ty {
+pub fn peel_refs(&self) -> &Self {
+let mut final_ty = self;
+while let TyKind::Rptr(_, MutTy { ty, .. }) = &final_ty.kind {
+final_ty = &ty;
+}
+final_ty
+}
+}
 #[derive(Clone, Encodable, Decodable, Debug)]
 pub struct BareFnTy {
 pub unsafety: Unsafe,


@@ -101,11 +101,6 @@ impl NestedMetaItem {
 self.meta_item().is_some()
 }
-/// Returns `true` if the variant is `Literal`.
-pub fn is_literal(&self) -> bool {
-self.literal().is_some()
-}
 /// Returns `true` if `self` is a `MetaItem` and the meta item is a word.
 pub fn is_word(&self) -> bool {
 self.meta_item().map_or(false, |meta_item| meta_item.is_word())
@@ -232,10 +227,6 @@
 pub fn is_value_str(&self) -> bool {
 self.value_str().is_some()
 }
-pub fn is_meta_item_list(&self) -> bool {
-self.meta_item_list().is_some()
-}
 }
 impl AttrItem {


@@ -54,16 +54,6 @@ pub enum DelimToken {
 NoDelim,
 }
-impl DelimToken {
-pub fn len(self) -> usize {
-if self == NoDelim { 0 } else { 1 }
-}
-pub fn is_empty(self) -> bool {
-self == NoDelim
-}
-}
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum LitKind {
 Bool, // AST only, must never appear in a `Token`
@@ -810,10 +800,10 @@ impl Nonterminal {
 if let ExpnKind::Macro(_, macro_name) = orig_span.ctxt().outer_expn_data().kind {
 let filename = source_map.span_to_filename(orig_span);
 if let FileName::Real(RealFileName::Named(path)) = filename {
-let matches_prefix = |prefix| {
-// Check for a path that ends with 'prefix*/src/lib.rs'
+let matches_prefix = |prefix, filename| {
+// Check for a path that ends with 'prefix*/src/<filename>'
 let mut iter = path.components().rev();
-iter.next().and_then(|p| p.as_os_str().to_str()) == Some("lib.rs")
+iter.next().and_then(|p| p.as_os_str().to_str()) == Some(filename)
 && iter.next().and_then(|p| p.as_os_str().to_str()) == Some("src")
 && iter
 .next()
@@ -821,14 +811,25 @@
 .map_or(false, |p| p.starts_with(prefix))
 };
-if (macro_name == sym::impl_macros && matches_prefix("time-macros-impl"))
-|| (macro_name == sym::arrays && matches_prefix("js-sys"))
+if (macro_name == sym::impl_macros
+&& matches_prefix("time-macros-impl", "lib.rs"))
+|| (macro_name == sym::arrays && matches_prefix("js-sys", "lib.rs"))
 {
 let snippet = source_map.span_to_snippet(orig_span);
 if snippet.as_deref() == Ok("$name") {
 return Some((*ident, *is_raw));
 }
 }
+if macro_name == sym::tuple_from_req
+&& (matches_prefix("actix-web", "extract.rs")
+|| matches_prefix("actori-web", "extract.rs"))
+{
+let snippet = source_map.span_to_snippet(orig_span);
+if snippet.as_deref() == Ok("$T") {
+return Some((*ident, *is_raw));
+}
+}
 }
 }
 }


@@ -295,12 +295,6 @@ impl TokenStream {
 .collect(),
 ))
 }
-pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-TokenStream(Lrc::new(
-self.0.iter().map(|(tree, is_joint)| (f(tree.clone()), *is_joint)).collect(),
-))
-}
 }
 // 99.5%+ of the time we have 1 or 2 elements in this vector.


@@ -231,7 +231,6 @@ impl AssocOp {
 }
 }
-pub const PREC_RESET: i8 = -100;
 pub const PREC_CLOSURE: i8 = -40;
 pub const PREC_JUMP: i8 = -30;
 pub const PREC_RANGE: i8 = -10;


@@ -985,7 +985,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
 asm::InlineAsmReg::parse(
 sess.asm_arch?,
 |feature| sess.target_features.contains(&Symbol::intern(feature)),
-&sess.target.target,
+&sess.target,
 s,
 )
 .map_err(|e| {


@@ -796,7 +796,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
 fn visit_expr(&mut self, expr: &'a Expr) {
 match &expr.kind {
-ExprKind::LlvmInlineAsm(..) if !self.session.target.target.options.allow_asm => {
+ExprKind::LlvmInlineAsm(..) if !self.session.target.options.allow_asm => {
 struct_span_err!(
 self.session,
 expr.span,


@@ -170,17 +170,11 @@ pub enum Token {
 impl Token {
 crate fn is_eof(&self) -> bool {
-match *self {
-Token::Eof => true,
-_ => false,
-}
+matches!(self, Token::Eof)
 }
 pub fn is_hardbreak_tok(&self) -> bool {
-match *self {
-Token::Break(BreakToken { offset: 0, blank_space: bs }) if bs == SIZE_INFINITY => true,
-_ => false,
-}
+matches!(self, Token::Break(BreakToken { offset: 0, blank_space: SIZE_INFINITY }))
 }
 }
@@ -491,12 +485,9 @@ impl Printer {
 }
 fn get_top(&mut self) -> PrintStackElem {
-match self.print_stack.last() {
-Some(el) => *el,
-None => {
-PrintStackElem { offset: 0, pbreak: PrintStackBreak::Broken(Breaks::Inconsistent) }
-}
-}
+*self.print_stack.last().unwrap_or({
+&PrintStackElem { offset: 0, pbreak: PrintStackBreak::Broken(Breaks::Inconsistent) }
+})
 }
 fn print_begin(&mut self, b: BeginToken, l: isize) {


@@ -0,0 +1,104 @@
#[cfg(test)]
mod tests;
pub mod state;
pub use state::{print_crate, AnnNode, Comments, PpAnn, PrintState, State};
use rustc_ast as ast;
use rustc_ast::token::{Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
pub fn nonterminal_to_string_no_extra_parens(nt: &Nonterminal) -> String {
let state = State::without_insert_extra_parens();
state.nonterminal_to_string(nt)
}
pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
State::new().nonterminal_to_string(nt)
}
/// Print the token kind precisely, without converting `$crate` into its respective crate name.
pub fn token_kind_to_string(tok: &TokenKind) -> String {
State::new().token_kind_to_string(tok)
}
/// Print the token precisely, without converting `$crate` into its respective crate name.
pub fn token_to_string(token: &Token) -> String {
State::new().token_to_string(token)
}
pub fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
State::new().token_to_string_ext(token, convert_dollar_crate)
}
pub fn ty_to_string(ty: &ast::Ty) -> String {
State::new().ty_to_string(ty)
}
pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String {
State::new().bounds_to_string(bounds)
}
pub fn pat_to_string(pat: &ast::Pat) -> String {
State::new().pat_to_string(pat)
}
pub fn expr_to_string(e: &ast::Expr) -> String {
State::new().expr_to_string(e)
}
pub fn tt_to_string(tt: &TokenTree) -> String {
State::new().tt_to_string(tt)
}
pub fn tts_to_string(tokens: &TokenStream) -> String {
State::new().tts_to_string(tokens)
}
pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
State::new().stmt_to_string(stmt)
}
pub fn item_to_string(i: &ast::Item) -> String {
State::new().item_to_string(i)
}
pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String {
State::new().generic_params_to_string(generic_params)
}
pub fn path_to_string(p: &ast::Path) -> String {
State::new().path_to_string(p)
}
pub fn path_segment_to_string(p: &ast::PathSegment) -> String {
State::new().path_segment_to_string(p)
}
pub fn vis_to_string(v: &ast::Visibility) -> String {
State::new().vis_to_string(v)
}
pub fn block_to_string(blk: &ast::Block) -> String {
State::new().block_to_string(blk)
}
pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String {
State::new().meta_list_item_to_string(li)
}
pub fn attr_item_to_string(ai: &ast::AttrItem) -> String {
State::new().attr_item_to_string(ai)
}
pub fn attribute_to_string(attr: &ast::Attribute) -> String {
State::new().attribute_to_string(attr)
}
pub fn param_to_string(arg: &ast::Param) -> String {
State::new().param_to_string(arg)
}
pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
State::new().to_string(f)
}


@@ -20,9 +20,6 @@ use rustc_span::{BytePos, FileName, Span};
 use std::borrow::Cow;
-#[cfg(test)]
-mod tests;
 pub enum MacHeader<'a> {
 Path(&'a ast::Path),
 Keyword(&'static str),
@@ -91,6 +88,13 @@ pub struct State<'a> {
 comments: Option<Comments<'a>>,
 ann: &'a (dyn PpAnn + 'a),
 is_expanded: bool,
+// If `true`, additional parenthesis (separate from `ExprKind::Paren`)
+// are inserted to ensure that proper precedence is preserved
+// in the pretty-printed output.
+//
+// This is usually `true`, except when performing the pretty-print/reparse
+// check in `nt_to_tokenstream`
+insert_extra_parens: bool,
 }
 crate const INDENT_UNIT: usize = 4;
@@ -112,6 +116,7 @@ pub fn print_crate<'a>(
 comments: Some(Comments::new(sm, filename, input)),
 ann,
 is_expanded,
+insert_extra_parens: true,
 };
 if is_expanded && has_injected_crate {
@@ -142,13 +147,6 @@
 s.s.eof()
 }
-pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
-let mut printer =
-State { s: pp::mk_printer(), comments: None, ann: &NoAnn, is_expanded: false };
-f(&mut printer);
-printer.s.eof()
-}
 // This makes printed token streams look slightly nicer,
 // and also addresses some specific regressions described in #63896 and #73345.
 fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
@@ -231,173 +229,8 @@ pub fn literal_to_string(lit: token::Lit) -> String {
 out
 }
-/// Print the token kind precisely, without converting `$crate` into its respective crate name.
-pub fn token_kind_to_string(tok: &TokenKind) -> String {
-token_kind_to_string_ext(tok, None)
-}
-fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>) -> String {
-match *tok {
-token::Eq => "=".to_string(),
-token::Lt => "<".to_string(),
-token::Le => "<=".to_string(),
-token::EqEq => "==".to_string(),
-token::Ne => "!=".to_string(),
-token::Ge => ">=".to_string(),
-token::Gt => ">".to_string(),
-token::Not => "!".to_string(),
-token::Tilde => "~".to_string(),
-token::OrOr => "||".to_string(),
-token::AndAnd => "&&".to_string(),
-token::BinOp(op) => binop_to_string(op).to_string(),
-token::BinOpEq(op) => format!("{}=", binop_to_string(op)),
-/* Structural symbols */
-token::At => "@".to_string(),
-token::Dot => ".".to_string(),
-token::DotDot => "..".to_string(),
-token::DotDotDot => "...".to_string(),
-token::DotDotEq => "..=".to_string(),
-token::Comma => ",".to_string(),
-token::Semi => ";".to_string(),
-token::Colon => ":".to_string(),
-token::ModSep => "::".to_string(),
-token::RArrow => "->".to_string(),
-token::LArrow => "<-".to_string(),
-token::FatArrow => "=>".to_string(),
-token::OpenDelim(token::Paren) => "(".to_string(),
-token::CloseDelim(token::Paren) => ")".to_string(),
-token::OpenDelim(token::Bracket) => "[".to_string(),
-token::CloseDelim(token::Bracket) => "]".to_string(),
-token::OpenDelim(token::Brace) => "{".to_string(),
-token::CloseDelim(token::Brace) => "}".to_string(),
-token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(),
-token::Pound => "#".to_string(),
-token::Dollar => "$".to_string(),
-token::Question => "?".to_string(),
-token::SingleQuote => "'".to_string(),
-/* Literals */
-token::Literal(lit) => literal_to_string(lit),
-/* Name components */
-token::Ident(s, is_raw) => IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string(),
-token::Lifetime(s) => s.to_string(),
-/* Other */
-token::DocComment(comment_kind, attr_style, data) => {
-doc_comment_to_string(comment_kind, attr_style, data)
-}
-token::Eof => "<eof>".to_string(),
-token::Interpolated(ref nt) => nonterminal_to_string(nt),
-}
-}
-/// Print the token precisely, without converting `$crate` into its respective crate name.
-pub fn token_to_string(token: &Token) -> String {
-token_to_string_ext(token, false)
-}
-fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
-let convert_dollar_crate = convert_dollar_crate.then_some(token.span);
-token_kind_to_string_ext(&token.kind, convert_dollar_crate)
-}
-pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
-match *nt {
-token::NtExpr(ref e) => expr_to_string(e),
-token::NtMeta(ref e) => attr_item_to_string(e),
-token::NtTy(ref e) => ty_to_string(e),
-token::NtPath(ref e) => path_to_string(e),
-token::NtItem(ref e) => item_to_string(e),
-token::NtBlock(ref e) => block_to_string(e),
-token::NtStmt(ref e) => stmt_to_string(e),
-token::NtPat(ref e) => pat_to_string(e),
-token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(),
-token::NtLifetime(e) => e.to_string(),
-token::NtLiteral(ref e) => expr_to_string(e),
-token::NtTT(ref tree) => tt_to_string(tree),
-token::NtVis(ref e) => vis_to_string(e),
-}
-}
-pub fn ty_to_string(ty: &ast::Ty) -> String {
-to_string(|s| s.print_type(ty))
-}
-pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String {
-to_string(|s| s.print_type_bounds("", bounds))
-}
-pub fn pat_to_string(pat: &ast::Pat) -> String {
-to_string(|s| s.print_pat(pat))
-}
-pub fn expr_to_string(e: &ast::Expr) -> String {
-to_string(|s| s.print_expr(e))
-}
-pub fn tt_to_string(tt: &TokenTree) -> String {
-to_string(|s| s.print_tt(tt, false))
-}
-pub fn tts_to_string(tokens: &TokenStream) -> String {
-to_string(|s| s.print_tts(tokens, false))
-}
-pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
-to_string(|s| s.print_stmt(stmt))
-}
-pub fn item_to_string(i: &ast::Item) -> String {
-to_string(|s| s.print_item(i))
-}
-pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String {
-to_string(|s| s.print_generic_params(generic_params))
-}
-pub fn path_to_string(p: &ast::Path) -> String {
-to_string(|s| s.print_path(p, false, 0))
-}
-pub fn path_segment_to_string(p: &ast::PathSegment) -> String {
-to_string(|s| s.print_path_segment(p, false))
-}
-pub fn vis_to_string(v: &ast::Visibility) -> String {
-to_string(|s| s.print_visibility(v))
-}
-fn block_to_string(blk: &ast::Block) -> String {
-to_string(|s| {
-// Containing cbox, will be closed by `print_block` at `}`.
-s.cbox(INDENT_UNIT);
-// Head-ibox, will be closed by `print_block` after `{`.
-s.ibox(0);
-s.print_block(blk)
-})
-}
-pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String {
-to_string(|s| s.print_meta_list_item(li))
-}
-fn attr_item_to_string(ai: &ast::AttrItem) -> String {
-to_string(|s| s.print_attr_item(ai, ai.path.span))
-}
-pub fn attribute_to_string(attr: &ast::Attribute) -> String {
-to_string(|s| s.print_attribute(attr))
-}
-pub fn param_to_string(arg: &ast::Param) -> String {
-to_string(|s| s.print_param(arg, false))
-}
 fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String {
-format!("{}{}", to_string(|s| s.print_visibility(vis)), s)
+format!("{}{}", State::new().to_string(|s| s.print_visibility(vis)), s)
 }
 impl std::ops::Deref for State<'_> {
@@ -414,6 +247,7 @@ impl std::ops::DerefMut for State<'_> {
 }
 pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::DerefMut {
+fn insert_extra_parens(&self) -> bool;
 fn comments(&mut self) -> &mut Option<Comments<'a>>;
 fn print_ident(&mut self, ident: Ident);
 fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool);
@@ -679,7 +513,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::DerefMut {
 fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) {
 match tt {
 TokenTree::Token(token) => {
-self.word(token_to_string_ext(&token, convert_dollar_crate));
+let token_str = self.token_to_string_ext(&token, convert_dollar_crate);
+self.word(token_str);
 if let token::DocComment(..) = token.kind {
 self.hardbreak()
 }
@@ -745,14 +580,20 @@
 self.space();
 }
 }
-_ => self.word(token_kind_to_string(&token::OpenDelim(delim))),
+_ => {
+let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
+self.word(token_str)
+}
 }
 self.ibox(0);
 self.print_tts(tts, convert_dollar_crate);
 self.end();
 match delim {
 DelimToken::Brace => self.bclose(span),
-_ => self.word(token_kind_to_string(&token::CloseDelim(delim))),
+_ => {
+let token_str = self.token_kind_to_string(&token::CloseDelim(delim));
+self.word(token_str)
+}
 }
 }
@@ -818,9 +659,190 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::DerefMut {
 }
 }
 }
+fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
+match *nt {
+token::NtExpr(ref e) => self.expr_to_string(e),
+token::NtMeta(ref e) => self.attr_item_to_string(e),
+token::NtTy(ref e) => self.ty_to_string(e),
+token::NtPath(ref e) => self.path_to_string(e),
+token::NtItem(ref e) => self.item_to_string(e),
+token::NtBlock(ref e) => self.block_to_string(e),
+token::NtStmt(ref e) => self.stmt_to_string(e),
+token::NtPat(ref e) => self.pat_to_string(e),
+token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(),
+token::NtLifetime(e) => e.to_string(),
+token::NtLiteral(ref e) => self.expr_to_string(e),
+token::NtTT(ref tree) => self.tt_to_string(tree),
+token::NtVis(ref e) => self.vis_to_string(e),
+}
+}
+/// Print the token kind precisely, without converting `$crate` into its respective crate name.
+fn token_kind_to_string(&self, tok: &TokenKind) -> String {
+self.token_kind_to_string_ext(tok, None)
+}
+fn token_kind_to_string_ext(
+&self,
+tok: &TokenKind,
+convert_dollar_crate: Option<Span>,
+) -> String {
+match *tok {
+token::Eq => "=".to_string(),
+token::Lt => "<".to_string(),
+token::Le => "<=".to_string(),
+token::EqEq => "==".to_string(),
+token::Ne => "!=".to_string(),
+token::Ge => ">=".to_string(),
+token::Gt => ">".to_string(),
+token::Not => "!".to_string(),
+token::Tilde => "~".to_string(),
+token::OrOr => "||".to_string(),
+token::AndAnd => "&&".to_string(),
+token::BinOp(op) => binop_to_string(op).to_string(),
+token::BinOpEq(op) => format!("{}=", binop_to_string(op)),
+/* Structural symbols */
+token::At => "@".to_string(),
+token::Dot => ".".to_string(),
+token::DotDot => "..".to_string(),
+token::DotDotDot => "...".to_string(),
+token::DotDotEq => "..=".to_string(),
+token::Comma => ",".to_string(),
+token::Semi => ";".to_string(),
+token::Colon => ":".to_string(),
+token::ModSep => "::".to_string(),
+token::RArrow => "->".to_string(),
+token::LArrow => "<-".to_string(),
+token::FatArrow => "=>".to_string(),
+token::OpenDelim(token::Paren) => "(".to_string(),
+token::CloseDelim(token::Paren) => ")".to_string(),
+token::OpenDelim(token::Bracket) => "[".to_string(),
+token::CloseDelim(token::Bracket) => "]".to_string(),
+token::OpenDelim(token::Brace) => "{".to_string(),
+token::CloseDelim(token::Brace) => "}".to_string(),
+token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(),
+token::Pound => "#".to_string(),
+token::Dollar => "$".to_string(),
+token::Question => "?".to_string(),
+token::SingleQuote => "'".to_string(),
+/* Literals */
+token::Literal(lit) => literal_to_string(lit),
+/* Name components */
+token::Ident(s, is_raw) => {
+IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string()
+}
+token::Lifetime(s) => s.to_string(),
+/* Other */
+token::DocComment(comment_kind, attr_style, data) => {
+doc_comment_to_string(comment_kind, attr_style, data)
+}
+token::Eof => "<eof>".to_string(),
+token::Interpolated(ref nt) => self.nonterminal_to_string(nt),
+}
+}
+/// Print the token precisely, without converting `$crate` into its respective crate name.
+fn token_to_string(&self, token: &Token) -> String {
+self.token_to_string_ext(token, false)
+}
+fn token_to_string_ext(&self, token: &Token, convert_dollar_crate: bool) -> String {
+let convert_dollar_crate = convert_dollar_crate.then_some(token.span);
+self.token_kind_to_string_ext(&token.kind, convert_dollar_crate)
+}
+fn ty_to_string(&self, ty: &ast::Ty) -> String {
+self.to_string(|s| s.print_type(ty))
+}
+fn bounds_to_string(&self, bounds: &[ast::GenericBound]) -> String {
+self.to_string(|s| s.print_type_bounds("", bounds))
+}
+fn pat_to_string(&self, pat: &ast::Pat) -> String {
+self.to_string(|s| s.print_pat(pat))
+}
+fn expr_to_string(&self, e: &ast::Expr) -> String {
+self.to_string(|s| s.print_expr(e))
+}
+fn tt_to_string(&self, tt: &TokenTree) -> String {
+self.to_string(|s| s.print_tt(tt, false))
+}
+fn tts_to_string(&self, tokens: &TokenStream) -> String {
+self.to_string(|s| s.print_tts(tokens, false))
+}
+fn stmt_to_string(&self, stmt: &ast::Stmt) -> String {
+self.to_string(|s| s.print_stmt(stmt))
+}
+fn item_to_string(&self, i: &ast::Item) -> String {
+self.to_string(|s| s.print_item(i))
+}
+fn generic_params_to_string(&self, generic_params: &[ast::GenericParam]) -> String {
+self.to_string(|s| s.print_generic_params(generic_params))
+}
+fn path_to_string(&self, p: &ast::Path) -> String {
+self.to_string(|s| s.print_path(p, false, 0))
+}
+fn path_segment_to_string(&self, p: &ast::PathSegment) -> String {
+self.to_string(|s| s.print_path_segment(p, false))
+}
+fn vis_to_string(&self, v: &ast::Visibility) -> String {
+self.to_string(|s| s.print_visibility(v))
+}
+fn block_to_string(&self, blk: &ast::Block) -> String {
+self.to_string(|s| {
+// Containing cbox, will be closed by `print_block` at `}`.
+s.cbox(INDENT_UNIT);
+// Head-ibox, will be closed by `print_block` after `{`.
+s.ibox(0);
+s.print_block(blk)
+})
+}
+fn meta_list_item_to_string(&self, li: &ast::NestedMetaItem) -> String {
+self.to_string(|s| s.print_meta_list_item(li))
+}
+fn attr_item_to_string(&self, ai: &ast::AttrItem) -> String {
+self.to_string(|s| s.print_attr_item(ai, ai.path.span))
+}
+fn attribute_to_string(&self, attr: &ast::Attribute) -> String {
+self.to_string(|s| s.print_attribute(attr))
+}
+fn param_to_string(&self, arg: &ast::Param) -> String {
+self.to_string(|s| s.print_param(arg, false))
+}
+fn to_string(&self, f: impl FnOnce(&mut State<'_>)) -> String {
+let mut printer = State::new();
+printer.insert_extra_parens = self.insert_extra_parens();
+f(&mut printer);
+printer.s.eof()
+}
 }
 impl<'a> PrintState<'a> for State<'a> {
+fn insert_extra_parens(&self) -> bool {
+self.insert_extra_parens
+}
 fn comments(&mut self) -> &mut Option<Comments<'a>> {
 &mut self.comments
 }
@@ -856,6 +878,20 @@ impl<'a> PrintState<'a> for State<'a> {
}
impl<'a> State<'a> {
pub fn new() -> State<'a> {
State {
s: pp::mk_printer(),
comments: None,
ann: &NoAnn,
is_expanded: false,
insert_extra_parens: true,
}
}
pub(super) fn without_insert_extra_parens() -> State<'a> {
State { insert_extra_parens: false, ..State::new() }
}
// Synthesizes a comment that was not textually present in the original source
// file.
pub fn synth_comment(&mut self, text: String) {
@@ -1139,7 +1175,7 @@ impl<'a> State<'a> {
self.print_fn_full(sig, item.ident, gen, &item.vis, def, body, &item.attrs);
}
ast::ItemKind::Mod(ref _mod) => {
- self.head(to_string(|s| {
+ self.head(self.to_string(|s| {
s.print_visibility(&item.vis);
s.print_unsafety(_mod.unsafety);
s.word("mod");
@@ -1158,7 +1194,7 @@ impl<'a> State<'a> {
}
}
ast::ItemKind::ForeignMod(ref nmod) => {
- self.head(to_string(|s| {
+ self.head(self.to_string(|s| {
s.print_unsafety(nmod.unsafety);
s.word("extern");
}));
@@ -1366,7 +1402,7 @@ impl<'a> State<'a> {
ast::CrateSugar::JustCrate => self.word_nbsp("crate"),
},
ast::VisibilityKind::Restricted { ref path, .. } => {
- let path = to_string(|s| s.print_path(path, false, 0));
+ let path = self.to_string(|s| s.print_path(path, false, 0));
if path == "self" || path == "super" {
self.word_nbsp(format!("pub({})", path))
} else {
@@ -1658,7 +1694,8 @@ impl<'a> State<'a> {
}
/// Prints `expr` or `(expr)` when `needs_par` holds.
- fn print_expr_cond_paren(&mut self, expr: &ast::Expr, needs_par: bool) {
+ fn print_expr_cond_paren(&mut self, expr: &ast::Expr, mut needs_par: bool) {
+ needs_par &= self.insert_extra_parens;
if needs_par {
self.popen();
}
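The new `to_string` helper and the `insert_extra_parens` flag follow a simple pattern: build a throwaway printer, let a closure drive it, then take the buffered output, and only emit the extra parentheses when the flag allows it. A minimal standalone sketch of that shape, using a hypothetical `Printer` type rather than the real rustc `State`:

```rust
// Minimal sketch of the closure-driven printing helper; `Printer` is a
// stand-in for the pretty-printer state, not the rustc type.
struct Printer {
    out: String,
    insert_extra_parens: bool,
}

impl Printer {
    fn new(insert_extra_parens: bool) -> Self {
        Printer { out: String::new(), insert_extra_parens }
    }

    fn word(&mut self, s: &str) {
        self.out.push_str(s);
    }

    // Wrap `body` in parentheses only when the caller asks for it *and* the
    // printer is configured to insert extra parens.
    fn cond_paren(&mut self, mut needs_par: bool, body: &str) {
        needs_par &= self.insert_extra_parens;
        if needs_par {
            self.word("(");
        }
        self.word(body);
        if needs_par {
            self.word(")");
        }
    }

    // Run a closure against a fresh printer and return the accumulated text,
    // mirroring the `to_string(|s| ...)` helper in the diff.
    fn to_string(insert_extra_parens: bool, f: impl FnOnce(&mut Printer)) -> String {
        let mut p = Printer::new(insert_extra_parens);
        f(&mut p);
        p.out
    }
}

fn main() {
    let with = Printer::to_string(true, |p| p.cond_paren(true, "a + b"));
    let without = Printer::to_string(false, |p| p.cond_paren(true, "a + b"));
    assert_eq!(with, "(a + b)");
    assert_eq!(without, "a + b");
    println!("{} / {}", with, without);
}
```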

@@ -75,6 +75,12 @@ pub enum InlineAttr {
Never,
}
#[derive(Clone, Encodable, Decodable)]
pub enum InstructionSetAttr {
ArmA32,
ArmT32,
}
#[derive(Clone, Encodable, Decodable)]
pub enum OptimizeAttr {
None,
@@ -148,7 +154,7 @@ pub struct ConstStability {
}
/// The available stability levels.
- #[derive(Encodable, Decodable, PartialEq, PartialOrd, Copy, Clone, Debug, Eq, Hash)]
+ #[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)]
#[derive(HashStable_Generic)]
pub enum StabilityLevel {
// Reason for the current stability level and the relevant rust-lang issue

@@ -81,7 +81,7 @@ fn parse_args<'a>(
} // accept trailing commas
// Parse options
- if p.eat(&token::Ident(sym::options, false)) {
+ if p.eat_keyword(sym::options) {
parse_options(&mut p, &mut args)?;
allow_templates = false;
continue;
@ -101,19 +101,19 @@ fn parse_args<'a>(
}; };
let mut explicit_reg = false; let mut explicit_reg = false;
let op = if p.eat(&token::Ident(kw::In, false)) { let op = if p.eat_keyword(kw::In) {
let reg = parse_reg(&mut p, &mut explicit_reg)?; let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = p.parse_expr()?; let expr = p.parse_expr()?;
ast::InlineAsmOperand::In { reg, expr } ast::InlineAsmOperand::In { reg, expr }
} else if p.eat(&token::Ident(sym::out, false)) { } else if p.eat_keyword(sym::out) {
let reg = parse_reg(&mut p, &mut explicit_reg)?; let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) }; let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::Out { reg, expr, late: false } ast::InlineAsmOperand::Out { reg, expr, late: false }
} else if p.eat(&token::Ident(sym::lateout, false)) { } else if p.eat_keyword(sym::lateout) {
let reg = parse_reg(&mut p, &mut explicit_reg)?; let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) }; let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::Out { reg, expr, late: true } ast::InlineAsmOperand::Out { reg, expr, late: true }
} else if p.eat(&token::Ident(sym::inout, false)) { } else if p.eat_keyword(sym::inout) {
let reg = parse_reg(&mut p, &mut explicit_reg)?; let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = p.parse_expr()?; let expr = p.parse_expr()?;
if p.eat(&token::FatArrow) { if p.eat(&token::FatArrow) {
@ -123,7 +123,7 @@ fn parse_args<'a>(
} else { } else {
ast::InlineAsmOperand::InOut { reg, expr, late: false } ast::InlineAsmOperand::InOut { reg, expr, late: false }
} }
} else if p.eat(&token::Ident(sym::inlateout, false)) { } else if p.eat_keyword(sym::inlateout) {
let reg = parse_reg(&mut p, &mut explicit_reg)?; let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = p.parse_expr()?; let expr = p.parse_expr()?;
if p.eat(&token::FatArrow) { if p.eat(&token::FatArrow) {
@ -133,10 +133,10 @@ fn parse_args<'a>(
} else { } else {
ast::InlineAsmOperand::InOut { reg, expr, late: true } ast::InlineAsmOperand::InOut { reg, expr, late: true }
} }
} else if p.eat(&token::Ident(kw::Const, false)) { } else if p.eat_keyword(kw::Const) {
let expr = p.parse_expr()?; let expr = p.parse_expr()?;
ast::InlineAsmOperand::Const { expr } ast::InlineAsmOperand::Const { expr }
} else if p.eat(&token::Ident(sym::sym, false)) { } else if p.eat_keyword(sym::sym) {
let expr = p.parse_expr()?; let expr = p.parse_expr()?;
match expr.kind { match expr.kind {
ast::ExprKind::Path(..) => {} ast::ExprKind::Path(..) => {}
@ -164,7 +164,7 @@ fn parse_args<'a>(
args.templates.push(template); args.templates.push(template);
continue; continue;
} else { } else {
return Err(p.expect_one_of(&[], &[]).unwrap_err()); return p.unexpected();
}; };
allow_templates = false; allow_templates = false;
@@ -333,21 +333,22 @@ fn parse_options<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> Result<(), Diagn
p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
while !p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
- if p.eat(&token::Ident(sym::pure, false)) {
+ if p.eat_keyword(sym::pure) {
try_set_option(p, args, sym::pure, ast::InlineAsmOptions::PURE);
- } else if p.eat(&token::Ident(sym::nomem, false)) {
+ } else if p.eat_keyword(sym::nomem) {
try_set_option(p, args, sym::nomem, ast::InlineAsmOptions::NOMEM);
- } else if p.eat(&token::Ident(sym::readonly, false)) {
+ } else if p.eat_keyword(sym::readonly) {
try_set_option(p, args, sym::readonly, ast::InlineAsmOptions::READONLY);
- } else if p.eat(&token::Ident(sym::preserves_flags, false)) {
+ } else if p.eat_keyword(sym::preserves_flags) {
try_set_option(p, args, sym::preserves_flags, ast::InlineAsmOptions::PRESERVES_FLAGS);
- } else if p.eat(&token::Ident(sym::noreturn, false)) {
+ } else if p.eat_keyword(sym::noreturn) {
try_set_option(p, args, sym::noreturn, ast::InlineAsmOptions::NORETURN);
- } else if p.eat(&token::Ident(sym::nostack, false)) {
+ } else if p.eat_keyword(sym::nostack) {
try_set_option(p, args, sym::nostack, ast::InlineAsmOptions::NOSTACK);
- } else {
- p.expect(&token::Ident(sym::att_syntax, false))?;
+ } else if p.eat_keyword(sym::att_syntax) {
try_set_option(p, args, sym::att_syntax, ast::InlineAsmOptions::ATT_SYNTAX);
+ } else {
+ return p.unexpected();
}
// Allow trailing commas
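The switch from `p.eat(&token::Ident(sym::pure, false))` to `p.eat_keyword(sym::pure)` keeps the option loop shorter and funnels all failures through `p.unexpected()`. A rough standalone sketch of the same shape on a toy token stream, with a hypothetical `Parser` that is not `rustc_parse`:

```rust
// Toy parser over whitespace-separated words; `eat_keyword` consumes a word
// only if it matches, mirroring how the asm! option loop reads now.
struct Parser {
    toks: Vec<String>,
    pos: usize,
}

#[derive(Debug, Default)]
struct Options {
    pure: bool,
    nomem: bool,
    att_syntax: bool,
}

impl Parser {
    fn new(src: &str) -> Self {
        Parser { toks: src.split_whitespace().map(str::to_owned).collect(), pos: 0 }
    }

    fn eat_keyword(&mut self, kw: &str) -> bool {
        if self.toks.get(self.pos).map(|t| t.as_str() == kw).unwrap_or(false) {
            self.pos += 1;
            true
        } else {
            false
        }
    }

    fn parse_options(&mut self) -> Result<Options, String> {
        let mut opts = Options::default();
        while self.pos < self.toks.len() {
            if self.eat_keyword("pure") {
                opts.pure = true;
            } else if self.eat_keyword("nomem") {
                opts.nomem = true;
            } else if self.eat_keyword("att_syntax") {
                opts.att_syntax = true;
            } else {
                // Equivalent of `p.unexpected()`: bail out on anything else.
                return Err(format!("unexpected token `{}`", self.toks[self.pos]));
            }
        }
        Ok(opts)
    }
}

fn main() {
    let opts = Parser::new("pure att_syntax").parse_options().unwrap();
    println!("{:?}", opts);
    assert!(Parser::new("pure bogus").parse_options().is_err());
}
```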

@@ -120,8 +120,7 @@ fn parse_assert<'a>(
};
if parser.token != token::Eof {
- parser.expect_one_of(&[], &[])?;
- unreachable!();
+ return parser.unexpected();
}
Ok(Assert { cond_expr, custom_message })

@@ -1137,12 +1137,9 @@ impl<'a> MethodDef<'a> {
/// for each of the self-args, carried in precomputed variables.
/// ```{.text}
- /// let __self0_vi = unsafe {
- ///     std::intrinsics::discriminant_value(&self) };
- /// let __self1_vi = unsafe {
- ///     std::intrinsics::discriminant_value(&arg1) };
- /// let __self2_vi = unsafe {
- ///     std::intrinsics::discriminant_value(&arg2) };
+ /// let __self0_vi = std::intrinsics::discriminant_value(&self);
+ /// let __self1_vi = std::intrinsics::discriminant_value(&arg1);
+ /// let __self2_vi = std::intrinsics::discriminant_value(&arg2);
///
/// if __self0_vi == __self1_vi && __self0_vi == __self2_vi && ... {
/// match (...) {
@@ -1325,7 +1322,7 @@ impl<'a> MethodDef<'a> {
// Since we know that all the arguments will match if we reach
// the match expression we add the unreachable intrinsics as the
// result of the catch all which should help llvm in optimizing it
- Some(deriving::call_intrinsic(cx, sp, sym::unreachable, vec![]))
+ Some(deriving::call_unreachable(cx, sp))
}
_ => None,
};
@@ -1356,12 +1353,9 @@ impl<'a> MethodDef<'a> {
// with three Self args, builds three statements:
//
// ```
- // let __self0_vi = unsafe {
- //     std::intrinsics::discriminant_value(&self) };
- // let __self1_vi = unsafe {
- //     std::intrinsics::discriminant_value(&arg1) };
- // let __self2_vi = unsafe {
- //     std::intrinsics::discriminant_value(&arg2) };
+ // let __self0_vi = std::intrinsics::discriminant_value(&self);
+ // let __self1_vi = std::intrinsics::discriminant_value(&arg1);
+ // let __self2_vi = std::intrinsics::discriminant_value(&arg2);
// ```
let mut index_let_stmts: Vec<ast::Stmt> = Vec::with_capacity(vi_idents.len() + 1);
@@ -1474,7 +1468,7 @@ impl<'a> MethodDef<'a> {
// derive Debug on such a type could here generate code
// that needs the feature gate enabled.)
- deriving::call_intrinsic(cx, sp, sym::unreachable, vec![])
+ deriving::call_unreachable(cx, sp)
} else {
// Final wrinkle: the self_args are expressions that deref
// down to desired places, but we cannot actually deref
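The derived code described in the doc comment above compares variant discriminants first and only falls into the variant-by-variant match when they agree. Outside the compiler the same idea can be expressed with the stable `std::mem::discriminant` instead of the `discriminant_value` intrinsic; a rough hand-written sketch of the generated shape:

```rust
use std::mem::discriminant;

#[derive(Debug)]
enum Shape {
    Circle(f64),
    Rect(f64, f64),
}

// Hand-written equivalent of what a derive might generate: check the
// discriminants first, then match on the payloads only when they agree.
fn same_kind_and_equal(a: &Shape, b: &Shape) -> bool {
    if discriminant(a) == discriminant(b) {
        match (a, b) {
            (Shape::Circle(r1), Shape::Circle(r2)) => r1 == r2,
            (Shape::Rect(w1, h1), Shape::Rect(w2, h2)) => w1 == w2 && h1 == h2,
            // Discriminants matched, so the arms above already cover this case.
            _ => unreachable!(),
        }
    } else {
        false
    }
}

fn main() {
    assert!(same_kind_and_equal(&Shape::Circle(1.0), &Shape::Circle(1.0)));
    assert!(!same_kind_and_equal(&Shape::Circle(1.0), &Shape::Rect(1.0, 2.0)));
}
```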

@@ -68,7 +68,14 @@ fn call_intrinsic(
) -> P<ast::Expr> {
let span = cx.with_def_site_ctxt(span);
let path = cx.std_path(&[sym::intrinsics, intrinsic]);
- let call = cx.expr_call_global(span, path, args);
+ cx.expr_call_global(span, path, args)
}
/// Constructs an expression that calls the `unreachable` intrinsic.
fn call_unreachable(cx: &ExtCtxt<'_>, span: Span) -> P<ast::Expr> {
let span = cx.with_def_site_ctxt(span);
let path = cx.std_path(&[sym::intrinsics, sym::unreachable]);
let call = cx.expr_call_global(span, path, vec![]);
cx.expr_block(P(ast::Block {
stmts: vec![cx.stmt_expr(call)],

@ -385,7 +385,7 @@ pub mod printf {
if let Start = state { if let Start = state {
match c { match c {
'1'..='9' => { '1'..='9' => {
let end = at_next_cp_while(next, is_digit); let end = at_next_cp_while(next, char::is_ascii_digit);
match end.next_cp() { match end.next_cp() {
// Yes, this *is* the parameter. // Yes, this *is* the parameter.
Some(('$', end2)) => { Some(('$', end2)) => {
@ -427,7 +427,7 @@ pub mod printf {
move_to!(next); move_to!(next);
} }
'1'..='9' => { '1'..='9' => {
let end = at_next_cp_while(next, is_digit); let end = at_next_cp_while(next, char::is_ascii_digit);
state = Prec; state = Prec;
width = Some(Num::from_str(at.slice_between(end).unwrap(), None)); width = Some(Num::from_str(at.slice_between(end).unwrap(), None));
move_to!(end); move_to!(end);
@ -441,7 +441,7 @@ pub mod printf {
} }
if let WidthArg = state { if let WidthArg = state {
let end = at_next_cp_while(at, is_digit); let end = at_next_cp_while(at, char::is_ascii_digit);
match end.next_cp() { match end.next_cp() {
Some(('$', end2)) => { Some(('$', end2)) => {
state = Prec; state = Prec;
@ -473,7 +473,7 @@ pub mod printf {
if let PrecInner = state { if let PrecInner = state {
match c { match c {
'*' => { '*' => {
let end = at_next_cp_while(next, is_digit); let end = at_next_cp_while(next, char::is_ascii_digit);
match end.next_cp() { match end.next_cp() {
Some(('$', end2)) => { Some(('$', end2)) => {
state = Length; state = Length;
@ -488,7 +488,7 @@ pub mod printf {
} }
} }
'0'..='9' => { '0'..='9' => {
let end = at_next_cp_while(next, is_digit); let end = at_next_cp_while(next, char::is_ascii_digit);
state = Length; state = Length;
precision = Some(Num::from_str(at.slice_between(end).unwrap(), None)); precision = Some(Num::from_str(at.slice_between(end).unwrap(), None));
move_to!(end); move_to!(end);
@@ -563,12 +563,12 @@ pub mod printf {
fn at_next_cp_while<F>(mut cur: Cur<'_>, mut pred: F) -> Cur<'_>
where
- F: FnMut(char) -> bool,
+ F: FnMut(&char) -> bool,
{
loop {
match cur.next_cp() {
Some((c, next)) => {
- if pred(c) {
+ if pred(&c) {
cur = next;
} else {
return cur;
@@ -579,14 +579,7 @@ pub mod printf {
}
}
- fn is_digit(c: char) -> bool {
- match c {
- '0'..='9' => true,
- _ => false,
- }
- }
- fn is_flag(c: char) -> bool {
+ fn is_flag(c: &char) -> bool {
match c {
'0' | '-' | '+' | ' ' | '#' | '\'' => true,
_ => false,
@ -723,17 +716,11 @@ pub mod shell {
}
@@ -723,17 +716,11 @@ pub mod shell {
fn is_ident_head(c: char) -> bool {
- match c {
- 'a'..='z' | 'A'..='Z' | '_' => true,
- _ => false,
- }
+ c.is_ascii_alphabetic() || c == '_'
}
fn is_ident_tail(c: char) -> bool {
- match c {
- '0'..='9' => true,
- c => is_ident_head(c),
- }
+ c.is_ascii_alphanumeric() || c == '_'
}
#[cfg(test)] #[cfg(test)]
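Changing the scanner's predicate bound from `FnMut(char) -> bool` to `FnMut(&char) -> bool` is what lets `char::is_ascii_digit`, which takes `&self`, be passed directly instead of keeping a local `is_digit` helper. A small standalone sketch of that signature choice (the scanner here is a plain string walker, not the real `Cur` cursor):

```rust
// Advance past leading code points matching `pred` and return the rest.
// Taking `FnMut(&char) -> bool` lets `char::is_ascii_digit` be passed as-is.
fn skip_while<F>(s: &str, mut pred: F) -> &str
where
    F: FnMut(&char) -> bool,
{
    let mut rest = s;
    while let Some(c) = rest.chars().next() {
        if pred(&c) {
            rest = &rest[c.len_utf8()..];
        } else {
            break;
        }
    }
    rest
}

fn main() {
    // `char::is_ascii_digit` has the shape `fn(&char) -> bool`, so no closure is needed.
    assert_eq!(skip_while("123$rest", char::is_ascii_digit), "$rest");
    // A closure still works for ad-hoc predicates like the flag set above.
    assert_eq!(skip_while("+-#x", |c| matches!(*c, '0' | '-' | '+' | ' ' | '#' | '\'')), "x");
}
```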

@ -37,7 +37,7 @@ struct TestCtxt<'a> {
pub fn inject(sess: &Session, resolver: &mut dyn ResolverExpand, krate: &mut ast::Crate) { pub fn inject(sess: &Session, resolver: &mut dyn ResolverExpand, krate: &mut ast::Crate) {
let span_diagnostic = sess.diagnostic(); let span_diagnostic = sess.diagnostic();
let panic_strategy = sess.panic_strategy(); let panic_strategy = sess.panic_strategy();
let platform_panic_strategy = sess.target.target.options.panic_strategy; let platform_panic_strategy = sess.target.options.panic_strategy;
// Check for #![reexport_test_harness_main = "some_name"] which gives the // Check for #![reexport_test_harness_main = "some_name"] which gives the
// main test function the name `some_name` without hygiene. This needs to be // main test function the name `some_name` without hygiene. This needs to be
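Much of the churn in the files that follow is one mechanical change: the session used to hold a wrapper whose `target` field held the real `Target`, so call sites read `sess.target.target.options...`; after the refactor the session exposes the `Target` directly. A tiny sketch of the before/after access path, with hypothetical types rather than the actual `Session` and `Target` definitions:

```rust
// Hypothetical stand-ins to illustrate the access-path change only.
struct TargetOptions {
    panic_strategy: &'static str,
}

struct Target {
    arch: &'static str,
    options: TargetOptions,
}

// Before: the session stored a wrapper, so callers wrote `sess.target.target`.
struct TargetWrapper {
    target: Target,
}

struct OldSession {
    target: TargetWrapper,
}

// After: the wrapper is gone and the session holds the `Target` directly.
struct NewSession {
    target: Target,
}

fn main() {
    let make = || Target { arch: "x86_64", options: TargetOptions { panic_strategy: "unwind" } };
    let old = OldSession { target: TargetWrapper { target: make() } };
    let new = NewSession { target: make() };

    // Same information, one fewer hop at every call site.
    assert_eq!(old.target.target.options.panic_strategy, new.target.options.panic_strategy);
    assert_eq!(old.target.target.arch, new.target.arch);
}
```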

@ -16,10 +16,10 @@ pub(crate) unsafe fn codegen(
) { ) {
let llcx = &*mods.llcx; let llcx = &*mods.llcx;
let llmod = mods.llmod(); let llmod = mods.llmod();
- let usize = match &tcx.sess.target.target.target_pointer_width[..] {
- "16" => llvm::LLVMInt16TypeInContext(llcx),
- "32" => llvm::LLVMInt32TypeInContext(llcx),
- "64" => llvm::LLVMInt64TypeInContext(llcx),
+ let usize = match tcx.sess.target.pointer_width {
+ 16 => llvm::LLVMInt16TypeInContext(llcx),
+ 32 => llvm::LLVMInt32TypeInContext(llcx),
+ 64 => llvm::LLVMInt64TypeInContext(llcx),
tws => bug!("Unsupported target word size for int: {}", tws), tws => bug!("Unsupported target word size for int: {}", tws),
}; };
let i8 = llvm::LLVMInt8TypeInContext(llcx); let i8 = llvm::LLVMInt8TypeInContext(llcx);
@ -57,7 +57,7 @@ pub(crate) unsafe fn codegen(
let name = format!("__rust_{}", method.name); let name = format!("__rust_{}", method.name);
let llfn = llvm::LLVMRustGetOrInsertFunction(llmod, name.as_ptr().cast(), name.len(), ty); let llfn = llvm::LLVMRustGetOrInsertFunction(llmod, name.as_ptr().cast(), name.len(), ty);
if tcx.sess.target.target.options.default_hidden_visibility { if tcx.sess.target.options.default_hidden_visibility {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden); llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
} }
if tcx.sess.must_emit_unwind_tables() { if tcx.sess.must_emit_unwind_tables() {
@ -98,7 +98,7 @@ pub(crate) unsafe fn codegen(
// -> ! DIFlagNoReturn // -> ! DIFlagNoReturn
llvm::Attribute::NoReturn.apply_llfn(llvm::AttributePlace::Function, llfn); llvm::Attribute::NoReturn.apply_llfn(llvm::AttributePlace::Function, llfn);
if tcx.sess.target.target.options.default_hidden_visibility { if tcx.sess.target.options.default_hidden_visibility {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden); llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
} }
if tcx.sess.must_emit_unwind_tables() { if tcx.sess.must_emit_unwind_tables() {
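Alongside the wrapper removal, the pointer width stops being a string: the allocator shim now matches an integer `pointer_width` to pick the LLVM integer type for `usize`. A standalone sketch of the same selection, with a plain enum standing in for the LLVM type handles:

```rust
// Stand-in for the LLVM integer types picked in the allocator shim.
#[derive(Debug, PartialEq)]
enum IntType {
    I16,
    I32,
    I64,
}

// Matching on an integer pointer width; unknown widths are a hard error
// rather than a silent fallback, mirroring the `bug!(...)` arm in the diff.
fn usize_type(pointer_width: u32) -> Result<IntType, String> {
    match pointer_width {
        16 => Ok(IntType::I16),
        32 => Ok(IntType::I32),
        64 => Ok(IntType::I64),
        tws => Err(format!("Unsupported target word size for int: {}", tws)),
    }
}

fn main() {
    assert_eq!(usize_type(64).unwrap(), IntType::I64);
    assert!(usize_type(128).is_err());
}
```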

@ -60,7 +60,7 @@ impl AsmBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
// Default per-arch clobbers // Default per-arch clobbers
// Basically what clang does // Basically what clang does
let arch_clobbers = match &self.sess().target.target.arch[..] { let arch_clobbers = match &self.sess().target.arch[..] {
"x86" | "x86_64" => vec!["~{dirflag}", "~{fpsr}", "~{flags}"], "x86" | "x86_64" => vec!["~{dirflag}", "~{fpsr}", "~{flags}"],
"mips" | "mips64" => vec!["~{$1}"], "mips" | "mips64" => vec!["~{$1}"],
_ => Vec::new(), _ => Vec::new(),
@ -259,7 +259,7 @@ impl AsmBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => {} InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => {}
InlineAsmArch::Nvptx64 => {} InlineAsmArch::Nvptx64 => {}
InlineAsmArch::Hexagon => {} InlineAsmArch::Hexagon => {}
InlineAsmArch::Mips => {} InlineAsmArch::Mips | InlineAsmArch::Mips64 => {}
} }
} }
if !options.contains(InlineAsmOptions::NOMEM) { if !options.contains(InlineAsmOptions::NOMEM) {
@ -710,6 +710,7 @@ fn llvm_fixup_input(
// MIPS only supports register-length arithmetics. // MIPS only supports register-length arithmetics.
Primitive::Int(Integer::I8 | Integer::I16, _) => bx.zext(value, bx.cx.type_i32()), Primitive::Int(Integer::I8 | Integer::I16, _) => bx.zext(value, bx.cx.type_i32()),
Primitive::F32 => bx.bitcast(value, bx.cx.type_i32()), Primitive::F32 => bx.bitcast(value, bx.cx.type_i32()),
Primitive::F64 => bx.bitcast(value, bx.cx.type_i64()),
_ => value, _ => value,
}, },
_ => value, _ => value,
@ -785,6 +786,7 @@ fn llvm_fixup_output(
Primitive::Int(Integer::I8, _) => bx.trunc(value, bx.cx.type_i8()), Primitive::Int(Integer::I8, _) => bx.trunc(value, bx.cx.type_i8()),
Primitive::Int(Integer::I16, _) => bx.trunc(value, bx.cx.type_i16()), Primitive::Int(Integer::I16, _) => bx.trunc(value, bx.cx.type_i16()),
Primitive::F32 => bx.bitcast(value, bx.cx.type_f32()), Primitive::F32 => bx.bitcast(value, bx.cx.type_f32()),
Primitive::F64 => bx.bitcast(value, bx.cx.type_f64()),
_ => value, _ => value,
}, },
_ => value, _ => value,
@ -854,6 +856,7 @@ fn llvm_fixup_output_type(
// MIPS only supports register-length arithmetics. // MIPS only supports register-length arithmetics.
Primitive::Int(Integer::I8 | Integer::I16, _) => cx.type_i32(), Primitive::Int(Integer::I8 | Integer::I16, _) => cx.type_i32(),
Primitive::F32 => cx.type_i32(), Primitive::F32 => cx.type_i32(),
Primitive::F64 => cx.type_i64(),
_ => layout.llvm_type(cx), _ => layout.llvm_type(cx),
}, },
_ => layout.llvm_type(cx), _ => layout.llvm_type(cx),
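The new `Primitive::F64` arms extend the existing MIPS fixup: operands travel through general-purpose registers, so an `f64` is reinterpreted as the bits of an `i64` on the way in and converted back on the way out. In plain Rust the same bit-level round trip (what the LLVM `bitcast` does, as opposed to a numeric cast) looks like this:

```rust
fn main() {
    let x: f64 = -1.5;

    // Going into the operand: reinterpret the f64 as its raw 64-bit pattern.
    let bits: u64 = x.to_bits();

    // Coming back out: rebuild the f64 from the same bits.
    let y: f64 = f64::from_bits(bits);

    assert_eq!(x, y);
    // A value cast (`as`) is not the same thing as a bit reinterpretation.
    assert_ne!(bits, (-1.5f64) as u64);
    println!("{} -> {:#018x} -> {}", x, bits, y);
}
```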

@ -6,7 +6,7 @@ use rustc_codegen_ssa::traits::*;
use rustc_data_structures::const_cstr; use rustc_data_structures::const_cstr;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::small_c_str::SmallCStr; use rustc_data_structures::small_c_str::SmallCStr;
use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::HasTyCtxt; use rustc_middle::ty::layout::HasTyCtxt;
use rustc_middle::ty::query::Providers; use rustc_middle::ty::query::Providers;
@ -18,7 +18,7 @@ use crate::attributes;
use crate::llvm::AttributePlace::Function; use crate::llvm::AttributePlace::Function;
use crate::llvm::{self, Attribute}; use crate::llvm::{self, Attribute};
use crate::llvm_util; use crate::llvm_util;
pub use rustc_attr::{InlineAttr, OptimizeAttr}; pub use rustc_attr::{InlineAttr, InstructionSetAttr, OptimizeAttr};
use crate::context::CodegenCx; use crate::context::CodegenCx;
use crate::value::Value; use crate::value::Value;
@ -31,7 +31,7 @@ fn inline(cx: &CodegenCx<'ll, '_>, val: &'ll Value, inline: InlineAttr) {
Hint => Attribute::InlineHint.apply_llfn(Function, val), Hint => Attribute::InlineHint.apply_llfn(Function, val),
Always => Attribute::AlwaysInline.apply_llfn(Function, val), Always => Attribute::AlwaysInline.apply_llfn(Function, val),
Never => { Never => {
if cx.tcx().sess.target.target.arch != "amdgpu" { if cx.tcx().sess.target.arch != "amdgpu" {
Attribute::NoInline.apply_llfn(Function, val); Attribute::NoInline.apply_llfn(Function, val);
} }
} }
@ -91,8 +91,7 @@ fn set_instrument_function(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) {
// The function name varies on platforms. // The function name varies on platforms.
// See test/CodeGen/mcount.c in clang. // See test/CodeGen/mcount.c in clang.
let mcount_name = let mcount_name =
CString::new(cx.sess().target.target.options.target_mcount.as_str().as_bytes()) CString::new(cx.sess().target.options.target_mcount.as_str().as_bytes()).unwrap();
.unwrap();
llvm::AddFunctionAttrStringValue( llvm::AddFunctionAttrStringValue(
llfn, llfn,
@ -106,7 +105,7 @@ fn set_instrument_function(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) {
fn set_probestack(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) { fn set_probestack(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) {
// Only use stack probes if the target specification indicates that we // Only use stack probes if the target specification indicates that we
// should be using stack probes // should be using stack probes
if !cx.sess().target.target.options.stack_probes { if !cx.sess().target.options.stack_probes {
return; return;
} }
@ -175,7 +174,6 @@ pub fn llvm_target_features(sess: &Session) -> impl Iterator<Item = &str> {
.split(',') .split(',')
.filter(|f| !RUSTC_SPECIFIC_FEATURES.iter().any(|s| f.contains(s))); .filter(|f| !RUSTC_SPECIFIC_FEATURES.iter().any(|s| f.contains(s)));
sess.target sess.target
.target
.options .options
.features .features
.split(',') .split(',')
@ -194,6 +192,18 @@ pub fn apply_target_cpu_attr(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) {
); );
} }
pub fn apply_tune_cpu_attr(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) {
if let Some(tune) = llvm_util::tune_cpu(cx.tcx.sess) {
let tune_cpu = SmallCStr::new(tune);
llvm::AddFunctionAttrStringValue(
llfn,
llvm::AttributePlace::Function,
const_cstr!("tune-cpu"),
tune_cpu.as_c_str(),
);
}
}
/// Sets the `NonLazyBind` LLVM attribute on a given function, /// Sets the `NonLazyBind` LLVM attribute on a given function,
/// assuming the codegen options allow skipping the PLT. /// assuming the codegen options allow skipping the PLT.
pub fn non_lazy_bind(sess: &Session, llfn: &'ll Value) { pub fn non_lazy_bind(sess: &Session, llfn: &'ll Value) {
@ -303,6 +313,9 @@ pub fn from_fn_attrs(cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value, instance: ty::
// Without this, ThinLTO won't inline Rust functions into Clang generated // Without this, ThinLTO won't inline Rust functions into Clang generated
// functions (because Clang annotates functions this way too). // functions (because Clang annotates functions this way too).
apply_target_cpu_attr(cx, llfn); apply_target_cpu_attr(cx, llfn);
// tune-cpu is only conveyed through the attribute for our purpose.
// The target doesn't care; the subtarget reads our attribute.
apply_tune_cpu_attr(cx, llfn);
let features = llvm_target_features(cx.tcx.sess) let features = llvm_target_features(cx.tcx.sess)
.map(|s| s.to_string()) .map(|s| s.to_string())
@ -310,6 +323,10 @@ pub fn from_fn_attrs(cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value, instance: ty::
let feature = &f.as_str(); let feature = &f.as_str();
format!("+{}", llvm_util::to_llvm_feature(cx.tcx.sess, feature)) format!("+{}", llvm_util::to_llvm_feature(cx.tcx.sess, feature))
})) }))
.chain(codegen_fn_attrs.instruction_set.iter().map(|x| match x {
InstructionSetAttr::ArmA32 => "-thumb-mode".to_string(),
InstructionSetAttr::ArmT32 => "+thumb-mode".to_string(),
}))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join(","); .join(",");
@ -326,7 +343,7 @@ pub fn from_fn_attrs(cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value, instance: ty::
// Note that currently the `wasm-import-module` doesn't do anything, but // Note that currently the `wasm-import-module` doesn't do anything, but
// eventually LLVM 7 should read this and ferry the appropriate import // eventually LLVM 7 should read this and ferry the appropriate import
// module to the output file. // module to the output file.
if cx.tcx.sess.target.target.arch == "wasm32" { if cx.tcx.sess.target.arch == "wasm32" {
if let Some(module) = wasm_import_module(cx.tcx, instance.def_id()) { if let Some(module) = wasm_import_module(cx.tcx, instance.def_id()) {
llvm::AddFunctionAttrStringValue( llvm::AddFunctionAttrStringValue(
llfn, llfn,
@ -348,23 +365,7 @@ pub fn from_fn_attrs(cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value, instance: ty::
} }
} }
pub fn provide(providers: &mut Providers) { pub fn provide_both(providers: &mut Providers) {
use rustc_codegen_ssa::target_features::{all_known_features, supported_target_features};
providers.supported_target_features = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
if tcx.sess.opts.actually_rustdoc {
// rustdoc needs to be able to document functions that use all the features, so
// provide them all.
all_known_features().map(|(a, b)| (a.to_string(), b)).collect()
} else {
supported_target_features(tcx.sess).iter().map(|&(a, b)| (a.to_string(), b)).collect()
}
};
provide_extern(providers);
}
pub fn provide_extern(providers: &mut Providers) {
providers.wasm_import_module_map = |tcx, cnum| { providers.wasm_import_module_map = |tcx, cnum| {
// Build up a map from DefId to a `NativeLib` structure, where // Build up a map from DefId to a `NativeLib` structure, where
// `NativeLib` internally contains information about // `NativeLib` internally contains information about
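Two of the additions above end up as plain strings on the LLVM function: the new `InstructionSetAttr` is translated to `+thumb-mode`/`-thumb-mode` in the target-features list, and the optional tune CPU becomes a `tune-cpu` string attribute. A small sketch of assembling that feature string; the enum mirrors the one in the diff, but the `target_features` helper and its inputs are simplified stand-ins:

```rust
// Same two variants as the `InstructionSetAttr` added in the diff.
#[derive(Clone, Copy)]
enum InstructionSetAttr {
    ArmA32,
    ArmT32,
}

// Build the comma-separated target-features string handed to LLVM from the
// per-function features plus the optional instruction-set override.
fn target_features(base: &[&str], instruction_set: Option<InstructionSetAttr>) -> String {
    base.iter()
        .map(|f| format!("+{}", f))
        .chain(instruction_set.iter().map(|x| match x {
            InstructionSetAttr::ArmA32 => "-thumb-mode".to_string(),
            InstructionSetAttr::ArmT32 => "+thumb-mode".to_string(),
        }))
        .collect::<Vec<String>>()
        .join(",")
}

fn main() {
    let s = target_features(&["neon", "vfp3"], Some(InstructionSetAttr::ArmA32));
    assert_eq!(s, "+neon,+vfp3,-thumb-mode");
    assert_eq!(target_features(&[], None), "");
}
```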

@ -206,7 +206,7 @@ impl<'a> LlvmArchiveBuilder<'a> {
} }
fn llvm_archive_kind(&self) -> Result<ArchiveKind, &str> { fn llvm_archive_kind(&self) -> Result<ArchiveKind, &str> {
let kind = &*self.config.sess.target.target.options.archive_format; let kind = &*self.config.sess.target.options.archive_format;
kind.parse().map_err(|_| kind) kind.parse().map_err(|_| kind)
} }

@ -2,14 +2,14 @@ use crate::back::write::{
self, save_temp_bitcode, to_llvm_opt_settings, with_llvm_pmb, DiagnosticHandlers, self, save_temp_bitcode, to_llvm_opt_settings, with_llvm_pmb, DiagnosticHandlers,
}; };
use crate::llvm::archive_ro::ArchiveRO; use crate::llvm::archive_ro::ArchiveRO;
use crate::llvm::{self, False, True}; use crate::llvm::{self, build_string, False, True};
use crate::{LlvmCodegenBackend, ModuleLlvm}; use crate::{LlvmCodegenBackend, ModuleLlvm};
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule, ThinShared}; use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule, ThinShared};
use rustc_codegen_ssa::back::symbol_export; use rustc_codegen_ssa::back::symbol_export;
use rustc_codegen_ssa::back::write::{CodegenContext, FatLTOInput, ModuleConfig}; use rustc_codegen_ssa::back::write::{CodegenContext, FatLTOInput, ModuleConfig};
use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{looks_like_rust_object_file, ModuleCodegen, ModuleKind}; use rustc_codegen_ssa::{looks_like_rust_object_file, ModuleCodegen, ModuleKind};
use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{FatalError, Handler}; use rustc_errors::{FatalError, Handler};
use rustc_hir::def_id::LOCAL_CRATE; use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::bug; use rustc_middle::bug;
@ -22,16 +22,14 @@ use tracing::{debug, info};
use std::ffi::{CStr, CString}; use std::ffi::{CStr, CString};
use std::fs::File; use std::fs::File;
use std::io; use std::io;
use std::mem;
use std::path::Path; use std::path::Path;
use std::ptr; use std::ptr;
use std::slice; use std::slice;
use std::sync::Arc; use std::sync::Arc;
/// We keep track of past LTO imports that were used to produce the current set /// We keep track of the computed LTO cache keys from the previous
/// of compiled object files that we might choose to reuse during this /// session to determine which CGUs we can reuse.
/// compilation session. pub const THIN_LTO_KEYS_INCR_COMP_FILE_NAME: &str = "thin-lto-past-keys.bin";
pub const THIN_LTO_IMPORTS_INCR_COMP_FILE_NAME: &str = "thin-lto-past-imports.bin";
pub fn crate_type_allows_lto(crate_type: CrateType) -> bool { pub fn crate_type_allows_lto(crate_type: CrateType) -> bool {
match crate_type { match crate_type {
@ -485,31 +483,31 @@ fn thin_lto(
) )
.ok_or_else(|| write::llvm_err(&diag_handler, "failed to prepare thin LTO context"))?; .ok_or_else(|| write::llvm_err(&diag_handler, "failed to prepare thin LTO context"))?;
let data = ThinData(data);
info!("thin LTO data created"); info!("thin LTO data created");
let (import_map_path, prev_import_map, curr_import_map) = let (key_map_path, prev_key_map, curr_key_map) = if let Some(ref incr_comp_session_dir) =
if let Some(ref incr_comp_session_dir) = cgcx.incr_comp_session_dir { cgcx.incr_comp_session_dir
let path = incr_comp_session_dir.join(THIN_LTO_IMPORTS_INCR_COMP_FILE_NAME); {
// If previous imports have been deleted, or we get an IO error let path = incr_comp_session_dir.join(THIN_LTO_KEYS_INCR_COMP_FILE_NAME);
// reading the file storing them, then we'll just use `None` as the // If the previous file was deleted, or we get an IO error
// prev_import_map, which will force the code to be recompiled. // reading the file, then we'll just use `None` as the
let prev = if path.exists() { // prev_key_map, which will force the code to be recompiled.
ThinLTOImportMaps::load_from_file(&path).ok() let prev =
} else { if path.exists() { ThinLTOKeysMap::load_from_file(&path).ok() } else { None };
None let curr = ThinLTOKeysMap::from_thin_lto_modules(&data, &thin_modules, &module_names);
}; (Some(path), prev, curr)
let curr = ThinLTOImportMaps::from_thin_lto_data(data); } else {
(Some(path), prev, curr) // If we don't compile incrementally, we don't need to load the
} else { // import data from LLVM.
// If we don't compile incrementally, we don't need to load the assert!(green_modules.is_empty());
// import data from LLVM. let curr = ThinLTOKeysMap::default();
assert!(green_modules.is_empty()); (None, None, curr)
let curr = ThinLTOImportMaps::default(); };
(None, None, curr) info!("thin LTO cache key map loaded");
}; info!("prev_key_map: {:#?}", prev_key_map);
info!("thin LTO import map loaded"); info!("curr_key_map: {:#?}", curr_key_map);
let data = ThinData(data);
// Throw our data in an `Arc` as we'll be sharing it across threads. We // Throw our data in an `Arc` as we'll be sharing it across threads. We
// also put all memory referenced by the C++ data (buffers, ids, etc) // also put all memory referenced by the C++ data (buffers, ids, etc)
@ -528,60 +526,14 @@ fn thin_lto(
info!("checking which modules can be-reused and which have to be re-optimized."); info!("checking which modules can be-reused and which have to be re-optimized.");
for (module_index, module_name) in shared.module_names.iter().enumerate() { for (module_index, module_name) in shared.module_names.iter().enumerate() {
let module_name = module_name_to_str(module_name); let module_name = module_name_to_str(module_name);
if let (Some(prev_key_map), true) =
// If (1.) the module hasn't changed, and (2.) none of the modules (prev_key_map.as_ref(), green_modules.contains_key(module_name))
// it imports from have changed, *and* (3.) the import and export
// sets themselves have not changed from the previous compile when
// it was last ThinLTO'ed, then we can re-use the post-ThinLTO
// version of the module. Otherwise, freshly perform LTO
// optimization.
//
// (Note that globally, the export set is just the inverse of the
// import set.)
//
// For further justification of why the above is necessary and sufficient,
// see the LLVM blog post on ThinLTO:
//
// http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
//
// which states the following:
//
// ```quote
// any particular ThinLTO backend must be redone iff:
//
// 1. The corresponding (primary) modules bitcode changed
// 2. The list of imports into or exports from the module changed
// 3. The bitcode for any module being imported from has changed
// 4. Any global analysis result affecting either the primary module
// or anything it imports has changed.
// ```
//
// This strategy means we can always save the computed imports as
// canon: when we reuse the post-ThinLTO version, condition (3.)
// ensures that the current import set is the same as the previous
// one. (And of course, when we don't reuse the post-ThinLTO
// version, the current import set *is* the correct one, since we
// are doing the ThinLTO in this current compilation cycle.)
//
// For more discussion, see rust-lang/rust#59535 (where the import
// issue was discovered) and rust-lang/rust#69798 (where the
// analogous export issue was discovered).
if let (Some(prev_import_map), true) =
(prev_import_map.as_ref(), green_modules.contains_key(module_name))
{ {
assert!(cgcx.incr_comp_session_dir.is_some()); assert!(cgcx.incr_comp_session_dir.is_some());
let prev_imports = prev_import_map.imports_of(module_name); // If a module exists in both the current and the previous session,
let curr_imports = curr_import_map.imports_of(module_name); // and has the same LTO cache key in both sessions, then we can re-use it
let prev_exports = prev_import_map.exports_of(module_name); if prev_key_map.keys.get(module_name) == curr_key_map.keys.get(module_name) {
let curr_exports = curr_import_map.exports_of(module_name);
let imports_all_green = curr_imports
.iter()
.all(|imported_module| green_modules.contains_key(imported_module));
if imports_all_green
&& equivalent_as_sets(prev_imports, curr_imports)
&& equivalent_as_sets(prev_exports, curr_exports)
{
let work_product = green_modules[module_name].clone(); let work_product = green_modules[module_name].clone();
copy_jobs.push(work_product); copy_jobs.push(work_product);
info!(" - {}: re-used", module_name); info!(" - {}: re-used", module_name);
@ -599,10 +551,10 @@ fn thin_lto(
} }
// Save the current ThinLTO import information for the next compilation // Save the current ThinLTO import information for the next compilation
// session, overwriting the previous serialized imports (if any). // session, overwriting the previous serialized data (if any).
if let Some(path) = import_map_path { if let Some(path) = key_map_path {
if let Err(err) = curr_import_map.save_to_file(&path) { if let Err(err) = curr_key_map.save_to_file(&path) {
let msg = format!("Error while writing ThinLTO import data: {}", err); let msg = format!("Error while writing ThinLTO key data: {}", err);
return Err(write::llvm_err(&diag_handler, &msg)); return Err(write::llvm_err(&diag_handler, &msg));
} }
} }
@ -611,24 +563,6 @@ fn thin_lto(
} }
} }
/// Given two slices, each with no repeat elements. returns true if and only if
/// the two slices have the same contents when considered as sets (i.e. when
/// element order is disregarded).
fn equivalent_as_sets(a: &[String], b: &[String]) -> bool {
// cheap path: unequal lengths means cannot possibly be set equivalent.
if a.len() != b.len() {
return false;
}
// fast path: before building new things, check if inputs are equivalent as is.
if a == b {
return true;
}
// slow path: general set comparison.
let a: FxHashSet<&str> = a.iter().map(|s| s.as_str()).collect();
let b: FxHashSet<&str> = b.iter().map(|s| s.as_str()).collect();
a == b
}
pub(crate) fn run_pass_manager( pub(crate) fn run_pass_manager(
cgcx: &CodegenContext<LlvmCodegenBackend>, cgcx: &CodegenContext<LlvmCodegenBackend>,
module: &ModuleCodegen<ModuleLlvm>, module: &ModuleCodegen<ModuleLlvm>,
@ -942,113 +876,56 @@ pub unsafe fn optimize_thin_module(
Ok(module) Ok(module)
} }
- /// Summarizes module import/export relationships used by LLVM's ThinLTO pass.
- ///
- /// Note that we tend to have two such instances of `ThinLTOImportMaps` in use:
- /// one loaded from a file that represents the relationships used during the
- /// compilation associated with the incremental build artifacts we are
- /// attempting to reuse, and another constructed via `from_thin_lto_data`, which
- /// captures the relationships of ThinLTO in the current compilation.
+ /// Maps LLVM module identifiers to their corresponding LLVM LTO cache keys
#[derive(Debug, Default)]
- pub struct ThinLTOImportMaps {
- // key = llvm name of importing module, value = list of modules it imports from
- imports: FxHashMap<String, Vec<String>>,
- // key = llvm name of exporting module, value = list of modules it exports to
- exports: FxHashMap<String, Vec<String>>,
+ pub struct ThinLTOKeysMap {
+ // key = llvm name of importing module, value = LLVM cache key
+ keys: FxHashMap<String, String>,
}
impl ThinLTOImportMaps { impl ThinLTOKeysMap {
/// Returns modules imported by `llvm_module_name` during some ThinLTO pass.
fn imports_of(&self, llvm_module_name: &str) -> &[String] {
self.imports.get(llvm_module_name).map(|v| &v[..]).unwrap_or(&[])
}
/// Returns modules exported by `llvm_module_name` during some ThinLTO pass.
fn exports_of(&self, llvm_module_name: &str) -> &[String] {
self.exports.get(llvm_module_name).map(|v| &v[..]).unwrap_or(&[])
}
fn save_to_file(&self, path: &Path) -> io::Result<()> { fn save_to_file(&self, path: &Path) -> io::Result<()> {
use std::io::Write; use std::io::Write;
let file = File::create(path)?; let file = File::create(path)?;
let mut writer = io::BufWriter::new(file); let mut writer = io::BufWriter::new(file);
for (importing_module_name, imported_modules) in &self.imports { for (module, key) in &self.keys {
writeln!(writer, "{}", importing_module_name)?; writeln!(writer, "{} {}", module, key)?;
for imported_module in imported_modules {
writeln!(writer, " {}", imported_module)?;
}
writeln!(writer)?;
} }
Ok(()) Ok(())
} }
fn load_from_file(path: &Path) -> io::Result<ThinLTOImportMaps> { fn load_from_file(path: &Path) -> io::Result<Self> {
use std::io::BufRead; use std::io::BufRead;
let mut imports = FxHashMap::default(); let mut keys = FxHashMap::default();
let mut exports: FxHashMap<_, Vec<_>> = FxHashMap::default();
let mut current_module: Option<String> = None;
let mut current_imports: Vec<String> = vec![];
let file = File::open(path)?; let file = File::open(path)?;
for line in io::BufReader::new(file).lines() { for line in io::BufReader::new(file).lines() {
let line = line?; let line = line?;
if line.is_empty() { let mut split = line.split(" ");
let importing_module = current_module.take().expect("Importing module not set"); let module = split.next().unwrap();
for imported in &current_imports { let key = split.next().unwrap();
exports.entry(imported.clone()).or_default().push(importing_module.clone()); assert_eq!(split.next(), None, "Expected two space-separated values, found {:?}", line);
} keys.insert(module.to_string(), key.to_string());
imports.insert(importing_module, mem::replace(&mut current_imports, vec![]));
} else if line.starts_with(' ') {
// Space marks an imported module
assert_ne!(current_module, None);
current_imports.push(line.trim().to_string());
} else {
// Otherwise, beginning of a new module (must be start or follow empty line)
assert_eq!(current_module, None);
current_module = Some(line.trim().to_string());
}
} }
Ok(ThinLTOImportMaps { imports, exports }) Ok(Self { keys })
} }
/// Loads the ThinLTO import map from ThinLTOData. fn from_thin_lto_modules(
unsafe fn from_thin_lto_data(data: *const llvm::ThinLTOData) -> ThinLTOImportMaps { data: &ThinData,
unsafe extern "C" fn imported_module_callback( modules: &[llvm::ThinLTOModule],
payload: *mut libc::c_void, names: &[CString],
importing_module_name: *const libc::c_char, ) -> Self {
imported_module_name: *const libc::c_char, let keys = modules
) { .iter()
let map = &mut *(payload as *mut ThinLTOImportMaps); .zip(names.iter())
let importing_module_name = CStr::from_ptr(importing_module_name); .map(|(module, name)| {
let importing_module_name = module_name_to_str(&importing_module_name); let key = build_string(|rust_str| unsafe {
let imported_module_name = CStr::from_ptr(imported_module_name); llvm::LLVMRustComputeLTOCacheKey(rust_str, module.identifier, data.0);
let imported_module_name = module_name_to_str(&imported_module_name); })
.expect("Invalid ThinLTO module key");
if !map.imports.contains_key(importing_module_name) { (name.clone().into_string().unwrap(), key)
map.imports.insert(importing_module_name.to_owned(), vec![]); })
} .collect();
Self { keys }
map.imports
.get_mut(importing_module_name)
.unwrap()
.push(imported_module_name.to_owned());
if !map.exports.contains_key(imported_module_name) {
map.exports.insert(imported_module_name.to_owned(), vec![]);
}
map.exports
.get_mut(imported_module_name)
.unwrap()
.push(importing_module_name.to_owned());
}
let mut map = ThinLTOImportMaps::default();
llvm::LLVMRustGetThinLTOModuleImports(
data,
imported_module_callback,
&mut map as *mut _ as *mut libc::c_void,
);
map
} }
} }
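The new `ThinLTOKeysMap` replaces the import/export lists with one LTO cache key per module and a much simpler on-disk format: one `module key` pair per line, split on the first space when reloading; a codegen unit is reused only when its key is identical in the previous and current session. A rough standalone sketch of that round trip, with a plain `HashMap` in place of `FxHashMap` and a hypothetical file name:

```rust
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufRead, BufWriter, Write};
use std::path::Path;

// One line per module: "<module-name> <cache-key>".
fn save_keys(path: &Path, keys: &HashMap<String, String>) -> io::Result<()> {
    let mut w = BufWriter::new(File::create(path)?);
    for (module, key) in keys {
        writeln!(w, "{} {}", module, key)?;
    }
    Ok(())
}

fn load_keys(path: &Path) -> io::Result<HashMap<String, String>> {
    let mut keys = HashMap::new();
    for line in io::BufReader::new(File::open(path)?).lines() {
        let line = line?;
        let mut split = line.splitn(2, ' ');
        let module = split.next().unwrap();
        let key = split.next().expect("expected two space-separated values");
        keys.insert(module.to_string(), key.to_string());
    }
    Ok(keys)
}

fn main() -> io::Result<()> {
    let mut keys = HashMap::new();
    keys.insert("cgu0.o".to_string(), "abc123".to_string());
    keys.insert("cgu1.o".to_string(), "def456".to_string());

    // Hypothetical file name; the real one lives in the incremental session dir.
    let path = std::env::temp_dir().join("thin-lto-keys.txt");
    save_keys(&path, &keys)?;
    let reloaded = load_keys(&path)?;
    assert_eq!(keys, reloaded);
    std::fs::remove_file(&path)?;
    Ok(())
}
```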

@ -128,40 +128,40 @@ pub fn target_machine_factory(
let (opt_level, _) = to_llvm_opt_settings(optlvl); let (opt_level, _) = to_llvm_opt_settings(optlvl);
let use_softfp = sess.opts.cg.soft_float; let use_softfp = sess.opts.cg.soft_float;
let ffunction_sections = sess.target.target.options.function_sections; let ffunction_sections = sess.target.options.function_sections;
let fdata_sections = ffunction_sections; let fdata_sections = ffunction_sections;
let code_model = to_llvm_code_model(sess.code_model()); let code_model = to_llvm_code_model(sess.code_model());
let features = attributes::llvm_target_features(sess).collect::<Vec<_>>(); let features = attributes::llvm_target_features(sess).collect::<Vec<_>>();
let mut singlethread = sess.target.target.options.singlethread; let mut singlethread = sess.target.options.singlethread;
// On the wasm target once the `atomics` feature is enabled that means that // On the wasm target once the `atomics` feature is enabled that means that
// we're no longer single-threaded, or otherwise we don't want LLVM to // we're no longer single-threaded, or otherwise we don't want LLVM to
// lower atomic operations to single-threaded operations. // lower atomic operations to single-threaded operations.
if singlethread if singlethread
&& sess.target.target.llvm_target.contains("wasm32") && sess.target.llvm_target.contains("wasm32")
&& sess.target_features.contains(&sym::atomics) && sess.target_features.contains(&sym::atomics)
{ {
singlethread = false; singlethread = false;
} }
let triple = SmallCStr::new(&sess.target.target.llvm_target); let triple = SmallCStr::new(&sess.target.llvm_target);
let cpu = SmallCStr::new(llvm_util::target_cpu(sess)); let cpu = SmallCStr::new(llvm_util::target_cpu(sess));
let features = features.join(","); let features = features.join(",");
let features = CString::new(features).unwrap(); let features = CString::new(features).unwrap();
let abi = SmallCStr::new(&sess.target.target.options.llvm_abiname); let abi = SmallCStr::new(&sess.target.options.llvm_abiname);
let trap_unreachable = sess.target.target.options.trap_unreachable; let trap_unreachable = sess.target.options.trap_unreachable;
let emit_stack_size_section = sess.opts.debugging_opts.emit_stack_sizes; let emit_stack_size_section = sess.opts.debugging_opts.emit_stack_sizes;
let asm_comments = sess.asm_comments(); let asm_comments = sess.asm_comments();
let relax_elf_relocations = sess.target.target.options.relax_elf_relocations; let relax_elf_relocations = sess.target.options.relax_elf_relocations;
let use_init_array = !sess let use_init_array = !sess
.opts .opts
.debugging_opts .debugging_opts
.use_ctors_section .use_ctors_section
.unwrap_or(sess.target.target.options.use_ctors_section); .unwrap_or(sess.target.options.use_ctors_section);
Arc::new(move || { Arc::new(move || {
let tm = unsafe { let tm = unsafe {

@ -60,7 +60,7 @@ pub fn write_compressed_metadata<'tcx>(
unsafe { llvm::LLVMAddGlobal(metadata_llmod, common::val_ty(llconst), buf.as_ptr()) }; unsafe { llvm::LLVMAddGlobal(metadata_llmod, common::val_ty(llconst), buf.as_ptr()) };
unsafe { unsafe {
llvm::LLVMSetInitializer(llglobal, llconst); llvm::LLVMSetInitializer(llglobal, llconst);
let section_name = metadata::metadata_section_name(&tcx.sess.target.target); let section_name = metadata::metadata_section_name(&tcx.sess.target);
let name = SmallCStr::new(section_name); let name = SmallCStr::new(section_name);
llvm::LLVMSetSection(llglobal, name.as_ptr()); llvm::LLVMSetSection(llglobal, name.as_ptr());

@ -308,8 +308,8 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
use rustc_middle::ty::{Int, Uint}; use rustc_middle::ty::{Int, Uint};
let new_kind = match ty.kind() { let new_kind = match ty.kind() {
Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.ptr_width)), Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.pointer_width)),
Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.ptr_width)), Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.pointer_width)),
t @ (Uint(_) | Int(_)) => t.clone(), t @ (Uint(_) | Int(_)) => t.clone(),
_ => panic!("tried to get overflow intrinsic for op applied to non-int type"), _ => panic!("tried to get overflow intrinsic for op applied to non-int type"),
}; };
@ -541,7 +541,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
} }
fn range_metadata(&mut self, load: &'ll Value, range: Range<u128>) { fn range_metadata(&mut self, load: &'ll Value, range: Range<u128>) {
if self.sess().target.target.arch == "amdgpu" { if self.sess().target.arch == "amdgpu" {
// amdgpu/LLVM does something weird and thinks a i64 value is // amdgpu/LLVM does something weird and thinks a i64 value is
// split into a v2i32, halving the bitwidth LLVM expects, // split into a v2i32, halving the bitwidth LLVM expects,
// tripping an assertion. So, for now, just disable this // tripping an assertion. So, for now, just disable this
@ -671,7 +671,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
// WebAssembly has saturating floating point to integer casts if the // WebAssembly has saturating floating point to integer casts if the
// `nontrapping-fptoint` target feature is activated. We'll use those if // `nontrapping-fptoint` target feature is activated. We'll use those if
// they are available. // they are available.
if self.sess().target.target.arch == "wasm32" if self.sess().target.arch == "wasm32"
&& self.sess().target_features.contains(&sym::nontrapping_dash_fptoint) && self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
{ {
let src_ty = self.cx.val_ty(val); let src_ty = self.cx.val_ty(val);
@ -696,7 +696,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
// WebAssembly has saturating floating point to integer casts if the // WebAssembly has saturating floating point to integer casts if the
// `nontrapping-fptoint` target feature is activated. We'll use those if // `nontrapping-fptoint` target feature is activated. We'll use those if
// they are available. // they are available.
if self.sess().target.target.arch == "wasm32" if self.sess().target.arch == "wasm32"
&& self.sess().target_features.contains(&sym::nontrapping_dash_fptoint) && self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
{ {
let src_ty = self.cx.val_ty(val); let src_ty = self.cx.val_ty(val);
@ -1427,7 +1427,7 @@ impl Builder<'a, 'll, 'tcx> {
} }
fn wasm_and_missing_nontrapping_fptoint(&self) -> bool { fn wasm_and_missing_nontrapping_fptoint(&self) -> bool {
self.sess().target.target.arch == "wasm32" self.sess().target.arch == "wasm32"
&& !self.sess().target_features.contains(&sym::nontrapping_dash_fptoint) && !self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
} }
} }
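The `nontrapping-fptoint` checks above decide whether the backend can lean on WebAssembly's saturating float-to-int instructions. Saturating here means out-of-range and NaN inputs clamp to the integer's limits (or zero) instead of trapping, which is also what Rust's own `as` casts guarantee; a quick illustration:

```rust
fn main() {
    // Saturating float -> int conversion semantics, as guaranteed by `as`
    // casts and provided by wasm's saturating truncation instructions.
    assert_eq!(f32::INFINITY as i32, i32::MAX);
    assert_eq!(f32::NEG_INFINITY as i32, i32::MIN);
    assert_eq!(f32::NAN as i32, 0);
    assert_eq!(1e20f32 as i32, i32::MAX);
    assert_eq!(-1.9f32 as i32, -1); // fractional part is truncated, not rounded

    println!("saturating casts behave as expected");
}
```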

@ -176,7 +176,7 @@ pub fn get_fn(cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'tcx>) -> &'ll Value
// should use dllimport for functions. // should use dllimport for functions.
if cx.use_dll_storage_attrs if cx.use_dll_storage_attrs
&& tcx.is_dllimport_foreign_item(instance_def_id) && tcx.is_dllimport_foreign_item(instance_def_id)
&& tcx.sess.target.target.target_env != "gnu" && tcx.sess.target.target_env != "gnu"
{ {
unsafe { unsafe {
llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport); llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport);

@ -92,7 +92,7 @@ fn set_global_alignment(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align: Alig
// The target may require greater alignment for globals than the type does. // The target may require greater alignment for globals than the type does.
// Note: GCC and Clang also allow `__attribute__((aligned))` on variables, // Note: GCC and Clang also allow `__attribute__((aligned))` on variables,
// which can force it to be smaller. Rust doesn't support this yet. // which can force it to be smaller. Rust doesn't support this yet.
if let Some(min) = cx.sess().target.target.options.min_global_align { if let Some(min) = cx.sess().target.options.min_global_align {
match Align::from_bits(min) { match Align::from_bits(min) {
Ok(min) => align = align.max(min), Ok(min) => align = align.max(min),
Err(err) => { Err(err) => {
@ -283,7 +283,7 @@ impl CodegenCx<'ll, 'tcx> {
// argument validation. // argument validation.
debug_assert!( debug_assert!(
!(self.tcx.sess.opts.cg.linker_plugin_lto.enabled() !(self.tcx.sess.opts.cg.linker_plugin_lto.enabled()
&& self.tcx.sess.target.target.options.is_like_windows && self.tcx.sess.target.options.is_like_windows
&& self.tcx.sess.opts.cg.prefer_dynamic) && self.tcx.sess.opts.cg.prefer_dynamic)
); );
@ -437,7 +437,7 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> {
// will use load-unaligned instructions instead, and thus avoiding the crash. // will use load-unaligned instructions instead, and thus avoiding the crash.
// //
// We could remove this hack whenever we decide to drop macOS 10.10 support. // We could remove this hack whenever we decide to drop macOS 10.10 support.
if self.tcx.sess.target.target.options.is_like_osx { if self.tcx.sess.target.options.is_like_osx {
// The `inspect` method is okay here because we checked relocations, and // The `inspect` method is okay here because we checked relocations, and
// because we are doing this access to inspect the final interpreter state // because we are doing this access to inspect the final interpreter state
// (not as part of the interpreter execution). // (not as part of the interpreter execution).

@ -118,18 +118,18 @@ pub unsafe fn create_module(
let mod_name = SmallCStr::new(mod_name); let mod_name = SmallCStr::new(mod_name);
let llmod = llvm::LLVMModuleCreateWithNameInContext(mod_name.as_ptr(), llcx); let llmod = llvm::LLVMModuleCreateWithNameInContext(mod_name.as_ptr(), llcx);
let mut target_data_layout = sess.target.target.data_layout.clone(); let mut target_data_layout = sess.target.data_layout.clone();
if llvm_util::get_major_version() < 9 { if llvm_util::get_major_version() < 9 {
target_data_layout = strip_function_ptr_alignment(target_data_layout); target_data_layout = strip_function_ptr_alignment(target_data_layout);
} }
if llvm_util::get_major_version() < 10 { if llvm_util::get_major_version() < 10 {
if sess.target.target.arch == "x86" || sess.target.target.arch == "x86_64" { if sess.target.arch == "x86" || sess.target.arch == "x86_64" {
target_data_layout = strip_x86_address_spaces(target_data_layout); target_data_layout = strip_x86_address_spaces(target_data_layout);
} }
} }
// Ensure the data-layout values hardcoded remain the defaults. // Ensure the data-layout values hardcoded remain the defaults.
if sess.target.target.options.is_builtin { if sess.target.options.is_builtin {
let tm = crate::back::write::create_informational_target_machine(tcx.sess); let tm = crate::back::write::create_informational_target_machine(tcx.sess);
llvm::LLVMRustSetDataLayoutFromTargetMachine(llmod, tm); llvm::LLVMRustSetDataLayoutFromTargetMachine(llmod, tm);
llvm::LLVMRustDisposeTargetMachine(tm); llvm::LLVMRustDisposeTargetMachine(tm);
@ -160,7 +160,7 @@ pub unsafe fn create_module(
bug!( bug!(
"data-layout for builtin `{}` target, `{}`, \ "data-layout for builtin `{}` target, `{}`, \
differs from LLVM default, `{}`", differs from LLVM default, `{}`",
sess.target.target.llvm_target, sess.target.llvm_target,
target_data_layout, target_data_layout,
llvm_data_layout llvm_data_layout
); );
@ -170,7 +170,7 @@ pub unsafe fn create_module(
let data_layout = SmallCStr::new(&target_data_layout); let data_layout = SmallCStr::new(&target_data_layout);
llvm::LLVMSetDataLayout(llmod, data_layout.as_ptr()); llvm::LLVMSetDataLayout(llmod, data_layout.as_ptr());
let llvm_target = SmallCStr::new(&sess.target.target.llvm_target); let llvm_target = SmallCStr::new(&sess.target.llvm_target);
llvm::LLVMRustSetNormalizedTarget(llmod, llvm_target.as_ptr()); llvm::LLVMRustSetNormalizedTarget(llmod, llvm_target.as_ptr());
if sess.relocation_model() == RelocModel::Pic { if sess.relocation_model() == RelocModel::Pic {
@ -190,7 +190,7 @@ pub unsafe fn create_module(
} }
// Control Flow Guard is currently only supported by the MSVC linker on Windows. // Control Flow Guard is currently only supported by the MSVC linker on Windows.
if sess.target.target.options.is_like_msvc { if sess.target.options.is_like_msvc {
match sess.opts.cg.control_flow_guard { match sess.opts.cg.control_flow_guard {
CFGuard::Disabled => {} CFGuard::Disabled => {}
CFGuard::NoChecks => { CFGuard::NoChecks => {
@ -265,7 +265,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
// linker will take care of everything. Fixing this problem will likely // linker will take care of everything. Fixing this problem will likely
// require adding a few attributes to Rust itself (feature gated at the // require adding a few attributes to Rust itself (feature gated at the
// start) and then strongly recommending static linkage on Windows! // start) and then strongly recommending static linkage on Windows!
let use_dll_storage_attrs = tcx.sess.target.target.options.is_like_windows; let use_dll_storage_attrs = tcx.sess.target.options.is_like_windows;
let check_overflow = tcx.sess.overflow_checks(); let check_overflow = tcx.sess.overflow_checks();
@ -417,7 +417,8 @@ impl MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> {
} }
fn apply_target_cpu_attr(&self, llfn: &'ll Value) { fn apply_target_cpu_attr(&self, llfn: &'ll Value) {
attributes::apply_target_cpu_attr(self, llfn) attributes::apply_target_cpu_attr(self, llfn);
attributes::apply_tune_cpu_attr(self, llfn);
} }
fn create_used_variable(&self) { fn create_used_variable(&self) {
@ -838,7 +839,7 @@ impl CodegenCx<'b, 'tcx> {
return eh_catch_typeinfo; return eh_catch_typeinfo;
} }
let tcx = self.tcx; let tcx = self.tcx;
assert!(self.sess().target.target.options.is_like_emscripten); assert!(self.sess().target.options.is_like_emscripten);
let eh_catch_typeinfo = match tcx.lang_items().eh_catch_typeinfo() { let eh_catch_typeinfo = match tcx.lang_items().eh_catch_typeinfo() {
Some(def_id) => self.get_static(def_id), Some(def_id) => self.get_static(def_id),
_ => { _ => {
@ -877,7 +878,7 @@ impl HasDataLayout for CodegenCx<'ll, 'tcx> {
impl HasTargetSpec for CodegenCx<'ll, 'tcx> { impl HasTargetSpec for CodegenCx<'ll, 'tcx> {
fn target_spec(&self) -> &Target { fn target_spec(&self) -> &Target {
&self.tcx.sess.target.target &self.tcx.sess.target
} }
} }


@ -67,5 +67,5 @@ pub fn needs_gdb_debug_scripts_section(cx: &CodegenCx<'_, '_>) -> bool {
!omit_gdb_pretty_printer_section !omit_gdb_pretty_printer_section
&& cx.sess().opts.debuginfo != DebugInfo::None && cx.sess().opts.debuginfo != DebugInfo::None
&& cx.sess().target.target.options.emit_debug_gdb_scripts && cx.sess().target.options.emit_debug_gdb_scripts
} }


@ -874,7 +874,7 @@ fn basic_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
// When targeting MSVC, emit MSVC style type names for compatibility with // When targeting MSVC, emit MSVC style type names for compatibility with
// .natvis visualizers (and perhaps other existing native debuggers?) // .natvis visualizers (and perhaps other existing native debuggers?)
let msvc_like_names = cx.tcx.sess.target.target.options.is_like_msvc; let msvc_like_names = cx.tcx.sess.target.options.is_like_msvc;
let (name, encoding) = match t.kind() { let (name, encoding) = match t.kind() {
ty::Never => ("!", DW_ATE_unsigned), ty::Never => ("!", DW_ATE_unsigned),
@ -985,7 +985,7 @@ pub fn compile_unit_metadata(
// if multiple object files with the same `DW_AT_name` are linked together. // if multiple object files with the same `DW_AT_name` are linked together.
// As a workaround we generate unique names for each object file. Those do // As a workaround we generate unique names for each object file. Those do
// not correspond to an actual source file but that should be harmless. // not correspond to an actual source file but that should be harmless.
if tcx.sess.target.target.options.is_like_osx { if tcx.sess.target.options.is_like_osx {
name_in_debuginfo.push("@"); name_in_debuginfo.push("@");
name_in_debuginfo.push(codegen_unit_name); name_in_debuginfo.push(codegen_unit_name);
} }
@ -1401,7 +1401,7 @@ fn prepare_union_metadata(
/// on MSVC we have to use the fallback mode, because LLVM doesn't /// on MSVC we have to use the fallback mode, because LLVM doesn't
/// lower variant parts to PDB. /// lower variant parts to PDB.
fn use_enum_fallback(cx: &CodegenCx<'_, '_>) -> bool { fn use_enum_fallback(cx: &CodegenCx<'_, '_>) -> bool {
cx.sess().target.target.options.is_like_msvc cx.sess().target.options.is_like_msvc
} }
// FIXME(eddyb) maybe precompute this? Right now it's computed once // FIXME(eddyb) maybe precompute this? Right now it's computed once


@ -120,14 +120,12 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) {
// for macOS to understand. For more info see #11352 // for macOS to understand. For more info see #11352
// This can be overridden using --llvm-opts -dwarf-version,N. // This can be overridden using --llvm-opts -dwarf-version,N.
// Android has the same issue (#22398) // Android has the same issue (#22398)
if cx.sess().target.target.options.is_like_osx if let Some(version) = cx.sess().target.options.dwarf_version {
|| cx.sess().target.target.options.is_like_android llvm::LLVMRustAddModuleFlag(cx.llmod, "Dwarf Version\0".as_ptr().cast(), version)
{
llvm::LLVMRustAddModuleFlag(cx.llmod, "Dwarf Version\0".as_ptr().cast(), 2)
} }
// Indicate that we want CodeView debug information on MSVC // Indicate that we want CodeView debug information on MSVC
if cx.sess().target.target.options.is_like_msvc { if cx.sess().target.options.is_like_msvc {
llvm::LLVMRustAddModuleFlag(cx.llmod, "CodeView\0".as_ptr().cast(), 1) llvm::LLVMRustAddModuleFlag(cx.llmod, "CodeView\0".as_ptr().cast(), 1)
} }
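The hunk above swaps the hard-coded DWARF 2 module flag for macOS-like and Android targets for a per-target `dwarf_version` option, so the special case can live in the target spec instead of in codegen. A minimal standalone sketch of the resulting control flow (plain Rust with stand-in types, not rustc's; only the flag name is taken from the code above):

```rust
// Sketch: the "Dwarf Version" module flag is only emitted when the target
// spec pins a specific version; otherwise LLVM's default is left alone.
fn dwarf_module_flag(dwarf_version: Option<u32>) -> Option<(&'static str, u32)> {
    dwarf_version.map(|version| ("Dwarf Version", version))
}

fn main() {
    // e.g. an Apple-like target pinning DWARF 2 (illustrative value)
    assert_eq!(dwarf_module_flag(Some(2)), Some(("Dwarf Version", 2)));
    // a target with no override emits no flag at all
    assert_eq!(dwarf_module_flag(None), None);
}
```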
@ -348,7 +346,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
}); });
// Arguments types // Arguments types
if cx.sess().target.target.options.is_like_msvc { if cx.sess().target.options.is_like_msvc {
// FIXME(#42800): // FIXME(#42800):
// There is a bug in MSDIA that leads to a crash when it encounters // There is a bug in MSDIA that leads to a crash when it encounters
// a fixed-size array of `u8` or something zero-sized in a // a fixed-size array of `u8` or something zero-sized in a


@ -38,7 +38,7 @@ impl CodegenCx<'ll, '_> {
// For MSVC, omit the column number. // For MSVC, omit the column number.
// Otherwise, emit it. This mimics clang behaviour. // Otherwise, emit it. This mimics clang behaviour.
// See discussion in https://github.com/rust-lang/rust/issues/42921 // See discussion in https://github.com/rust-lang/rust/issues/42921
if self.sess().target.target.options.is_like_msvc { if self.sess().target.options.is_like_msvc {
DebugLoc { file, line, col: None } DebugLoc { file, line, col: None }
} else { } else {
DebugLoc { file, line, col } DebugLoc { file, line, col }


@ -42,7 +42,7 @@ fn declare_raw_fn(
// be merged. // be merged.
llvm::SetUnnamedAddress(llfn, llvm::UnnamedAddr::Global); llvm::SetUnnamedAddress(llfn, llvm::UnnamedAddr::Global);
if cx.tcx.sess.opts.cg.no_redzone.unwrap_or(cx.tcx.sess.target.target.options.disable_redzone) { if cx.tcx.sess.opts.cg.no_redzone.unwrap_or(cx.tcx.sess.target.options.disable_redzone) {
llvm::Attribute::NoRedZone.apply_llfn(Function, llfn); llvm::Attribute::NoRedZone.apply_llfn(Function, llfn);
} }


@ -367,7 +367,7 @@ fn try_intrinsic(
bx.store(bx.const_i32(0), dest, ret_align); bx.store(bx.const_i32(0), dest, ret_align);
} else if wants_msvc_seh(bx.sess()) { } else if wants_msvc_seh(bx.sess()) {
codegen_msvc_try(bx, try_func, data, catch_func, dest); codegen_msvc_try(bx, try_func, data, catch_func, dest);
} else if bx.sess().target.target.options.is_like_emscripten { } else if bx.sess().target.options.is_like_emscripten {
codegen_emcc_try(bx, try_func, data, catch_func, dest); codegen_emcc_try(bx, try_func, data, catch_func, dest);
} else { } else {
codegen_gnu_try(bx, try_func, data, catch_func, dest); codegen_gnu_try(bx, try_func, data, catch_func, dest);
@ -1722,10 +1722,10 @@ unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#,
fn int_type_width_signed(ty: Ty<'_>, cx: &CodegenCx<'_, '_>) -> Option<(u64, bool)> { fn int_type_width_signed(ty: Ty<'_>, cx: &CodegenCx<'_, '_>) -> Option<(u64, bool)> {
match ty.kind() { match ty.kind() {
ty::Int(t) => { ty::Int(t) => {
Some((t.bit_width().unwrap_or(u64::from(cx.tcx.sess.target.ptr_width)), true)) Some((t.bit_width().unwrap_or(u64::from(cx.tcx.sess.target.pointer_width)), true))
} }
ty::Uint(t) => { ty::Uint(t) => {
Some((t.bit_width().unwrap_or(u64::from(cx.tcx.sess.target.ptr_width)), false)) Some((t.bit_width().unwrap_or(u64::from(cx.tcx.sess.target.pointer_width)), false))
} }
_ => None, _ => None,
} }
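`isize` and `usize` report no fixed bit width, so the lookup above falls back to the target's pointer width, which this commit renames from `ptr_width` to `pointer_width` (now a plain integer in the target spec). A standalone sketch of the fallback:

```rust
// Sketch: fixed-width integers keep their own width; pointer-sized integers
// take the target's pointer width.
fn int_width(bit_width: Option<u64>, pointer_width: u32) -> u64 {
    bit_width.unwrap_or(u64::from(pointer_width))
}

fn main() {
    assert_eq!(int_width(Some(32), 64), 32); // i32 / u32
    assert_eq!(int_width(None, 64), 64);     // isize / usize on a 64-bit target
}
```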


@ -23,18 +23,17 @@ use rustc_codegen_ssa::back::write::{CodegenContext, FatLTOInput, ModuleConfig};
use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::ModuleCodegen; use rustc_codegen_ssa::ModuleCodegen;
use rustc_codegen_ssa::{CodegenResults, CompiledModule}; use rustc_codegen_ssa::{CodegenResults, CompiledModule};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{ErrorReported, FatalError, Handler}; use rustc_errors::{ErrorReported, FatalError, Handler};
use rustc_middle::dep_graph::{DepGraph, WorkProduct}; use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
use rustc_middle::middle::cstore::{EncodedMetadata, MetadataLoaderDyn}; use rustc_middle::middle::cstore::{EncodedMetadata, MetadataLoaderDyn};
use rustc_middle::ty::{self, TyCtxt}; use rustc_middle::ty::{self, TyCtxt};
use rustc_serialize::json; use rustc_session::config::{OptLevel, OutputFilenames, PrintRequest};
use rustc_session::config::{self, OptLevel, OutputFilenames, PrintRequest};
use rustc_session::Session; use rustc_session::Session;
use rustc_span::symbol::Symbol; use rustc_span::symbol::Symbol;
use std::any::Any; use std::any::Any;
use std::ffi::CStr; use std::ffi::CStr;
use std::fs;
use std::sync::Arc; use std::sync::Arc;
mod back { mod back {
@ -116,6 +115,9 @@ impl ExtraBackendMethods for LlvmCodegenBackend {
fn target_cpu<'b>(&self, sess: &'b Session) -> &'b str { fn target_cpu<'b>(&self, sess: &'b Session) -> &'b str {
llvm_util::target_cpu(sess) llvm_util::target_cpu(sess)
} }
fn tune_cpu<'b>(&self, sess: &'b Session) -> Option<&'b str> {
llvm_util::tune_cpu(sess)
}
} }
impl WriteBackendMethods for LlvmCodegenBackend { impl WriteBackendMethods for LlvmCodegenBackend {
@ -249,11 +251,11 @@ impl CodegenBackend for LlvmCodegenBackend {
} }
fn provide(&self, providers: &mut ty::query::Providers) { fn provide(&self, providers: &mut ty::query::Providers) {
attributes::provide(providers); attributes::provide_both(providers);
} }
fn provide_extern(&self, providers: &mut ty::query::Providers) { fn provide_extern(&self, providers: &mut ty::query::Providers) {
attributes::provide_extern(providers); attributes::provide_both(providers);
} }
fn codegen_crate<'tcx>( fn codegen_crate<'tcx>(
@ -274,47 +276,27 @@ impl CodegenBackend for LlvmCodegenBackend {
&self, &self,
ongoing_codegen: Box<dyn Any>, ongoing_codegen: Box<dyn Any>,
sess: &Session, sess: &Session,
dep_graph: &DepGraph, ) -> Result<(CodegenResults, FxHashMap<WorkProductId, WorkProduct>), ErrorReported> {
) -> Result<Box<dyn Any>, ErrorReported> {
let (codegen_results, work_products) = ongoing_codegen let (codegen_results, work_products) = ongoing_codegen
.downcast::<rustc_codegen_ssa::back::write::OngoingCodegen<LlvmCodegenBackend>>() .downcast::<rustc_codegen_ssa::back::write::OngoingCodegen<LlvmCodegenBackend>>()
.expect("Expected LlvmCodegenBackend's OngoingCodegen, found Box<Any>") .expect("Expected LlvmCodegenBackend's OngoingCodegen, found Box<Any>")
.join(sess); .join(sess);
if sess.opts.debugging_opts.incremental_info {
rustc_codegen_ssa::back::write::dump_incremental_data(&codegen_results);
}
sess.time("serialize_work_products", move || { sess.time("llvm_dump_timing_file", || {
rustc_incremental::save_work_product_index(sess, &dep_graph, work_products) if sess.opts.debugging_opts.llvm_time_trace {
llvm_util::time_trace_profiler_finish("llvm_timings.json");
}
}); });
sess.compile_status()?; Ok((codegen_results, work_products))
Ok(Box::new(codegen_results))
} }
fn link( fn link(
&self, &self,
sess: &Session, sess: &Session,
codegen_results: Box<dyn Any>, codegen_results: CodegenResults,
outputs: &OutputFilenames, outputs: &OutputFilenames,
) -> Result<(), ErrorReported> { ) -> Result<(), ErrorReported> {
let codegen_results = codegen_results
.downcast::<CodegenResults>()
.expect("Expected CodegenResults, found Box<Any>");
if sess.opts.debugging_opts.no_link {
// FIXME: use a binary format to encode the `.rlink` file
let rlink_data = json::encode(&codegen_results).map_err(|err| {
sess.fatal(&format!("failed to encode rlink: {}", err));
})?;
let rlink_file = outputs.with_extension(config::RLINK_EXT);
fs::write(&rlink_file, rlink_data).map_err(|err| {
sess.fatal(&format!("failed to write file {}: {}", rlink_file.display(), err));
})?;
return Ok(());
}
// Run the linker on any artifacts that resulted from the LLVM run. // Run the linker on any artifacts that resulted from the LLVM run.
// This should produce either a finished executable or library. // This should produce either a finished executable or library.
sess.time("link_crate", || { sess.time("link_crate", || {
@ -331,16 +313,6 @@ impl CodegenBackend for LlvmCodegenBackend {
); );
}); });
// Now that we won't touch anything in the incremental compilation directory
// any more, we can finalize it (which involves renaming it)
rustc_incremental::finalize_session_directory(sess, codegen_results.crate_hash);
sess.time("llvm_dump_timing_file", || {
if sess.opts.debugging_opts.llvm_time_trace {
llvm_util::time_trace_profiler_finish("llvm_timings.json");
}
});
Ok(()) Ok(())
} }
} }


@ -2362,4 +2362,10 @@ extern "C" {
bytecode_len: usize, bytecode_len: usize,
) -> bool; ) -> bool;
pub fn LLVMRustLinkerFree(linker: &'a mut Linker<'a>); pub fn LLVMRustLinkerFree(linker: &'a mut Linker<'a>);
#[allow(improper_ctypes)]
pub fn LLVMRustComputeLTOCacheKey(
key_out: &RustString,
mod_id: *const c_char,
data: &ThinLTOData,
);
} }


@ -118,11 +118,6 @@ pub fn SetUnnamedAddress(global: &'a Value, unnamed: UnnamedAddr) {
} }
} }
pub fn set_thread_local(global: &'a Value, is_thread_local: bool) {
unsafe {
LLVMSetThreadLocal(global, is_thread_local as Bool);
}
}
pub fn set_thread_local_mode(global: &'a Value, mode: ThreadLocalMode) { pub fn set_thread_local_mode(global: &'a Value, mode: ThreadLocalMode) {
unsafe { unsafe {
LLVMSetThreadLocalMode(global, mode); LLVMSetThreadLocalMode(global, mode);


@ -46,7 +46,7 @@ fn require_inited() {
} }
unsafe fn configure_llvm(sess: &Session) { unsafe fn configure_llvm(sess: &Session) {
let n_args = sess.opts.cg.llvm_args.len() + sess.target.target.options.llvm_args.len(); let n_args = sess.opts.cg.llvm_args.len() + sess.target.options.llvm_args.len();
let mut llvm_c_strs = Vec::with_capacity(n_args + 1); let mut llvm_c_strs = Vec::with_capacity(n_args + 1);
let mut llvm_args = Vec::with_capacity(n_args + 1); let mut llvm_args = Vec::with_capacity(n_args + 1);
@ -57,7 +57,7 @@ unsafe fn configure_llvm(sess: &Session) {
} }
let cg_opts = sess.opts.cg.llvm_args.iter(); let cg_opts = sess.opts.cg.llvm_args.iter();
let tg_opts = sess.target.target.options.llvm_args.iter(); let tg_opts = sess.target.options.llvm_args.iter();
let sess_args = cg_opts.chain(tg_opts); let sess_args = cg_opts.chain(tg_opts);
let user_specified_args: FxHashSet<_> = let user_specified_args: FxHashSet<_> =
@ -88,7 +88,7 @@ unsafe fn configure_llvm(sess: &Session) {
.opts .opts
.debugging_opts .debugging_opts
.merge_functions .merge_functions
.unwrap_or(sess.target.target.options.merge_functions) .unwrap_or(sess.target.options.merge_functions)
{ {
MergeFunctions::Disabled | MergeFunctions::Trampolines => {} MergeFunctions::Disabled | MergeFunctions::Trampolines => {}
MergeFunctions::Aliases => { MergeFunctions::Aliases => {
@ -96,9 +96,7 @@ unsafe fn configure_llvm(sess: &Session) {
} }
} }
if sess.target.target.target_os == "emscripten" if sess.target.target_os == "emscripten" && sess.panic_strategy() == PanicStrategy::Unwind {
&& sess.panic_strategy() == PanicStrategy::Unwind
{
add("-enable-emscripten-cxx-exceptions", false); add("-enable-emscripten-cxx-exceptions", false);
} }
@ -122,7 +120,7 @@ unsafe fn configure_llvm(sess: &Session) {
llvm::LLVMInitializePasses(); llvm::LLVMInitializePasses();
::rustc_llvm::initialize_available_targets(); rustc_llvm::initialize_available_targets();
llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int, llvm_args.as_ptr()); llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int, llvm_args.as_ptr());
} }
@ -140,7 +138,7 @@ pub fn time_trace_profiler_finish(file_name: &str) {
// to LLVM or the feature detection code will walk past the end of the feature // to LLVM or the feature detection code will walk past the end of the feature
// array, leading to crashes. // array, leading to crashes.
pub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str { pub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {
let arch = if sess.target.target.arch == "x86_64" { "x86" } else { &*sess.target.target.arch }; let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch };
match (arch, s) { match (arch, s) {
("x86", "pclmulqdq") => "pclmul", ("x86", "pclmulqdq") => "pclmul",
("x86", "rdrand") => "rdrnd", ("x86", "rdrand") => "rdrnd",
@ -202,11 +200,7 @@ pub(crate) fn print(req: PrintRequest, sess: &Session) {
} }
} }
pub fn target_cpu(sess: &Session) -> &str { fn handle_native(name: &str) -> &str {
let name = match sess.opts.cg.target_cpu {
Some(ref s) => &**s,
None => &*sess.target.target.options.cpu,
};
if name != "native" { if name != "native" {
return name; return name;
} }
@ -217,3 +211,19 @@ pub fn target_cpu(sess: &Session) -> &str {
str::from_utf8(slice::from_raw_parts(ptr as *const u8, len)).unwrap() str::from_utf8(slice::from_raw_parts(ptr as *const u8, len)).unwrap()
} }
} }
pub fn target_cpu(sess: &Session) -> &str {
let name = match sess.opts.cg.target_cpu {
Some(ref s) => &**s,
None => &*sess.target.options.cpu,
};
handle_native(name)
}
pub fn tune_cpu(sess: &Session) -> Option<&str> {
match sess.opts.debugging_opts.tune_cpu {
Some(ref s) => Some(handle_native(&**s)),
None => None,
}
}
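Both functions above now route a possibly `native` CPU name through the shared `handle_native` helper; `tune_cpu` is new and reads the `-Z tune-cpu` option. A standalone sketch of the shared shape, with the LLVM host-CPU query stubbed out:

```rust
// Sketch: "native" is resolved through one helper; every other name passes through.
fn handle_native(name: &str) -> &str {
    if name != "native" {
        name
    } else {
        "host-cpu" // stand-in for the LLVM host-CPU query
    }
}

fn target_cpu<'a>(cli_target_cpu: Option<&'a str>, spec_default: &'a str) -> &'a str {
    handle_native(cli_target_cpu.unwrap_or(spec_default))
}

fn tune_cpu(cli_tune_cpu: Option<&str>) -> Option<&str> {
    cli_tune_cpu.map(handle_native)
}

fn main() {
    assert_eq!(target_cpu(None, "generic"), "generic");
    assert_eq!(target_cpu(Some("native"), "generic"), "host-cpu");
    assert_eq!(tune_cpu(None), None);
}
```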


@ -52,7 +52,7 @@ fn emit_direct_ptr_va_arg(
let next = bx.inbounds_gep(addr, &[full_direct_size]); let next = bx.inbounds_gep(addr, &[full_direct_size]);
bx.store(next, va_list_addr, bx.tcx().data_layout.pointer_align.abi); bx.store(next, va_list_addr, bx.tcx().data_layout.pointer_align.abi);
if size.bytes() < slot_size.bytes() && &*bx.tcx().sess.target.target.target_endian == "big" { if size.bytes() < slot_size.bytes() && &*bx.tcx().sess.target.target_endian == "big" {
let adjusted_size = bx.cx().const_i32((slot_size.bytes() - size.bytes()) as i32); let adjusted_size = bx.cx().const_i32((slot_size.bytes() - size.bytes()) as i32);
let adjusted = bx.inbounds_gep(addr, &[adjusted_size]); let adjusted = bx.inbounds_gep(addr, &[adjusted_size]);
(bx.bitcast(adjusted, bx.cx().type_ptr_to(llty)), addr_align) (bx.bitcast(adjusted, bx.cx().type_ptr_to(llty)), addr_align)
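The endianness check above exists because a value narrower than its `va_list` slot lives in the slot's high-order bytes on big-endian targets, so the load address has to be bumped by the size difference. A standalone sketch of just that offset computation:

```rust
// Sketch: byte offset added to the slot address before loading a value that
// is smaller than the slot itself.
fn va_slot_offset(slot_size: u64, value_size: u64, big_endian: bool) -> u64 {
    if value_size < slot_size && big_endian {
        slot_size - value_size
    } else {
        0
    }
}

fn main() {
    assert_eq!(va_slot_offset(8, 4, true), 4);  // 32-bit value in an 8-byte slot, big-endian
    assert_eq!(va_slot_offset(8, 4, false), 0); // little-endian loads from the slot start
}
```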
@ -105,7 +105,7 @@ fn emit_aapcs_va_arg(
let mut end = bx.build_sibling_block("va_arg.end"); let mut end = bx.build_sibling_block("va_arg.end");
let zero = bx.const_i32(0); let zero = bx.const_i32(0);
let offset_align = Align::from_bytes(4).unwrap(); let offset_align = Align::from_bytes(4).unwrap();
assert!(&*bx.tcx().sess.target.target.target_endian == "little"); assert!(&*bx.tcx().sess.target.target_endian == "little");
let gr_type = target_ty.is_any_ptr() || target_ty.is_integral(); let gr_type = target_ty.is_any_ptr() || target_ty.is_integral();
let (reg_off, reg_top_index, slot_size) = if gr_type { let (reg_off, reg_top_index, slot_size) = if gr_type {
@ -171,8 +171,8 @@ pub(super) fn emit_va_arg(
) -> &'ll Value { ) -> &'ll Value {
// Determine the va_arg implementation to use. The LLVM va_arg instruction // Determine the va_arg implementation to use. The LLVM va_arg instruction
// is lacking in some instances, so we should only use it as a fallback. // is lacking in some instances, so we should only use it as a fallback.
let target = &bx.cx.tcx.sess.target.target; let target = &bx.cx.tcx.sess.target;
let arch = &bx.cx.tcx.sess.target.target.arch; let arch = &bx.cx.tcx.sess.target.arch;
match (&**arch, target.options.is_like_windows) { match (&**arch, target.options.is_like_windows) {
// Windows x86 // Windows x86
("x86", true) => { ("x86", true) => {


@ -9,9 +9,7 @@ pub fn find_library(name: Symbol, search_paths: &[PathBuf], sess: &Session) -> P
// times show up as foo.lib // times show up as foo.lib
let oslibname = format!( let oslibname = format!(
"{}{}{}", "{}{}{}",
sess.target.target.options.staticlib_prefix, sess.target.options.staticlib_prefix, name, sess.target.options.staticlib_suffix
name,
sess.target.target.options.staticlib_suffix
); );
let unixlibname = format!("lib{}.a", name); let unixlibname = format!("lib{}.a", name);
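`find_library` above probes two spellings of a static library in each search path: one built from the target's own prefix and suffix, and the Unix-style `lib<name>.a`, since static libraries can show up under either name. A standalone sketch of the name construction:

```rust
// Sketch: the two candidate file names probed in each search path.
fn staticlib_candidates(prefix: &str, suffix: &str, name: &str) -> (String, String) {
    let oslibname = format!("{}{}{}", prefix, name, suffix);
    let unixlibname = format!("lib{}.a", name);
    (oslibname, unixlibname)
}

fn main() {
    // Illustrative target-spec values, roughly what an MSVC-like target uses.
    let (os, unix) = staticlib_candidates("", ".lib", "foo");
    assert_eq!(os, "foo.lib");
    assert_eq!(unix, "libfoo.a");
}
```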


@ -15,7 +15,7 @@ use rustc_session::{filesearch, Session};
use rustc_span::symbol::Symbol; use rustc_span::symbol::Symbol;
use rustc_target::spec::crt_objects::{CrtObjects, CrtObjectsFallback}; use rustc_target::spec::crt_objects::{CrtObjects, CrtObjectsFallback};
use rustc_target::spec::{LinkOutputKind, LinkerFlavor, LldFlavor}; use rustc_target::spec::{LinkOutputKind, LinkerFlavor, LldFlavor};
use rustc_target::spec::{PanicStrategy, RelocModel, RelroLevel}; use rustc_target::spec::{PanicStrategy, RelocModel, RelroLevel, Target};
use super::archive::ArchiveBuilder; use super::archive::ArchiveBuilder;
use super::command::Command; use super::command::Command;
@ -152,7 +152,7 @@ fn get_linker(
_ => match flavor { _ => match flavor {
LinkerFlavor::Lld(f) => Command::lld(linker, f), LinkerFlavor::Lld(f) => Command::lld(linker, f),
LinkerFlavor::Msvc LinkerFlavor::Msvc
if sess.opts.cg.linker.is_none() && sess.target.target.options.linker.is_none() => if sess.opts.cg.linker.is_none() && sess.target.options.linker.is_none() =>
{ {
Command::new(msvc_tool.as_ref().map(|t| t.path()).unwrap_or(linker)) Command::new(msvc_tool.as_ref().map(|t| t.path()).unwrap_or(linker))
} }
@ -163,7 +163,7 @@ fn get_linker(
// UWP apps have API restrictions enforced during Store submissions. // UWP apps have API restrictions enforced during Store submissions.
// To comply with the Windows App Certification Kit, // To comply with the Windows App Certification Kit,
// MSVC needs to link with the Store versions of the runtime libraries (vcruntime, msvcrt, etc). // MSVC needs to link with the Store versions of the runtime libraries (vcruntime, msvcrt, etc).
let t = &sess.target.target; let t = &sess.target;
if (flavor == LinkerFlavor::Msvc || flavor == LinkerFlavor::Lld(LldFlavor::Link)) if (flavor == LinkerFlavor::Msvc || flavor == LinkerFlavor::Lld(LldFlavor::Link))
&& t.target_vendor == "uwp" && t.target_vendor == "uwp"
{ {
@ -197,7 +197,7 @@ fn get_linker(
// PATH for the child. // PATH for the child.
let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths(self_contained); let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths(self_contained);
let mut msvc_changed_path = false; let mut msvc_changed_path = false;
if sess.target.target.options.is_like_msvc { if sess.target.options.is_like_msvc {
if let Some(ref tool) = msvc_tool { if let Some(ref tool) = msvc_tool {
cmd.args(tool.args()); cmd.args(tool.args());
for &(ref k, ref v) in tool.env() { for &(ref k, ref v) in tool.env() {
@ -365,7 +365,7 @@ fn link_rlib<'a, B: ArchiveBuilder<'a>>(
// After adding all files to the archive, we need to update the // After adding all files to the archive, we need to update the
// symbol table of the archive. This currently dies on macOS (see // symbol table of the archive. This currently dies on macOS (see
// #11162), and isn't necessary there anyway // #11162), and isn't necessary there anyway
if !sess.target.target.options.is_like_osx { if !sess.target.options.is_like_osx {
ab.update_symbols(); ab.update_symbols();
} }
} }
@ -476,10 +476,10 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
linker::disable_localization(&mut cmd); linker::disable_localization(&mut cmd);
for &(ref k, ref v) in &sess.target.target.options.link_env { for &(ref k, ref v) in &sess.target.options.link_env {
cmd.env(k, v); cmd.env(k, v);
} }
for k in &sess.target.target.options.link_env_remove { for k in &sess.target.options.link_env_remove {
cmd.env_remove(k); cmd.env_remove(k);
} }
@ -515,7 +515,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
// if the linker doesn't support -no-pie then it should not default to // if the linker doesn't support -no-pie then it should not default to
// linking executables as pie. Different versions of gcc seem to use // linking executables as pie. Different versions of gcc seem to use
// different quotes in the error message so don't check for them. // different quotes in the error message so don't check for them.
if sess.target.target.options.linker_is_gnu if sess.target.options.linker_is_gnu
&& flavor != LinkerFlavor::Ld && flavor != LinkerFlavor::Ld
&& (out.contains("unrecognized command line option") && (out.contains("unrecognized command line option")
|| out.contains("unknown argument")) || out.contains("unknown argument"))
@ -535,7 +535,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
// Detect '-static-pie' used with an older version of gcc or clang not supporting it. // Detect '-static-pie' used with an older version of gcc or clang not supporting it.
// Fallback from '-static-pie' to '-static' in that case. // Fallback from '-static-pie' to '-static' in that case.
if sess.target.target.options.linker_is_gnu if sess.target.options.linker_is_gnu
&& flavor != LinkerFlavor::Ld && flavor != LinkerFlavor::Ld
&& (out.contains("unrecognized command line option") && (out.contains("unrecognized command line option")
|| out.contains("unknown argument")) || out.contains("unknown argument"))
@ -548,7 +548,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
); );
// Mirror `add_(pre,post)_link_objects` to replace CRT objects. // Mirror `add_(pre,post)_link_objects` to replace CRT objects.
let self_contained = crt_objects_fallback(sess, crate_type); let self_contained = crt_objects_fallback(sess, crate_type);
let opts = &sess.target.target.options; let opts = &sess.target.options;
let pre_objects = if self_contained { let pre_objects = if self_contained {
&opts.pre_link_objects_fallback &opts.pre_link_objects_fallback
} else { } else {
@ -670,7 +670,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
// is not a Microsoft LNK error then suggest a way to fix or // is not a Microsoft LNK error then suggest a way to fix or
// install the Visual Studio build tools. // install the Visual Studio build tools.
if let Some(code) = prog.status.code() { if let Some(code) = prog.status.code() {
if sess.target.target.options.is_like_msvc if sess.target.options.is_like_msvc
&& flavor == LinkerFlavor::Msvc && flavor == LinkerFlavor::Msvc
// Respect the command line override // Respect the command line override
&& sess.opts.cg.linker.is_none() && sess.opts.cg.linker.is_none()
@ -741,7 +741,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
linker_error.emit(); linker_error.emit();
if sess.target.target.options.is_like_msvc && linker_not_found { if sess.target.options.is_like_msvc && linker_not_found {
sess.note_without_error( sess.note_without_error(
"the msvc targets depend on the msvc linker \ "the msvc targets depend on the msvc linker \
but `link.exe` was not found", but `link.exe` was not found",
@ -758,7 +758,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
// On macOS, debuggers need this utility to get run to do some munging of // On macOS, debuggers need this utility to get run to do some munging of
// the symbols. Note, though, that if the object files are being preserved // the symbols. Note, though, that if the object files are being preserved
// for their debug information there's no need for us to run dsymutil. // for their debug information there's no need for us to run dsymutil.
if sess.target.target.options.is_like_osx if sess.target.options.is_like_osx
&& sess.opts.debuginfo != DebugInfo::None && sess.opts.debuginfo != DebugInfo::None
&& !preserve_objects_for_their_debuginfo(sess) && !preserve_objects_for_their_debuginfo(sess)
{ {
@ -776,7 +776,7 @@ fn link_sanitizers(sess: &Session, crate_type: CrateType, linker: &mut dyn Linke
let needs_runtime = match crate_type { let needs_runtime = match crate_type {
CrateType::Executable => true, CrateType::Executable => true,
CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => { CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => {
sess.target.target.options.is_like_osx sess.target.options.is_like_osx
} }
CrateType::Rlib | CrateType::Staticlib => false, CrateType::Rlib | CrateType::Staticlib => false,
}; };
@ -846,7 +846,7 @@ pub fn ignored_for_lto(sess: &Session, info: &CrateInfo, cnum: CrateNum) -> bool
// If our target enables builtin function lowering in LLVM then the // If our target enables builtin function lowering in LLVM then the
// crates providing these functions don't participate in LTO (e.g. // crates providing these functions don't participate in LTO (e.g.
// no_builtins or compiler builtins crates). // no_builtins or compiler builtins crates).
!sess.target.target.options.no_builtins !sess.target.options.no_builtins
&& (info.compiler_builtins == Some(cnum) || info.is_no_builtins.contains(&cnum)) && (info.compiler_builtins == Some(cnum) || info.is_no_builtins.contains(&cnum))
} }
@ -906,10 +906,10 @@ fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) {
} else if stem == "link" || stem == "lld-link" { } else if stem == "link" || stem == "lld-link" {
LinkerFlavor::Msvc LinkerFlavor::Msvc
} else if stem == "lld" || stem == "rust-lld" { } else if stem == "lld" || stem == "rust-lld" {
LinkerFlavor::Lld(sess.target.target.options.lld_flavor) LinkerFlavor::Lld(sess.target.options.lld_flavor)
} else { } else {
// fall back to the value in the target spec // fall back to the value in the target spec
sess.target.target.linker_flavor sess.target.linker_flavor
}; };
Some((linker, flavor)) Some((linker, flavor))
@ -926,8 +926,8 @@ fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) {
if let Some(ret) = infer_from( if let Some(ret) = infer_from(
sess, sess,
sess.target.target.options.linker.clone().map(PathBuf::from), sess.target.options.linker.clone().map(PathBuf::from),
Some(sess.target.target.linker_flavor), Some(sess.target.linker_flavor),
) { ) {
return ret; return ret;
} }
@ -962,7 +962,7 @@ fn preserve_objects_for_their_debuginfo(sess: &Session) -> bool {
// Basically as a result this just means that if we're on OSX and we're // Basically as a result this just means that if we're on OSX and we're
// *not* running dsymutil then the object files are the only source of truth // *not* running dsymutil then the object files are the only source of truth
// for debug information, so we must preserve them. // for debug information, so we must preserve them.
if sess.target.target.options.is_like_osx { if sess.target.options.is_like_osx {
return !sess.opts.debugging_opts.run_dsymutil; return !sess.opts.debugging_opts.run_dsymutil;
} }
@ -988,7 +988,7 @@ fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLib]) {
NativeLibKind::StaticNoBundle NativeLibKind::StaticNoBundle
| NativeLibKind::Dylib | NativeLibKind::Dylib
| NativeLibKind::Unspecified => { | NativeLibKind::Unspecified => {
if sess.target.target.options.is_like_msvc { if sess.target.options.is_like_msvc {
Some(format!("{}.lib", name)) Some(format!("{}.lib", name))
} else { } else {
Some(format!("-l{}", name)) Some(format!("-l{}", name))
@ -1070,16 +1070,13 @@ fn exec_linker(
let mut args = String::new(); let mut args = String::new();
for arg in cmd2.take_args() { for arg in cmd2.take_args() {
args.push_str( args.push_str(
&Escape { &Escape { arg: arg.to_str().unwrap(), is_like_msvc: sess.target.options.is_like_msvc }
arg: arg.to_str().unwrap(), .to_string(),
is_like_msvc: sess.target.target.options.is_like_msvc,
}
.to_string(),
); );
args.push('\n'); args.push('\n');
} }
let file = tmpdir.join("linker-arguments"); let file = tmpdir.join("linker-arguments");
let bytes = if sess.target.target.options.is_like_msvc { let bytes = if sess.target.options.is_like_msvc {
let mut out = Vec::with_capacity((1 + args.len()) * 2); let mut out = Vec::with_capacity((1 + args.len()) * 2);
// start the stream with a UTF-16 BOM // start the stream with a UTF-16 BOM
for c in std::iter::once(0xFEFF).chain(args.encode_utf16()) { for c in std::iter::once(0xFEFF).chain(args.encode_utf16()) {
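When the argument list is spilled to a response file, MSVC-style linkers expect UTF-16LE with a byte-order mark, while other linkers take the bytes as-is; that is what the branch above encodes. A standalone sketch of the encoding step:

```rust
// Sketch: encode a linker response file either as UTF-16LE with a BOM
// (MSVC-like targets) or as raw UTF-8 bytes (everything else).
fn encode_response_file(args: &str, is_like_msvc: bool) -> Vec<u8> {
    if is_like_msvc {
        let mut out = Vec::with_capacity((1 + args.len()) * 2);
        for unit in std::iter::once(0xFEFFu16).chain(args.encode_utf16()) {
            out.extend_from_slice(&unit.to_le_bytes());
        }
        out
    } else {
        args.as_bytes().to_vec()
    }
}

fn main() {
    let bytes = encode_response_file("-lfoo\n", true);
    assert_eq!(&bytes[..2], &[0xFF, 0xFE]); // the BOM comes first, little-endian
}
```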
@ -1195,7 +1192,7 @@ fn link_output_kind(sess: &Session, crate_type: CrateType) -> LinkOutputKind {
}; };
// Adjust the output kind to target capabilities. // Adjust the output kind to target capabilities.
let opts = &sess.target.target.options; let opts = &sess.target.options;
let pic_exe_supported = opts.position_independent_executables; let pic_exe_supported = opts.position_independent_executables;
let static_pic_exe_supported = opts.static_position_independent_executables; let static_pic_exe_supported = opts.static_position_independent_executables;
let static_dylib_supported = opts.crt_static_allows_dylibs; let static_dylib_supported = opts.crt_static_allows_dylibs;
@ -1236,14 +1233,14 @@ fn crt_objects_fallback(sess: &Session, crate_type: CrateType) -> bool {
return self_contained; return self_contained;
} }
match sess.target.target.options.crt_objects_fallback { match sess.target.options.crt_objects_fallback {
// FIXME: Find a better heuristic for "native musl toolchain is available", // FIXME: Find a better heuristic for "native musl toolchain is available",
// based on host and linker path, for example. // based on host and linker path, for example.
// (https://github.com/rust-lang/rust/pull/71769#issuecomment-626330237). // (https://github.com/rust-lang/rust/pull/71769#issuecomment-626330237).
Some(CrtObjectsFallback::Musl) => sess.crt_static(Some(crate_type)), Some(CrtObjectsFallback::Musl) => sess.crt_static(Some(crate_type)),
Some(CrtObjectsFallback::Mingw) => { Some(CrtObjectsFallback::Mingw) => {
sess.host == sess.target.target sess.host == sess.target
&& sess.target.target.target_vendor != "uwp" && sess.target.target_vendor != "uwp"
&& detect_self_contained_mingw(&sess) && detect_self_contained_mingw(&sess)
} }
// FIXME: Figure out cases in which WASM needs to link with a native toolchain. // FIXME: Figure out cases in which WASM needs to link with a native toolchain.
@ -1259,7 +1256,7 @@ fn add_pre_link_objects(
link_output_kind: LinkOutputKind, link_output_kind: LinkOutputKind,
self_contained: bool, self_contained: bool,
) { ) {
let opts = &sess.target.target.options; let opts = &sess.target.options;
let objects = let objects =
if self_contained { &opts.pre_link_objects_fallback } else { &opts.pre_link_objects }; if self_contained { &opts.pre_link_objects_fallback } else { &opts.pre_link_objects };
for obj in objects.get(&link_output_kind).iter().copied().flatten() { for obj in objects.get(&link_output_kind).iter().copied().flatten() {
@ -1274,7 +1271,7 @@ fn add_post_link_objects(
link_output_kind: LinkOutputKind, link_output_kind: LinkOutputKind,
self_contained: bool, self_contained: bool,
) { ) {
let opts = &sess.target.target.options; let opts = &sess.target.options;
let objects = let objects =
if self_contained { &opts.post_link_objects_fallback } else { &opts.post_link_objects }; if self_contained { &opts.post_link_objects_fallback } else { &opts.post_link_objects };
for obj in objects.get(&link_output_kind).iter().copied().flatten() { for obj in objects.get(&link_output_kind).iter().copied().flatten() {
@ -1285,7 +1282,7 @@ fn add_post_link_objects(
/// Add arbitrary "pre-link" args defined by the target spec or from command line. /// Add arbitrary "pre-link" args defined by the target spec or from command line.
/// FIXME: Determine where exactly these args need to be inserted. /// FIXME: Determine where exactly these args need to be inserted.
fn add_pre_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) { fn add_pre_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
if let Some(args) = sess.target.target.options.pre_link_args.get(&flavor) { if let Some(args) = sess.target.options.pre_link_args.get(&flavor) {
cmd.args(args); cmd.args(args);
} }
cmd.args(&sess.opts.debugging_opts.pre_link_args); cmd.args(&sess.opts.debugging_opts.pre_link_args);
@ -1293,13 +1290,13 @@ fn add_pre_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor)
/// Add a link script embedded in the target, if applicable. /// Add a link script embedded in the target, if applicable.
fn add_link_script(cmd: &mut dyn Linker, sess: &Session, tmpdir: &Path, crate_type: CrateType) { fn add_link_script(cmd: &mut dyn Linker, sess: &Session, tmpdir: &Path, crate_type: CrateType) {
match (crate_type, &sess.target.target.options.link_script) { match (crate_type, &sess.target.options.link_script) {
(CrateType::Cdylib | CrateType::Executable, Some(script)) => { (CrateType::Cdylib | CrateType::Executable, Some(script)) => {
if !sess.target.target.options.linker_is_gnu { if !sess.target.options.linker_is_gnu {
sess.fatal("can only use link script when linking with GNU-like linker"); sess.fatal("can only use link script when linking with GNU-like linker");
} }
let file_name = ["rustc", &sess.target.target.llvm_target, "linkfile.ld"].join("-"); let file_name = ["rustc", &sess.target.llvm_target, "linkfile.ld"].join("-");
let path = tmpdir.join(file_name); let path = tmpdir.join(file_name);
if let Err(e) = fs::write(&path, script) { if let Err(e) = fs::write(&path, script) {
@ -1338,15 +1335,15 @@ fn add_late_link_args(
*ty == crate_type && list.iter().any(|&linkage| linkage == Linkage::Dynamic) *ty == crate_type && list.iter().any(|&linkage| linkage == Linkage::Dynamic)
}); });
if any_dynamic_crate { if any_dynamic_crate {
if let Some(args) = sess.target.target.options.late_link_args_dynamic.get(&flavor) { if let Some(args) = sess.target.options.late_link_args_dynamic.get(&flavor) {
cmd.args(args); cmd.args(args);
} }
} else { } else {
if let Some(args) = sess.target.target.options.late_link_args_static.get(&flavor) { if let Some(args) = sess.target.options.late_link_args_static.get(&flavor) {
cmd.args(args); cmd.args(args);
} }
} }
if let Some(args) = sess.target.target.options.late_link_args.get(&flavor) { if let Some(args) = sess.target.options.late_link_args.get(&flavor) {
cmd.args(args); cmd.args(args);
} }
} }
@ -1354,7 +1351,7 @@ fn add_late_link_args(
/// Add arbitrary "post-link" args defined by the target spec. /// Add arbitrary "post-link" args defined by the target spec.
/// FIXME: Determine where exactly these args need to be inserted. /// FIXME: Determine where exactly these args need to be inserted.
fn add_post_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) { fn add_post_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
if let Some(args) = sess.target.target.options.post_link_args.get(&flavor) { if let Some(args) = sess.target.options.post_link_args.get(&flavor) {
cmd.args(args); cmd.args(args);
} }
} }
@ -1456,7 +1453,7 @@ fn add_library_search_dirs(cmd: &mut dyn Linker, sess: &Session, self_contained:
/// Add options making relocation sections in the produced ELF files read-only /// Add options making relocation sections in the produced ELF files read-only
/// and suppressing lazy binding. /// and suppressing lazy binding.
fn add_relro_args(cmd: &mut dyn Linker, sess: &Session) { fn add_relro_args(cmd: &mut dyn Linker, sess: &Session) {
match sess.opts.debugging_opts.relro_level.unwrap_or(sess.target.target.options.relro_level) { match sess.opts.debugging_opts.relro_level.unwrap_or(sess.target.options.relro_level) {
RelroLevel::Full => cmd.full_relro(), RelroLevel::Full => cmd.full_relro(),
RelroLevel::Partial => cmd.partial_relro(), RelroLevel::Partial => cmd.partial_relro(),
RelroLevel::Off => cmd.no_relro(), RelroLevel::Off => cmd.no_relro(),
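`add_relro_args` above lets `-Z relro-level` override the target default and then maps the level onto linker behaviour. As an illustration only (the flags below are the conventional GNU-ld spellings, not quoted from this diff), the three levels roughly correspond to:

```rust
// Sketch: conventional GNU-style linker flags per RELRO level. Illustrative
// assumption, not the Linker trait implementation from this commit.
fn relro_flags(level: &str) -> &'static [&'static str] {
    match level {
        "full" => &["-z", "relro", "-z", "now"],
        "partial" => &["-z", "relro"],
        _ => &["-z", "norelro"],
    }
}

fn main() {
    assert_eq!(relro_flags("full"), &["-z", "relro", "-z", "now"][..]);
    assert_eq!(relro_flags("off"), &["-z", "norelro"][..]);
}
```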
@ -1487,9 +1484,9 @@ fn add_rpath_args(
let mut rpath_config = RPathConfig { let mut rpath_config = RPathConfig {
used_crates: &codegen_results.crate_info.used_crates_dynamic, used_crates: &codegen_results.crate_info.used_crates_dynamic,
out_filename: out_filename.to_path_buf(), out_filename: out_filename.to_path_buf(),
has_rpath: sess.target.target.options.has_rpath, has_rpath: sess.target.options.has_rpath,
is_like_osx: sess.target.target.options.is_like_osx, is_like_osx: sess.target.options.is_like_osx,
linker_is_gnu: sess.target.target.options.linker_is_gnu, linker_is_gnu: sess.target.options.linker_is_gnu,
get_install_prefix_lib_path: &mut get_install_prefix_lib_path, get_install_prefix_lib_path: &mut get_install_prefix_lib_path,
}; };
cmd.args(&rpath::get_rpath_flags(&mut rpath_config)); cmd.args(&rpath::get_rpath_flags(&mut rpath_config));
@ -1517,7 +1514,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
let base_cmd = get_linker(sess, path, flavor, crt_objects_fallback); let base_cmd = get_linker(sess, path, flavor, crt_objects_fallback);
// FIXME: Move `/LIBPATH` addition for uwp targets from the linker construction // FIXME: Move `/LIBPATH` addition for uwp targets from the linker construction
// to the linker args construction. // to the linker args construction.
assert!(base_cmd.get_args().is_empty() || sess.target.target.target_vendor == "uwp"); assert!(base_cmd.get_args().is_empty() || sess.target.target_vendor == "uwp");
let cmd = &mut *codegen_results.linker_info.to_linker(base_cmd, &sess, flavor, target_cpu); let cmd = &mut *codegen_results.linker_info.to_linker(base_cmd, &sess, flavor, target_cpu);
let link_output_kind = link_output_kind(sess, crate_type); let link_output_kind = link_output_kind(sess, crate_type);
@ -1531,7 +1528,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
add_link_script(cmd, sess, tmpdir, crate_type); add_link_script(cmd, sess, tmpdir, crate_type);
// NO-OPT-OUT, OBJECT-FILES-NO, AUDIT-ORDER // NO-OPT-OUT, OBJECT-FILES-NO, AUDIT-ORDER
if sess.target.target.options.is_like_fuchsia && crate_type == CrateType::Executable { if sess.target.options.is_like_fuchsia && crate_type == CrateType::Executable {
let prefix = if sess.opts.debugging_opts.sanitizer.contains(SanitizerSet::ADDRESS) { let prefix = if sess.opts.debugging_opts.sanitizer.contains(SanitizerSet::ADDRESS) {
"asan/" "asan/"
} else { } else {
@ -1541,7 +1538,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
} }
// NO-OPT-OUT, OBJECT-FILES-NO, AUDIT-ORDER // NO-OPT-OUT, OBJECT-FILES-NO, AUDIT-ORDER
if sess.target.target.options.eh_frame_header { if sess.target.options.eh_frame_header {
cmd.add_eh_frame_header(); cmd.add_eh_frame_header();
} }
@ -1554,7 +1551,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
add_pre_link_objects(cmd, sess, link_output_kind, crt_objects_fallback); add_pre_link_objects(cmd, sess, link_output_kind, crt_objects_fallback);
// NO-OPT-OUT, OBJECT-FILES-NO, AUDIT-ORDER // NO-OPT-OUT, OBJECT-FILES-NO, AUDIT-ORDER
if sess.target.target.options.is_like_emscripten { if sess.target.options.is_like_emscripten {
cmd.arg("-s"); cmd.arg("-s");
cmd.arg(if sess.panic_strategy() == PanicStrategy::Abort { cmd.arg(if sess.panic_strategy() == PanicStrategy::Abort {
"DISABLE_EXCEPTION_CATCHING=1" "DISABLE_EXCEPTION_CATCHING=1"
@ -1582,7 +1579,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
cmd.output_filename(out_filename); cmd.output_filename(out_filename);
// OBJECT-FILES-NO, AUDIT-ORDER // OBJECT-FILES-NO, AUDIT-ORDER
if crate_type == CrateType::Executable && sess.target.target.options.is_like_windows { if crate_type == CrateType::Executable && sess.target.options.is_like_windows {
if let Some(ref s) = codegen_results.windows_subsystem { if let Some(ref s) = codegen_results.windows_subsystem {
cmd.subsystem(s); cmd.subsystem(s);
} }
@ -1626,7 +1623,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
// OBJECT-FILES-NO, AUDIT-ORDER // OBJECT-FILES-NO, AUDIT-ORDER
// We want to prevent the compiler from accidentally leaking in any system libraries, // We want to prevent the compiler from accidentally leaking in any system libraries,
// so by default we tell linkers not to link to any default libraries. // so by default we tell linkers not to link to any default libraries.
if !sess.opts.cg.default_linker_libraries && sess.target.target.options.no_default_libraries { if !sess.opts.cg.default_linker_libraries && sess.target.options.no_default_libraries {
cmd.no_default_libraries(); cmd.no_default_libraries();
} }
@ -1845,12 +1842,8 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
} }
// Converts a library file-stem into a cc -l argument // Converts a library file-stem into a cc -l argument
fn unlib<'a>(config: &config::Config, stem: &'a str) -> &'a str { fn unlib<'a>(target: &Target, stem: &'a str) -> &'a str {
if stem.starts_with("lib") && !config.target.options.is_like_windows { if stem.starts_with("lib") && !target.options.is_like_windows { &stem[3..] } else { stem }
&stem[3..]
} else {
stem
}
} }
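`unlib` above converts a library file-stem into something `cc -l` accepts: the `lib` prefix is dropped except on Windows-like targets. A standalone sketch:

```rust
// Sketch: strip a leading "lib" from the stem unless the target is Windows-like.
fn unlib(is_like_windows: bool, stem: &str) -> &str {
    if stem.starts_with("lib") && !is_like_windows { &stem[3..] } else { stem }
}

fn main() {
    assert_eq!(unlib(false, "libfoo"), "foo");   // becomes -lfoo on the link line
    assert_eq!(unlib(true, "libfoo"), "libfoo"); // Windows-like targets keep the stem
}
```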
// Adds the static "rlib" versions of all crates to the command line. // Adds the static "rlib" versions of all crates to the command line.
@ -1945,7 +1938,7 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
// though, so we let that object file slide. // though, so we let that object file slide.
let skip_because_lto = are_upstream_rust_objects_already_included(sess) let skip_because_lto = are_upstream_rust_objects_already_included(sess)
&& is_rust_object && is_rust_object
&& (sess.target.target.options.no_builtins && (sess.target.options.no_builtins
|| !codegen_results.crate_info.is_no_builtins.contains(&cnum)); || !codegen_results.crate_info.is_no_builtins.contains(&cnum));
if skip_because_cfg_say_so || skip_because_lto { if skip_because_cfg_say_so || skip_because_lto {
@ -2088,10 +2081,10 @@ fn are_upstream_rust_objects_already_included(sess: &Session) -> bool {
} }
fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) { fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
let arch = &sess.target.target.arch; let arch = &sess.target.arch;
let os = &sess.target.target.target_os; let os = &sess.target.target_os;
let llvm_target = &sess.target.target.llvm_target; let llvm_target = &sess.target.llvm_target;
if sess.target.target.target_vendor != "apple" if sess.target.target_vendor != "apple"
|| !matches!(os.as_str(), "ios" | "tvos") || !matches!(os.as_str(), "ios" | "tvos")
|| flavor != LinkerFlavor::Gcc || flavor != LinkerFlavor::Gcc
{ {


@ -184,7 +184,7 @@ impl<'a> GccLinker<'a> {
// * On OSX they have their own linker, not binutils' // * On OSX they have their own linker, not binutils'
// * For WebAssembly the only functional linker is LLD, which doesn't // * For WebAssembly the only functional linker is LLD, which doesn't
// support hint flags // support hint flags
!self.sess.target.target.options.is_like_osx && self.sess.target.target.arch != "wasm32" !self.sess.target.options.is_like_osx && self.sess.target.arch != "wasm32"
} }
// Some platforms take hints about whether a library is static or dynamic. // Some platforms take hints about whether a library is static or dynamic.
@ -221,10 +221,8 @@ impl<'a> GccLinker<'a> {
let opt_level = match self.sess.opts.optimize { let opt_level = match self.sess.opts.optimize {
config::OptLevel::No => "O0", config::OptLevel::No => "O0",
config::OptLevel::Less => "O1", config::OptLevel::Less => "O1",
config::OptLevel::Default => "O2", config::OptLevel::Default | config::OptLevel::Size | config::OptLevel::SizeMin => "O2",
config::OptLevel::Aggressive => "O3", config::OptLevel::Aggressive => "O3",
config::OptLevel::Size => "Os",
config::OptLevel::SizeMin => "Oz",
}; };
self.linker_arg(&format!("-plugin-opt={}", opt_level)); self.linker_arg(&format!("-plugin-opt={}", opt_level));
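The hunk above folds the size-oriented levels into `O2` when forwarding an optimization level to the LTO plugin through `-plugin-opt=`; presumably the plugin option does not take `Os`/`Oz` the way `-C opt-level` does (the diff itself does not state the reason). A standalone sketch of the mapping, using a stand-in enum rather than rustc's `config::OptLevel`:

```rust
// Sketch: map an optimization level onto the string passed via -plugin-opt=.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum OptLevel {
    No,
    Less,
    Default,
    Aggressive,
    Size,
    SizeMin,
}

fn plugin_opt_level(level: OptLevel) -> &'static str {
    match level {
        OptLevel::No => "O0",
        OptLevel::Less => "O1",
        // Size-oriented levels are folded into O2 for the plugin.
        OptLevel::Default | OptLevel::Size | OptLevel::SizeMin => "O2",
        OptLevel::Aggressive => "O3",
    }
}

fn main() {
    assert_eq!(plugin_opt_level(OptLevel::SizeMin), "O2");
    assert_eq!(plugin_opt_level(OptLevel::Aggressive), "O3");
}
```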
@ -234,7 +232,7 @@ impl<'a> GccLinker<'a> {
fn build_dylib(&mut self, out_filename: &Path) { fn build_dylib(&mut self, out_filename: &Path) {
// On mac we need to tell the linker to let this library be rpathed // On mac we need to tell the linker to let this library be rpathed
if self.sess.target.target.options.is_like_osx { if self.sess.target.options.is_like_osx {
self.cmd.arg("-dynamiclib"); self.cmd.arg("-dynamiclib");
self.linker_arg("-dylib"); self.linker_arg("-dylib");
@ -250,7 +248,7 @@ impl<'a> GccLinker<'a> {
} }
} else { } else {
self.cmd.arg("-shared"); self.cmd.arg("-shared");
if self.sess.target.target.options.is_like_windows { if self.sess.target.options.is_like_windows {
// The output filename already contains `dll_suffix` so // The output filename already contains `dll_suffix` so
// the resulting import library will have a name in the // the resulting import library will have a name in the
// form of libfoo.dll.a // form of libfoo.dll.a
@ -258,9 +256,9 @@ impl<'a> GccLinker<'a> {
out_filename.file_name().and_then(|file| file.to_str()).map(|file| { out_filename.file_name().and_then(|file| file.to_str()).map(|file| {
format!( format!(
"{}{}{}", "{}{}{}",
self.sess.target.target.options.staticlib_prefix, self.sess.target.options.staticlib_prefix,
file, file,
self.sess.target.target.options.staticlib_suffix self.sess.target.options.staticlib_suffix
) )
}); });
if let Some(implib_name) = implib_name { if let Some(implib_name) = implib_name {
@ -282,7 +280,7 @@ impl<'a> Linker for GccLinker<'a> {
fn set_output_kind(&mut self, output_kind: LinkOutputKind, out_filename: &Path) { fn set_output_kind(&mut self, output_kind: LinkOutputKind, out_filename: &Path) {
match output_kind { match output_kind {
LinkOutputKind::DynamicNoPicExe => { LinkOutputKind::DynamicNoPicExe => {
if !self.is_ld && self.sess.target.target.options.linker_is_gnu { if !self.is_ld && self.sess.target.options.linker_is_gnu {
self.cmd.arg("-no-pie"); self.cmd.arg("-no-pie");
} }
} }
@ -293,7 +291,7 @@ impl<'a> Linker for GccLinker<'a> {
LinkOutputKind::StaticNoPicExe => { LinkOutputKind::StaticNoPicExe => {
// `-static` works for both gcc wrapper and ld. // `-static` works for both gcc wrapper and ld.
self.cmd.arg("-static"); self.cmd.arg("-static");
if !self.is_ld && self.sess.target.target.options.linker_is_gnu { if !self.is_ld && self.sess.target.options.linker_is_gnu {
self.cmd.arg("-no-pie"); self.cmd.arg("-no-pie");
} }
} }
@ -322,7 +320,7 @@ impl<'a> Linker for GccLinker<'a> {
// any `#[link]` attributes in the `libc` crate, see #72782 for details. // any `#[link]` attributes in the `libc` crate, see #72782 for details.
// FIXME: Switch to using `#[link]` attributes in the `libc` crate // FIXME: Switch to using `#[link]` attributes in the `libc` crate
// similarly to other targets. // similarly to other targets.
if self.sess.target.target.target_os == "vxworks" if self.sess.target.target_os == "vxworks"
&& matches!( && matches!(
output_kind, output_kind,
LinkOutputKind::StaticNoPicExe LinkOutputKind::StaticNoPicExe
@ -387,7 +385,7 @@ impl<'a> Linker for GccLinker<'a> {
// functions, etc. // functions, etc.
fn link_whole_staticlib(&mut self, lib: Symbol, search_path: &[PathBuf]) { fn link_whole_staticlib(&mut self, lib: Symbol, search_path: &[PathBuf]) {
self.hint_static(); self.hint_static();
let target = &self.sess.target.target; let target = &self.sess.target;
if !target.options.is_like_osx { if !target.options.is_like_osx {
self.linker_arg("--whole-archive").cmd.arg(format!("-l{}", lib)); self.linker_arg("--whole-archive").cmd.arg(format!("-l{}", lib));
self.linker_arg("--no-whole-archive"); self.linker_arg("--no-whole-archive");
@ -402,7 +400,7 @@ impl<'a> Linker for GccLinker<'a> {
fn link_whole_rlib(&mut self, lib: &Path) { fn link_whole_rlib(&mut self, lib: &Path) {
self.hint_static(); self.hint_static();
if self.sess.target.target.options.is_like_osx { if self.sess.target.options.is_like_osx {
self.linker_arg("-force_load"); self.linker_arg("-force_load");
self.linker_arg(&lib); self.linker_arg(&lib);
} else { } else {
@ -426,9 +424,9 @@ impl<'a> Linker for GccLinker<'a> {
// -dead_strip can't be part of the pre_link_args because it's also used // -dead_strip can't be part of the pre_link_args because it's also used
// for partial linking when using multiple codegen units (-r). So we // for partial linking when using multiple codegen units (-r). So we
// insert it here. // insert it here.
if self.sess.target.target.options.is_like_osx { if self.sess.target.options.is_like_osx {
self.linker_arg("-dead_strip"); self.linker_arg("-dead_strip");
} else if self.sess.target.target.options.is_like_solaris { } else if self.sess.target.options.is_like_solaris {
self.linker_arg("-zignore"); self.linker_arg("-zignore");
// If we're building a dylib, we don't use --gc-sections because LLVM // If we're building a dylib, we don't use --gc-sections because LLVM
@ -442,7 +440,7 @@ impl<'a> Linker for GccLinker<'a> {
} }
fn optimize(&mut self) { fn optimize(&mut self) {
if !self.sess.target.target.options.linker_is_gnu { if !self.sess.target.options.linker_is_gnu {
return; return;
} }
@ -456,7 +454,7 @@ impl<'a> Linker for GccLinker<'a> {
} }
fn pgo_gen(&mut self) { fn pgo_gen(&mut self) {
if !self.sess.target.target.options.linker_is_gnu { if !self.sess.target.options.linker_is_gnu {
return; return;
} }
@ -506,7 +504,7 @@ impl<'a> Linker for GccLinker<'a> {
fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType) { fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType) {
// Symbol visibility in object files typically takes care of this. // Symbol visibility in object files typically takes care of this.
if crate_type == CrateType::Executable if crate_type == CrateType::Executable
&& self.sess.target.target.options.override_export_symbols.is_none() && self.sess.target.options.override_export_symbols.is_none()
{ {
return; return;
} }
@ -515,7 +513,7 @@ impl<'a> Linker for GccLinker<'a> {
// The object files have far more public symbols than we actually want to export, // The object files have far more public symbols than we actually want to export,
// so we hide them all here. // so we hide them all here.
if !self.sess.target.target.options.limit_rdylib_exports { if !self.sess.target.options.limit_rdylib_exports {
return; return;
} }
@ -523,13 +521,13 @@ impl<'a> Linker for GccLinker<'a> {
return; return;
} }
let is_windows = self.sess.target.target.options.is_like_windows; let is_windows = self.sess.target.options.is_like_windows;
let mut arg = OsString::new(); let mut arg = OsString::new();
let path = tmpdir.join(if is_windows { "list.def" } else { "list" }); let path = tmpdir.join(if is_windows { "list.def" } else { "list" });
debug!("EXPORTED SYMBOLS:"); debug!("EXPORTED SYMBOLS:");
if self.sess.target.target.options.is_like_osx { if self.sess.target.options.is_like_osx {
// Write a plain, newline-separated list of symbols // Write a plain, newline-separated list of symbols
let res: io::Result<()> = try { let res: io::Result<()> = try {
let mut f = BufWriter::new(File::create(&path)?); let mut f = BufWriter::new(File::create(&path)?);
@ -575,12 +573,12 @@ impl<'a> Linker for GccLinker<'a> {
} }
} }
if self.sess.target.target.options.is_like_osx { if self.sess.target.options.is_like_osx {
if !self.is_ld { if !self.is_ld {
arg.push("-Wl,") arg.push("-Wl,")
} }
arg.push("-exported_symbols_list,"); arg.push("-exported_symbols_list,");
} else if self.sess.target.target.options.is_like_solaris { } else if self.sess.target.options.is_like_solaris {
if !self.is_ld { if !self.is_ld {
arg.push("-Wl,") arg.push("-Wl,")
} }
@ -1205,7 +1203,7 @@ impl<'a> Linker for WasmLd<'a> {
} }
fn exported_symbols(tcx: TyCtxt<'_>, crate_type: CrateType) -> Vec<String> { fn exported_symbols(tcx: TyCtxt<'_>, crate_type: CrateType) -> Vec<String> {
if let Some(ref exports) = tcx.sess.target.target.options.override_export_symbols { if let Some(ref exports) = tcx.sess.target.options.override_export_symbols {
return exports.clone(); return exports.clone();
} }
@ -1295,7 +1293,7 @@ impl<'a> Linker for PtxLinker<'a> {
// Provide the linker with fallback to internal `target-cpu`. // Provide the linker with fallback to internal `target-cpu`.
self.cmd.arg("--fallback-arch").arg(match self.sess.opts.cg.target_cpu { self.cmd.arg("--fallback-arch").arg(match self.sess.opts.cg.target_cpu {
Some(ref s) => s, Some(ref s) => s,
None => &self.sess.target.target.options.cpu, None => &self.sess.target.options.cpu,
}); });
} }


@ -229,8 +229,8 @@ fn exported_symbols_provider_local(
// needs to be exported. // needs to be exported.
// However, on platforms that don't allow for Rust dylibs, having // However, on platforms that don't allow for Rust dylibs, having
// external linkage is enough for monomorphization to be linked to. // external linkage is enough for monomorphization to be linked to.
let need_visibility = tcx.sess.target.target.options.dynamic_linking let need_visibility =
&& !tcx.sess.target.target.options.only_cdylib; tcx.sess.target.options.dynamic_linking && !tcx.sess.target.options.only_cdylib;
let (_, cgus) = tcx.collect_and_partition_mono_items(LOCAL_CRATE); let (_, cgus) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
@ -391,7 +391,7 @@ fn symbol_export_level(tcx: TyCtxt<'_>, sym_def_id: DefId) -> SymbolExportLevel
codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL); codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL);
if is_extern && !std_internal { if is_extern && !std_internal {
let target = &tcx.sess.target.target.llvm_target; let target = &tcx.sess.target.llvm_target;
// WebAssembly cannot export data symbols, so reduce their export level // WebAssembly cannot export data symbols, so reduce their export level
if target.contains("emscripten") { if target.contains("emscripten") {
if let Some(Node::Item(&hir::Item { kind: hir::ItemKind::Static(..), .. })) = if let Some(Node::Item(&hir::Item { kind: hir::ItemKind::Static(..), .. })) =

View file

@ -13,7 +13,6 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::profiling::TimingGuard; use rustc_data_structures::profiling::TimingGuard;
use rustc_data_structures::profiling::VerboseTimingGuard; use rustc_data_structures::profiling::VerboseTimingGuard;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use rustc_errors::emitter::Emitter; use rustc_errors::emitter::Emitter;
use rustc_errors::{DiagnosticId, FatalError, Handler, Level}; use rustc_errors::{DiagnosticId, FatalError, Handler, Level};
@ -140,7 +139,7 @@ impl ModuleConfig {
let emit_obj = if !should_emit_obj { let emit_obj = if !should_emit_obj {
EmitObj::None EmitObj::None
} else if sess.target.target.options.obj_is_bitcode } else if sess.target.options.obj_is_bitcode
|| (sess.opts.cg.linker_plugin_lto.enabled() && !no_builtins) || (sess.opts.cg.linker_plugin_lto.enabled() && !no_builtins)
{ {
// This case is selected if the target uses objects as bitcode, or // This case is selected if the target uses objects as bitcode, or
@ -222,11 +221,11 @@ impl ModuleConfig {
false false
), ),
emit_obj, emit_obj,
bc_cmdline: sess.target.target.options.bitcode_llvm_cmdline.clone(), bc_cmdline: sess.target.options.bitcode_llvm_cmdline.clone(),
verify_llvm_ir: sess.verify_llvm_ir(), verify_llvm_ir: sess.verify_llvm_ir(),
no_prepopulate_passes: sess.opts.cg.no_prepopulate_passes, no_prepopulate_passes: sess.opts.cg.no_prepopulate_passes,
no_builtins: no_builtins || sess.target.target.options.no_builtins, no_builtins: no_builtins || sess.target.options.no_builtins,
// Exclude metadata and allocator modules from time_passes output, // Exclude metadata and allocator modules from time_passes output,
// since they throw off the "LLVM passes" measurement. // since they throw off the "LLVM passes" measurement.
@ -253,7 +252,7 @@ impl ModuleConfig {
.opts .opts
.debugging_opts .debugging_opts
.merge_functions .merge_functions
.unwrap_or(sess.target.target.options.merge_functions) .unwrap_or(sess.target.options.merge_functions)
{ {
MergeFunctions::Disabled => false, MergeFunctions::Disabled => false,
MergeFunctions::Trampolines | MergeFunctions::Aliases => { MergeFunctions::Trampolines | MergeFunctions::Aliases => {
@ -308,7 +307,7 @@ pub struct CodegenContext<B: WriteBackendMethods> {
pub allocator_module_config: Arc<ModuleConfig>, pub allocator_module_config: Arc<ModuleConfig>,
pub tm_factory: TargetMachineFactory<B>, pub tm_factory: TargetMachineFactory<B>,
pub msvc_imps_needed: bool, pub msvc_imps_needed: bool,
pub target_pointer_width: String, pub target_pointer_width: u32,
pub target_arch: String, pub target_arch: String,
pub debuginfo: config::DebugInfo, pub debuginfo: config::DebugInfo,
@ -389,7 +388,7 @@ fn need_bitcode_in_object(sess: &Session) -> bool {
let requested_for_rlib = sess.opts.cg.embed_bitcode let requested_for_rlib = sess.opts.cg.embed_bitcode
&& sess.crate_types().contains(&CrateType::Rlib) && sess.crate_types().contains(&CrateType::Rlib)
&& sess.opts.output_types.contains_key(&OutputType::Exe); && sess.opts.output_types.contains_key(&OutputType::Exe);
let forced_by_target = sess.target.target.options.forces_embed_bitcode; let forced_by_target = sess.target.options.forces_embed_bitcode;
requested_for_rlib || forced_by_target requested_for_rlib || forced_by_target
} }
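A small sketch of the decision encoded by `need_bitcode_in_object` above; the flag names are simplified stand-ins for the real session options:

```rust
// Bitcode goes into object files either because the user asked for embedded
// bitcode on an rlib build that also produces an executable, or because the
// target unconditionally forces it.
fn need_bitcode_in_object(
    embed_bitcode: bool,
    building_rlib: bool,
    emitting_exe: bool,
    target_forces_embed: bool,
) -> bool {
    let requested_for_rlib = embed_bitcode && building_rlib && emitting_exe;
    requested_for_rlib || target_forces_embed
}

fn main() {
    assert!(need_bitcode_in_object(true, true, true, false));
    assert!(need_bitcode_in_object(false, false, false, true));
    assert!(!need_bitcode_in_object(true, true, false, false));
}
```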
@ -414,7 +413,6 @@ pub fn start_async_codegen<B: ExtraBackendMethods>(
let sess = tcx.sess; let sess = tcx.sess;
let crate_name = tcx.crate_name(LOCAL_CRATE); let crate_name = tcx.crate_name(LOCAL_CRATE);
let crate_hash = tcx.crate_hash(LOCAL_CRATE);
let no_builtins = tcx.sess.contains_name(&tcx.hir().krate().item.attrs, sym::no_builtins); let no_builtins = tcx.sess.contains_name(&tcx.hir().krate().item.attrs, sym::no_builtins);
let is_compiler_builtins = let is_compiler_builtins =
tcx.sess.contains_name(&tcx.hir().krate().item.attrs, sym::compiler_builtins); tcx.sess.contains_name(&tcx.hir().krate().item.attrs, sym::compiler_builtins);
@ -463,7 +461,6 @@ pub fn start_async_codegen<B: ExtraBackendMethods>(
OngoingCodegen { OngoingCodegen {
backend, backend,
crate_name, crate_name,
crate_hash,
metadata, metadata,
windows_subsystem, windows_subsystem,
linker_info, linker_info,
@ -658,15 +655,6 @@ fn produce_final_output_artifacts(
// These are used in linking steps and will be cleaned up afterward. // These are used in linking steps and will be cleaned up afterward.
} }
pub fn dump_incremental_data(_codegen_results: &CodegenResults) {
// FIXME(mw): This does not work at the moment because the situation has
// become more complicated due to incremental LTO. Now a CGU
// can have more than two caching states.
// println!("[incremental] Re-using {} out of {} modules",
// codegen_results.modules.iter().filter(|m| m.pre_existing).count(),
// codegen_results.modules.len());
}
pub enum WorkItem<B: WriteBackendMethods> { pub enum WorkItem<B: WriteBackendMethods> {
/// Optimize a newly codegened, totally unoptimized module. /// Optimize a newly codegened, totally unoptimized module.
Optimize(ModuleCodegen<B::Module>), Optimize(ModuleCodegen<B::Module>),
@ -1034,8 +1022,8 @@ fn start_executing_work<B: ExtraBackendMethods>(
tm_factory: TargetMachineFactory(backend.target_machine_factory(tcx.sess, ol)), tm_factory: TargetMachineFactory(backend.target_machine_factory(tcx.sess, ol)),
total_cgus, total_cgus,
msvc_imps_needed: msvc_imps_needed(tcx), msvc_imps_needed: msvc_imps_needed(tcx),
target_pointer_width: tcx.sess.target.target.target_pointer_width.clone(), target_pointer_width: tcx.sess.target.pointer_width,
target_arch: tcx.sess.target.target.arch.clone(), target_arch: tcx.sess.target.arch.clone(),
debuginfo: tcx.sess.opts.debuginfo, debuginfo: tcx.sess.opts.debuginfo,
}; };
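`target_pointer_width` changes from a `String` to a `u32` in `CodegenContext`, so consumers no longer have to re-parse it. A hedged before/after sketch:

```rust
fn main() {
    // Old shape: the width arrived as a string and had to be parsed at each use.
    let old_width: String = "64".to_string();
    let bits_old: u64 = old_width.parse().expect("malformed pointer width");

    // New shape: the width is already numeric, so a conversion suffices.
    let new_width: u32 = 64;
    let bits_new = u64::from(new_width);

    assert_eq!(bits_old, bits_new);
}
```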
@ -1175,7 +1163,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
// necessary. There's already optimizations in place to avoid sending work // necessary. There's already optimizations in place to avoid sending work
// back to the coordinator if LTO isn't requested. // back to the coordinator if LTO isn't requested.
return thread::spawn(move || { return thread::spawn(move || {
let max_workers = ::num_cpus::get(); let max_workers = num_cpus::get();
let mut worker_id_counter = 0; let mut worker_id_counter = 0;
let mut free_worker_ids = Vec::new(); let mut free_worker_ids = Vec::new();
let mut get_worker_id = |free_worker_ids: &mut Vec<usize>| { let mut get_worker_id = |free_worker_ids: &mut Vec<usize>| {
@ -1531,8 +1519,6 @@ fn start_executing_work<B: ExtraBackendMethods>(
} }
} }
pub const CODEGEN_WORKER_ID: usize = usize::MAX;
/// `FatalError` is explicitly not `Send`. /// `FatalError` is explicitly not `Send`.
#[must_use] #[must_use]
pub struct WorkerFatalError; pub struct WorkerFatalError;
@ -1720,7 +1706,6 @@ impl SharedEmitterMain {
pub struct OngoingCodegen<B: ExtraBackendMethods> { pub struct OngoingCodegen<B: ExtraBackendMethods> {
pub backend: B, pub backend: B,
pub crate_name: Symbol, pub crate_name: Symbol,
pub crate_hash: Svh,
pub metadata: EncodedMetadata, pub metadata: EncodedMetadata,
pub windows_subsystem: Option<String>, pub windows_subsystem: Option<String>,
pub linker_info: LinkerInfo, pub linker_info: LinkerInfo,
@ -1766,7 +1751,6 @@ impl<B: ExtraBackendMethods> OngoingCodegen<B> {
( (
CodegenResults { CodegenResults {
crate_name: self.crate_name, crate_name: self.crate_name,
crate_hash: self.crate_hash,
metadata: self.metadata, metadata: self.metadata,
windows_subsystem: self.windows_subsystem, windows_subsystem: self.windows_subsystem,
linker_info: self.linker_info, linker_info: self.linker_info,
@ -1881,11 +1865,11 @@ fn msvc_imps_needed(tcx: TyCtxt<'_>) -> bool {
// something is wrong with commandline arg validation. // something is wrong with commandline arg validation.
assert!( assert!(
!(tcx.sess.opts.cg.linker_plugin_lto.enabled() !(tcx.sess.opts.cg.linker_plugin_lto.enabled()
&& tcx.sess.target.target.options.is_like_windows && tcx.sess.target.options.is_like_windows
&& tcx.sess.opts.cg.prefer_dynamic) && tcx.sess.opts.cg.prefer_dynamic)
); );
tcx.sess.target.target.options.is_like_windows && tcx.sess.target.options.is_like_windows &&
tcx.sess.crate_types().iter().any(|ct| *ct == CrateType::Rlib) && tcx.sess.crate_types().iter().any(|ct| *ct == CrateType::Rlib) &&
// ThinLTO can't handle this workaround in all cases, so we don't // ThinLTO can't handle this workaround in all cases, so we don't
// emit the `__imp_` symbols. Instead we make them unnecessary by disallowing // emit the `__imp_` symbols. Instead we make them unnecessary by disallowing

View file

@ -327,7 +327,7 @@ fn cast_shift_rhs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
/// currently uses SEH-ish unwinding with DWARF info tables to the side (same as /// currently uses SEH-ish unwinding with DWARF info tables to the side (same as
/// 64-bit MinGW) instead of "full SEH". /// 64-bit MinGW) instead of "full SEH".
pub fn wants_msvc_seh(sess: &Session) -> bool { pub fn wants_msvc_seh(sess: &Session) -> bool {
sess.target.target.options.is_like_msvc sess.target.options.is_like_msvc
} }
pub fn memcpy_ty<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( pub fn memcpy_ty<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
@ -393,7 +393,7 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
) -> Bx::Function { ) -> Bx::Function {
// The entry function is either `int main(void)` or `int main(int argc, char **argv)`, // The entry function is either `int main(void)` or `int main(int argc, char **argv)`,
// depending on whether the target needs `argc` and `argv` to be passed in. // depending on whether the target needs `argc` and `argv` to be passed in.
let llfty = if cx.sess().target.target.options.main_needs_argc_argv { let llfty = if cx.sess().target.options.main_needs_argc_argv {
cx.type_func(&[cx.type_int(), cx.type_ptr_to(cx.type_i8p())], cx.type_int()) cx.type_func(&[cx.type_int(), cx.type_ptr_to(cx.type_i8p())], cx.type_int())
} else { } else {
cx.type_func(&[], cx.type_int()) cx.type_func(&[], cx.type_int())
@ -464,7 +464,7 @@ fn get_argc_argv<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
cx: &'a Bx::CodegenCx, cx: &'a Bx::CodegenCx,
bx: &mut Bx, bx: &mut Bx,
) -> (Bx::Value, Bx::Value) { ) -> (Bx::Value, Bx::Value) {
if cx.sess().target.target.options.main_needs_argc_argv { if cx.sess().target.options.main_needs_argc_argv {
// Params from native `main()` used as args for rust start function // Params from native `main()` used as args for rust start function
let param_argc = bx.get_param(0); let param_argc = bx.get_param(0);
let param_argv = bx.get_param(1); let param_argv = bx.get_param(1);
@ -479,8 +479,6 @@ fn get_argc_argv<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
} }
} }
pub const CODEGEN_WORKER_ID: usize = usize::MAX;
pub fn codegen_crate<B: ExtraBackendMethods>( pub fn codegen_crate<B: ExtraBackendMethods>(
backend: B, backend: B,
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
@ -695,7 +693,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
total_codegen_time.into_inner(), total_codegen_time.into_inner(),
); );
::rustc_incremental::assert_module_sources::assert_module_sources(tcx); rustc_incremental::assert_module_sources::assert_module_sources(tcx);
symbol_names_test::report_symbol_names(tcx); symbol_names_test::report_symbol_names(tcx);
@ -754,8 +752,8 @@ impl<B: ExtraBackendMethods> Drop for AbortCodegenOnDrop<B> {
} }
fn finalize_tcx(tcx: TyCtxt<'_>) { fn finalize_tcx(tcx: TyCtxt<'_>) {
tcx.sess.time("assert_dep_graph", || ::rustc_incremental::assert_dep_graph(tcx)); tcx.sess.time("assert_dep_graph", || rustc_incremental::assert_dep_graph(tcx));
tcx.sess.time("serialize_dep_graph", || ::rustc_incremental::save_dep_graph(tcx)); tcx.sess.time("serialize_dep_graph", || rustc_incremental::save_dep_graph(tcx));
// We assume that no queries are run past here. If there are new queries // We assume that no queries are run past here. If there are new queries
// after this point, they'll show up as "<unknown>" in self-profiling data. // after this point, they'll show up as "<unknown>" in self-profiling data.

View file

@ -33,7 +33,7 @@ pub fn push_debuginfo_type_name<'tcx>(
) { ) {
// When targeting MSVC, emit C++ style type names for compatibility with // When targeting MSVC, emit C++ style type names for compatibility with
// .natvis visualizers (and perhaps other existing native debuggers?) // .natvis visualizers (and perhaps other existing native debuggers?)
let cpp_like_names = tcx.sess.target.target.options.is_like_msvc; let cpp_like_names = tcx.sess.target.options.is_like_msvc;
match *t.kind() { match *t.kind() {
ty::Bool => output.push_str("bool"), ty::Bool => output.push_str("bool"),

View file

@ -21,7 +21,6 @@ extern crate tracing;
extern crate rustc_middle; extern crate rustc_middle;
use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use rustc_hir::def_id::CrateNum; use rustc_hir::def_id::CrateNum;
use rustc_hir::LangItem; use rustc_hir::LangItem;
@ -134,7 +133,6 @@ pub struct CodegenResults {
pub modules: Vec<CompiledModule>, pub modules: Vec<CompiledModule>,
pub allocator_module: Option<CompiledModule>, pub allocator_module: Option<CompiledModule>,
pub metadata_module: Option<CompiledModule>, pub metadata_module: Option<CompiledModule>,
pub crate_hash: Svh,
pub metadata: rustc_middle::middle::cstore::EncodedMetadata, pub metadata: rustc_middle::middle::cstore::EncodedMetadata,
pub windows_subsystem: Option<String>, pub windows_subsystem: Option<String>,
pub linker_info: back::linker::LinkerInfo, pub linker_info: back::linker::LinkerInfo,
@ -144,6 +142,7 @@ pub struct CodegenResults {
pub fn provide(providers: &mut Providers) { pub fn provide(providers: &mut Providers) {
crate::back::symbol_export::provide(providers); crate::back::symbol_export::provide(providers);
crate::base::provide_both(providers); crate::base::provide_both(providers);
crate::target_features::provide(providers);
} }
pub fn provide_extern(providers: &mut Providers) { pub fn provide_extern(providers: &mut Providers) {

View file

@ -12,9 +12,9 @@ use crate::MemFlags;
use rustc_ast as ast; use rustc_ast as ast;
use rustc_hir::lang_items::LangItem; use rustc_hir::lang_items::LangItem;
use rustc_index::vec::Idx; use rustc_index::vec::Idx;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ConstValue; use rustc_middle::mir::interpret::ConstValue;
use rustc_middle::mir::AssertKind; use rustc_middle::mir::AssertKind;
use rustc_middle::mir::{self, SwitchTargets};
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt}; use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable}; use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
@ -24,8 +24,6 @@ use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
use rustc_target::abi::{self, LayoutOf}; use rustc_target::abi::{self, LayoutOf};
use rustc_target::spec::abi::Abi; use rustc_target::spec::abi::Abi;
use std::borrow::Cow;
/// Used by `FunctionCx::codegen_terminator` for emitting common patterns /// Used by `FunctionCx::codegen_terminator` for emitting common patterns
/// e.g., creating a basic block, calling a function, etc. /// e.g., creating a basic block, calling a function, etc.
struct TerminatorCodegenHelper<'tcx> { struct TerminatorCodegenHelper<'tcx> {
@ -198,42 +196,37 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mut bx: Bx, mut bx: Bx,
discr: &mir::Operand<'tcx>, discr: &mir::Operand<'tcx>,
switch_ty: Ty<'tcx>, switch_ty: Ty<'tcx>,
values: &Cow<'tcx, [u128]>, targets: &SwitchTargets,
targets: &Vec<mir::BasicBlock>,
) { ) {
let discr = self.codegen_operand(&mut bx, &discr); let discr = self.codegen_operand(&mut bx, &discr);
// `switch_ty` is redundant, sanity-check that. // `switch_ty` is redundant, sanity-check that.
assert_eq!(discr.layout.ty, switch_ty); assert_eq!(discr.layout.ty, switch_ty);
if targets.len() == 2 { helper.maybe_sideeffect(self.mir, &mut bx, targets.all_targets());
// If there are two targets, emit br instead of switch
let lltrue = helper.llblock(self, targets[0]); let mut target_iter = targets.iter();
let llfalse = helper.llblock(self, targets[1]); if target_iter.len() == 1 {
// If there are two targets (one conditional, one fallback), emit br instead of switch
let (test_value, target) = target_iter.next().unwrap();
let lltrue = helper.llblock(self, target);
let llfalse = helper.llblock(self, targets.otherwise());
if switch_ty == bx.tcx().types.bool { if switch_ty == bx.tcx().types.bool {
helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
// Don't generate trivial icmps when switching on bool // Don't generate trivial icmps when switching on bool
if let [0] = values[..] { match test_value {
bx.cond_br(discr.immediate(), llfalse, lltrue); 0 => bx.cond_br(discr.immediate(), llfalse, lltrue),
} else { 1 => bx.cond_br(discr.immediate(), lltrue, llfalse),
assert_eq!(&values[..], &[1]); _ => bug!(),
bx.cond_br(discr.immediate(), lltrue, llfalse);
} }
} else { } else {
let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty)); let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
let llval = bx.const_uint_big(switch_llty, values[0]); let llval = bx.const_uint_big(switch_llty, test_value);
let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval); let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
bx.cond_br(cmp, lltrue, llfalse); bx.cond_br(cmp, lltrue, llfalse);
} }
} else { } else {
helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
let (otherwise, targets) = targets.split_last().unwrap();
bx.switch( bx.switch(
discr.immediate(), discr.immediate(),
helper.llblock(self, *otherwise), helper.llblock(self, targets.otherwise()),
values target_iter.map(|(value, target)| (value, helper.llblock(self, target))),
.iter()
.zip(targets)
.map(|(&value, target)| (value, helper.llblock(self, *target))),
); );
} }
} }
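The rewritten `codegen_switchint_terminator` above drives everything off the `SwitchTargets` iterator: a single `(value, target)` pair plus an otherwise block lowers to a conditional branch (with a special case for `bool` discriminants), and anything larger lowers to a real switch. A standalone sketch of that decision logic, using a simplified stand-in for the MIR type:

```rust
// A toy stand-in for MIR's SwitchTargets: (value, target) pairs plus a
// fallback block; targets are just labels here.
struct SwitchTargets {
    values: Vec<(u128, &'static str)>,
    otherwise: &'static str,
}

#[derive(Debug, PartialEq)]
enum Lowered {
    CondBr { on_true: &'static str, on_false: &'static str },
    Switch { arms: Vec<(u128, &'static str)>, otherwise: &'static str },
}

fn lower_switch(is_bool: bool, targets: &SwitchTargets) -> Lowered {
    if targets.values.len() == 1 {
        let (test_value, target) = targets.values[0];
        if is_bool {
            // Don't generate a trivial comparison when switching on bool.
            return match test_value {
                0 => Lowered::CondBr { on_true: targets.otherwise, on_false: target },
                1 => Lowered::CondBr { on_true: target, on_false: targets.otherwise },
                _ => unreachable!(),
            };
        }
        // Non-bool: compare the discriminant against the single value, then branch.
        return Lowered::CondBr { on_true: target, on_false: targets.otherwise };
    }
    Lowered::Switch { arms: targets.values.clone(), otherwise: targets.otherwise }
}

fn main() {
    let t = SwitchTargets { values: vec![(1, "bb_true")], otherwise: "bb_false" };
    assert_eq!(
        lower_switch(true, &t),
        Lowered::CondBr { on_true: "bb_true", on_false: "bb_false" }
    );
}
```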
@ -879,7 +872,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let string = match ty.kind() { let string = match ty.kind() {
ty::Uint(_) => value.to_string(), ty::Uint(_) => value.to_string(),
ty::Int(int_ty) => { ty::Int(int_ty) => {
match int_ty.normalize(bx.tcx().sess.target.ptr_width) { match int_ty.normalize(bx.tcx().sess.target.pointer_width) {
ast::IntTy::I8 => (value as i8).to_string(), ast::IntTy::I8 => (value as i8).to_string(),
ast::IntTy::I16 => (value as i16).to_string(), ast::IntTy::I16 => (value as i16).to_string(),
ast::IntTy::I32 => (value as i32).to_string(), ast::IntTy::I32 => (value as i32).to_string(),
@ -975,8 +968,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
helper.funclet_br(self, &mut bx, target); helper.funclet_br(self, &mut bx, target);
} }
mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => { mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref targets } => {
self.codegen_switchint_terminator(helper, bx, discr, switch_ty, values, targets); self.codegen_switchint_terminator(helper, bx, discr, switch_ty, targets);
} }
mir::TerminatorKind::Return => { mir::TerminatorKind::Return => {

View file

@ -580,8 +580,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// stuffs. // stuffs.
fn int_type_width_signed(ty: Ty<'_>, tcx: TyCtxt<'_>) -> Option<(u64, bool)> { fn int_type_width_signed(ty: Ty<'_>, tcx: TyCtxt<'_>) -> Option<(u64, bool)> {
match ty.kind() { match ty.kind() {
ty::Int(t) => Some((t.bit_width().unwrap_or(u64::from(tcx.sess.target.ptr_width)), true)), ty::Int(t) => {
ty::Uint(t) => Some((t.bit_width().unwrap_or(u64::from(tcx.sess.target.ptr_width)), false)), Some((t.bit_width().unwrap_or(u64::from(tcx.sess.target.pointer_width)), true))
}
ty::Uint(t) => {
Some((t.bit_width().unwrap_or(u64::from(tcx.sess.target.pointer_width)), false))
}
_ => None, _ => None,
} }
} }
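`int_type_width_signed` above falls back to the target's pointer width when the type has no fixed bit width, i.e. for `isize` and `usize`. A minimal sketch of that fallback with hard-coded widths:

```rust
// Returns (bit width, signedness) for a few integer type names, using the
// target pointer width for the pointer-sized types.
fn int_type_width_signed(name: &str, pointer_width: u32) -> Option<(u64, bool)> {
    let ptr = u64::from(pointer_width);
    match name {
        "i8" => Some((8, true)),
        "i32" => Some((32, true)),
        "isize" => Some((ptr, true)),
        "u8" => Some((8, false)),
        "u32" => Some((32, false)),
        "usize" => Some((ptr, false)),
        _ => None,
    }
}

fn main() {
    assert_eq!(int_type_width_signed("isize", 64), Some((64, true)));
    assert_eq!(int_type_width_signed("u32", 64), Some((32, false)));
    assert_eq!(int_type_width_signed("f32", 64), None);
}
```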

View file

@ -346,8 +346,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
.. ..
} => { } => {
if variant_index != dataful_variant { if variant_index != dataful_variant {
if bx.cx().sess().target.target.arch == "arm" if bx.cx().sess().target.arch == "arm"
|| bx.cx().sess().target.target.arch == "aarch64" || bx.cx().sess().target.arch == "aarch64"
{ {
// FIXME(#34427): as workaround for LLVM bug on ARM, // FIXME(#34427): as workaround for LLVM bug on ARM,
// use memset of 0 before assigning niche value. // use memset of 0 before assigning niche value.

View file

@ -1,3 +1,5 @@
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::ty::query::Providers;
use rustc_session::Session; use rustc_session::Session;
use rustc_span::symbol::sym; use rustc_span::symbol::sym;
use rustc_span::symbol::Symbol; use rustc_span::symbol::Symbol;
@ -136,7 +138,7 @@ pub fn all_known_features() -> impl Iterator<Item = (&'static str, Option<Symbol
} }
pub fn supported_target_features(sess: &Session) -> &'static [(&'static str, Option<Symbol>)] { pub fn supported_target_features(sess: &Session) -> &'static [(&'static str, Option<Symbol>)] {
match &*sess.target.target.arch { match &*sess.target.arch {
"arm" => ARM_ALLOWED_FEATURES, "arm" => ARM_ALLOWED_FEATURES,
"aarch64" => AARCH64_ALLOWED_FEATURES, "aarch64" => AARCH64_ALLOWED_FEATURES,
"x86" | "x86_64" => X86_ALLOWED_FEATURES, "x86" | "x86_64" => X86_ALLOWED_FEATURES,
@ -148,3 +150,16 @@ pub fn supported_target_features(sess: &Session) -> &'static [(&'static str, Opt
_ => &[], _ => &[],
} }
} }
pub(crate) fn provide(providers: &mut Providers) {
providers.supported_target_features = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
if tcx.sess.opts.actually_rustdoc {
// rustdoc needs to be able to document functions that use all the features, so
// whitelist them all
all_known_features().map(|(a, b)| (a.to_string(), b)).collect()
} else {
supported_target_features(tcx.sess).iter().map(|&(a, b)| (a.to_string(), b)).collect()
}
};
}
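The new `supported_target_features` provider returns every known feature when running under rustdoc (so all of them can be documented) and only the architecture's supported set otherwise. A standalone sketch of that selection, with made-up feature tables:

```rust
type Feature = (&'static str, Option<&'static str>);

// Hypothetical tables; the real ones live in rustc_codegen_ssa::target_features.
const ALL_KNOWN_FEATURES: &[Feature] =
    &[("neon", None), ("sse2", None), ("avx512f", Some("avx512_target_feature"))];
const ARM_ALLOWED_FEATURES: &[Feature] = &[("neon", None)];

fn supported_target_features(arch: &str) -> &'static [Feature] {
    match arch {
        "arm" | "aarch64" => ARM_ALLOWED_FEATURES,
        _ => &[],
    }
}

// Mirrors the provider: rustdoc documents everything, normal builds only see
// what the current architecture actually supports.
fn provide(actually_rustdoc: bool, arch: &str) -> Vec<(String, Option<String>)> {
    let table = if actually_rustdoc { ALL_KNOWN_FEATURES } else { supported_target_features(arch) };
    table.iter().map(|&(a, b)| (a.to_string(), b.map(str::to_string))).collect()
}

fn main() {
    assert_eq!(provide(false, "arm").len(), 1);
    assert_eq!(provide(true, "x86_64").len(), 3);
}
```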

View file

@ -1,10 +1,11 @@
use super::write::WriteBackendMethods; use super::write::WriteBackendMethods;
use super::CodegenObject; use super::CodegenObject;
use crate::ModuleCodegen; use crate::{CodegenResults, ModuleCodegen};
use rustc_ast::expand::allocator::AllocatorKind; use rustc_ast::expand::allocator::AllocatorKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::ErrorReported; use rustc_errors::ErrorReported;
use rustc_middle::dep_graph::DepGraph; use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
use rustc_middle::middle::cstore::{EncodedMetadata, MetadataLoaderDyn}; use rustc_middle::middle::cstore::{EncodedMetadata, MetadataLoaderDyn};
use rustc_middle::ty::layout::{HasTyCtxt, TyAndLayout}; use rustc_middle::ty::layout::{HasTyCtxt, TyAndLayout};
use rustc_middle::ty::query::Providers; use rustc_middle::ty::query::Providers;
@ -80,8 +81,7 @@ pub trait CodegenBackend {
&self, &self,
ongoing_codegen: Box<dyn Any>, ongoing_codegen: Box<dyn Any>,
sess: &Session, sess: &Session,
dep_graph: &DepGraph, ) -> Result<(CodegenResults, FxHashMap<WorkProductId, WorkProduct>), ErrorReported>;
) -> Result<Box<dyn Any>, ErrorReported>;
/// This is called on the returned `Box<dyn Any>` from `join_codegen` /// This is called on the returned `Box<dyn Any>` from `join_codegen`
/// ///
@ -91,7 +91,7 @@ pub trait CodegenBackend {
fn link( fn link(
&self, &self,
sess: &Session, sess: &Session,
codegen_results: Box<dyn Any>, codegen_results: CodegenResults,
outputs: &OutputFilenames, outputs: &OutputFilenames,
) -> Result<(), ErrorReported>; ) -> Result<(), ErrorReported>;
} }
@ -124,4 +124,5 @@ pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Se
opt_level: config::OptLevel, opt_level: config::OptLevel,
) -> Arc<dyn Fn() -> Result<Self::TargetMachine, String> + Send + Sync>; ) -> Arc<dyn Fn() -> Result<Self::TargetMachine, String> + Send + Sync>;
fn target_cpu<'b>(&self, sess: &'b Session) -> &'b str; fn target_cpu<'b>(&self, sess: &'b Session) -> &'b str;
fn tune_cpu<'b>(&self, sess: &'b Session) -> Option<&'b str>;
} }

View file

@ -85,7 +85,7 @@ impl<'tcx, T> CodegenMethods<'tcx> for T where
} }
pub trait HasCodegen<'tcx>: pub trait HasCodegen<'tcx>:
Backend<'tcx> + ::std::ops::Deref<Target = <Self as HasCodegen<'tcx>>::CodegenCx> Backend<'tcx> + std::ops::Deref<Target = <Self as HasCodegen<'tcx>>::CodegenCx>
{ {
type CodegenCx: CodegenMethods<'tcx> type CodegenCx: CodegenMethods<'tcx>
+ BackendTypes< + BackendTypes<

View file

@ -51,7 +51,7 @@ pub trait DerivedTypeMethods<'tcx>: BaseTypeMethods<'tcx> + MiscMethods<'tcx> {
} }
fn type_int(&self) -> Self::Type { fn type_int(&self) -> Self::Type {
match &self.sess().target.target.target_c_int_width[..] { match &self.sess().target.target_c_int_width[..] {
"16" => self.type_i16(), "16" => self.type_i16(),
"32" => self.type_i32(), "32" => self.type_i32(),
"64" => self.type_i64(), "64" => self.type_i64(),

View file

@ -71,8 +71,8 @@ impl Fingerprint {
} }
} }
impl ::std::fmt::Display for Fingerprint { impl std::fmt::Display for Fingerprint {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(formatter, "{:x}-{:x}", self.0, self.1) write!(formatter, "{:x}-{:x}", self.0, self.1)
} }
} }

View file

@ -129,7 +129,7 @@ pub enum ProcessResult<O, E> {
struct ObligationTreeId(usize); struct ObligationTreeId(usize);
type ObligationTreeIdGenerator = type ObligationTreeIdGenerator =
::std::iter::Map<::std::ops::RangeFrom<usize>, fn(usize) -> ObligationTreeId>; std::iter::Map<std::ops::RangeFrom<usize>, fn(usize) -> ObligationTreeId>;
pub struct ObligationForest<O: ForestObligation> { pub struct ObligationForest<O: ForestObligation> {
/// The list of obligations. In between calls to `process_obligations`, /// The list of obligations. In between calls to `process_obligations`,

View file

@ -93,7 +93,7 @@ impl<K: Ord, V> SortedMap<K, V> {
/// Iterate over elements, sorted by key /// Iterate over elements, sorted by key
#[inline] #[inline]
pub fn iter(&self) -> ::std::slice::Iter<'_, (K, V)> { pub fn iter(&self) -> std::slice::Iter<'_, (K, V)> {
self.data.iter() self.data.iter()
} }
@ -134,7 +134,7 @@ impl<K: Ord, V> SortedMap<K, V> {
R: RangeBounds<K>, R: RangeBounds<K>,
{ {
let (start, end) = self.range_slice_indices(range); let (start, end) = self.range_slice_indices(range);
self.data.splice(start..end, ::std::iter::empty()); self.data.splice(start..end, std::iter::empty());
} }
/// Mutate all keys with the given function `f`. This mutation must not /// Mutate all keys with the given function `f`. This mutation must not
@ -241,7 +241,7 @@ impl<K: Ord, V> SortedMap<K, V> {
impl<K: Ord, V> IntoIterator for SortedMap<K, V> { impl<K: Ord, V> IntoIterator for SortedMap<K, V> {
type Item = (K, V); type Item = (K, V);
type IntoIter = ::std::vec::IntoIter<(K, V)>; type IntoIter = std::vec::IntoIter<(K, V)>;
fn into_iter(self) -> Self::IntoIter { fn into_iter(self) -> Self::IntoIter {
self.data.into_iter() self.data.into_iter()

View file

@ -20,7 +20,7 @@ pub struct StableHasher {
} }
impl ::std::fmt::Debug for StableHasher { impl ::std::fmt::Debug for StableHasher {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self.state) write!(f, "{:?}", self.state)
} }
} }

View file

@ -14,12 +14,6 @@ pub struct WorkQueue<T: Idx> {
} }
impl<T: Idx> WorkQueue<T> { impl<T: Idx> WorkQueue<T> {
/// Creates a new work queue with all the elements from (0..len).
#[inline]
pub fn with_all(len: usize) -> Self {
WorkQueue { deque: (0..len).map(T::new).collect(), set: BitSet::new_filled(len) }
}
/// Creates a new work queue that starts empty, where elements range from (0..len). /// Creates a new work queue that starts empty, where elements range from (0..len).
#[inline] #[inline]
pub fn with_none(len: usize) -> Self { pub fn with_none(len: usize) -> Self {

View file

@ -10,7 +10,7 @@ crate-type = ["dylib"]
[dependencies] [dependencies]
libc = "0.2" libc = "0.2"
tracing = { version = "0.1.18" } tracing = { version = "0.1.18" }
tracing-subscriber = { version = "0.2.10", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] } tracing-subscriber = { version = "0.2.13", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] }
tracing-tree = "0.1.6" tracing-tree = "0.1.6"
rustc_middle = { path = "../rustc_middle" } rustc_middle = { path = "../rustc_middle" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_ast_pretty = { path = "../rustc_ast_pretty" }

View file

@ -134,9 +134,52 @@ pub fn diagnostics_registry() -> Registry {
Registry::new(&rustc_error_codes::DIAGNOSTICS) Registry::new(&rustc_error_codes::DIAGNOSTICS)
} }
pub struct RunCompiler<'a, 'b> {
at_args: &'a [String],
callbacks: &'b mut (dyn Callbacks + Send),
file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
emitter: Option<Box<dyn Write + Send>>,
make_codegen_backend:
Option<Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>>,
}
impl<'a, 'b> RunCompiler<'a, 'b> {
pub fn new(at_args: &'a [String], callbacks: &'b mut (dyn Callbacks + Send)) -> Self {
Self { at_args, callbacks, file_loader: None, emitter: None, make_codegen_backend: None }
}
pub fn set_make_codegen_backend(
&mut self,
make_codegen_backend: Option<
Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>,
>,
) -> &mut Self {
self.make_codegen_backend = make_codegen_backend;
self
}
pub fn set_emitter(&mut self, emitter: Option<Box<dyn Write + Send>>) -> &mut Self {
self.emitter = emitter;
self
}
pub fn set_file_loader(
&mut self,
file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
) -> &mut Self {
self.file_loader = file_loader;
self
}
pub fn run(self) -> interface::Result<()> {
run_compiler(
self.at_args,
self.callbacks,
self.file_loader,
self.emitter,
self.make_codegen_backend,
)
}
}
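The new `RunCompiler` type replaces the old five-argument `run_compiler` call with a builder: construct it from the required arguments and callbacks, chain optional setters, then call `run()`. A self-contained sketch of the same pattern with hypothetical option types (the real builder forwards these into the compiler session):

```rust
// Hypothetical stand-in for one of the optional hooks the real builder accepts.
type Emitter = Box<dyn Fn(&str) + Send>;

struct RunCompiler<'a> {
    at_args: &'a [String],
    emitter: Option<Emitter>,
}

impl<'a> RunCompiler<'a> {
    fn new(at_args: &'a [String]) -> Self {
        Self { at_args, emitter: None }
    }

    // Optional pieces are added through chainable setters.
    fn set_emitter(&mut self, emitter: Option<Emitter>) -> &mut Self {
        self.emitter = emitter;
        self
    }

    fn run(self) -> Result<(), String> {
        if let Some(emit) = &self.emitter {
            emit(&format!("compiling with args: {:?}", self.at_args));
        }
        Ok(())
    }
}

fn main() -> Result<(), String> {
    let args = vec!["rustc".to_string(), "main.rs".to_string()];
    let mut rc = RunCompiler::new(&args);
    let emitter: Emitter = Box::new(|msg: &str| eprintln!("{}", msg));
    rc.set_emitter(Some(emitter));
    rc.run()
}
```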
// Parse args and run the compiler. This is the primary entry point for rustc. // Parse args and run the compiler. This is the primary entry point for rustc.
// The FileLoader provides a way to load files from sources other than the file system. // The FileLoader provides a way to load files from sources other than the file system.
pub fn run_compiler( fn run_compiler(
at_args: &[String], at_args: &[String],
callbacks: &mut (dyn Callbacks + Send), callbacks: &mut (dyn Callbacks + Send),
file_loader: Option<Box<dyn FileLoader + Send + Sync>>, file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
@ -155,8 +198,7 @@ pub fn run_compiler(
), ),
} }
} }
let diagnostic_output = let diagnostic_output = emitter.map_or(DiagnosticOutput::Default, DiagnosticOutput::Raw);
emitter.map(|emitter| DiagnosticOutput::Raw(emitter)).unwrap_or(DiagnosticOutput::Default);
let matches = match handle_options(&args) { let matches = match handle_options(&args) {
Some(matches) => matches, Some(matches) => matches,
None => return Ok(()), None => return Ok(()),
@ -600,7 +642,7 @@ impl RustcDefaultCalls {
let codegen_results: CodegenResults = json::decode(&rlink_data).unwrap_or_else(|err| { let codegen_results: CodegenResults = json::decode(&rlink_data).unwrap_or_else(|err| {
sess.fatal(&format!("failed to decode rlink: {}", err)); sess.fatal(&format!("failed to decode rlink: {}", err));
}); });
compiler.codegen_backend().link(&sess, Box::new(codegen_results), &outputs) compiler.codegen_backend().link(&sess, codegen_results, &outputs)
} else { } else {
sess.fatal("rlink must be a file") sess.fatal("rlink must be a file")
} }
@ -628,7 +670,7 @@ impl RustcDefaultCalls {
Input::File(ref ifile) => { Input::File(ref ifile) => {
let path = &(*ifile); let path = &(*ifile);
let mut v = Vec::new(); let mut v = Vec::new();
locator::list_file_metadata(&sess.target.target, path, metadata_loader, &mut v) locator::list_file_metadata(&sess.target, path, metadata_loader, &mut v)
.unwrap(); .unwrap();
println!("{}", String::from_utf8(v).unwrap()); println!("{}", String::from_utf8(v).unwrap());
} }
@ -672,7 +714,8 @@ impl RustcDefaultCalls {
for req in &sess.opts.prints { for req in &sess.opts.prints {
match *req { match *req {
TargetList => { TargetList => {
let mut targets = rustc_target::spec::get_targets().collect::<Vec<String>>(); let mut targets =
rustc_target::spec::TARGETS.iter().copied().collect::<Vec<_>>();
targets.sort(); targets.sort();
println!("{}", targets.join("\n")); println!("{}", targets.join("\n"));
} }
@ -681,7 +724,7 @@ impl RustcDefaultCalls {
"{}", "{}",
sess.target_tlib_path.as_ref().unwrap_or(&sess.host_tlib_path).dir.display() sess.target_tlib_path.as_ref().unwrap_or(&sess.host_tlib_path).dir.display()
), ),
TargetSpec => println!("{}", sess.target.target.to_json().pretty()), TargetSpec => println!("{}", sess.target.to_json().pretty()),
FileNames | CrateName => { FileNames | CrateName => {
let input = input.unwrap_or_else(|| { let input = input.unwrap_or_else(|| {
early_error(ErrorOutputType::default(), "no input file provided") early_error(ErrorOutputType::default(), "no input file provided")
@ -1215,9 +1258,9 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
// If backtraces are enabled, also print the query stack // If backtraces are enabled, also print the query stack
let backtrace = env::var_os("RUST_BACKTRACE").map(|x| &x != "0").unwrap_or(false); let backtrace = env::var_os("RUST_BACKTRACE").map(|x| &x != "0").unwrap_or(false);
if backtrace { let num_frames = if backtrace { None } else { Some(2) };
TyCtxt::try_print_query_stack(&handler);
} TyCtxt::try_print_query_stack(&handler, num_frames);
#[cfg(windows)] #[cfg(windows)]
unsafe { unsafe {
@ -1285,7 +1328,7 @@ pub fn main() -> ! {
}) })
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
run_compiler(&args, &mut callbacks, None, None, None) RunCompiler::new(&args, &mut callbacks).run()
}); });
// The extra `\t` is necessary to align this label with the others. // The extra `\t` is necessary to align this label with the others.
print_time_passes_entry(callbacks.time_passes, "\ttotal", start.elapsed()); print_time_passes_entry(callbacks.time_passes, "\ttotal", start.elapsed());

View file

@ -460,6 +460,8 @@ E0774: include_str!("./error_codes/E0774.md"),
E0775: include_str!("./error_codes/E0775.md"), E0775: include_str!("./error_codes/E0775.md"),
E0776: include_str!("./error_codes/E0776.md"), E0776: include_str!("./error_codes/E0776.md"),
E0777: include_str!("./error_codes/E0777.md"), E0777: include_str!("./error_codes/E0777.md"),
E0778: include_str!("./error_codes/E0778.md"),
E0779: include_str!("./error_codes/E0779.md"),
; ;
// E0006, // merged with E0005 // E0006, // merged with E0005
// E0008, // cannot bind by-move into a pattern guard // E0008, // cannot bind by-move into a pattern guard

View file

@ -1,3 +1,5 @@
#### Note: this error code is no longer emitted by the compiler.
This error indicates that the bindings in a match arm would require a value to This error indicates that the bindings in a match arm would require a value to
be moved into more than one location, thus violating unique ownership. Code be moved into more than one location, thus violating unique ownership. Code
like the following is invalid as it requires the entire `Option<String>` to be like the following is invalid as it requires the entire `Option<String>` to be
@ -6,11 +8,13 @@ inner `String` to be moved into a variable called `s`.
Erroneous code example: Erroneous code example:
```compile_fail,E0007 ```compile_fail,E0382
#![feature(bindings_after_at)]
let x = Some("s".to_string()); let x = Some("s".to_string());
match x { match x {
op_string @ Some(s) => {}, // error: cannot bind by-move with sub-bindings op_string @ Some(s) => {}, // error: use of moved value
None => {}, None => {},
} }
``` ```
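Since the example above now reports E0382 (use of a moved value) rather than E0007, one common fix, shown here only as a sketch, is to bind the inner value by reference so nothing is moved twice:

```rust
fn main() {
    let x = Some("s".to_string());
    match x {
        // Borrow the inner `String` instead of moving it, so only one binding
        // ever takes ownership.
        Some(ref s) => println!("inner: {}", s),
        None => {}
    }
}
```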

View file

@ -21,7 +21,7 @@ impl Foo {
The `self` keyword can only be used inside methods, which are associated The `self` keyword can only be used inside methods, which are associated
functions (functions defined inside of a `trait` or `impl` block) that have a functions (functions defined inside of a `trait` or `impl` block) that have a
`self` receiver as its first parameter, like `self`, `&self`, `&mut self` or `self` receiver as its first parameter, like `self`, `&self`, `&mut self` or
`self: &mut Pin<Self>` (this last one is an example of an ["abitrary `self` `self: &mut Pin<Self>` (this last one is an example of an ["arbitrary `self`
type"](https://github.com/rust-lang/rust/issues/44874)). type"](https://github.com/rust-lang/rust/issues/44874)).
Check if the associated function's parameter list should have contained a `self` Check if the associated function's parameter list should have contained a `self`

View file

@ -3,12 +3,8 @@ An unstable feature in `const` contexts was used.
Erroneous code example: Erroneous code example:
```compile_fail,E0723 ```compile_fail,E0723
trait T {} const fn foo<T: Copy>(_: T) { // error!
// ...
impl T for () {}
const fn foo() -> impl T { // error: `impl Trait` in const fn is unstable
()
} }
``` ```
@ -18,11 +14,7 @@ feature flag:
``` ```
#![feature(const_fn)] #![feature(const_fn)]
trait T {} const fn foo<T: Copy>(_: T) { // ok!
// ...
impl T for () {}
const fn foo() -> impl T {
()
} }
``` ```

View file

@ -0,0 +1,35 @@
The `instruction_set` attribute was malformed.
Erroneous code example:
```compile_fail,E0778
#![feature(isa_attribute)]
#[instruction_set()] // error: expected one argument
pub fn something() {}
fn main() {}
```
The parenthesized `instruction_set` attribute requires the parameter to be
specified:
```
#![feature(isa_attribute)]
#[cfg_attr(target_arch="arm", instruction_set(arm::a32))]
fn something() {}
```
or:
```
#![feature(isa_attribute)]
#[cfg_attr(target_arch="arm", instruction_set(arm::t32))]
fn something() {}
```
For more information see the [`instruction_set` attribute][isa-attribute]
section of the Reference.
[isa-attribute]: https://doc.rust-lang.org/reference/attributes/codegen.html

View file

@ -0,0 +1,32 @@
An unknown argument was given to the `instruction_set` attribute.
Erroneous code example:
```compile_fail,E0779
#![feature(isa_attribute)]
#[instruction_set(intel::x64)] // error: invalid argument
pub fn something() {}
fn main() {}
```
The `instruction_set` attribute only supports two arguments currently:
* arm::a32
* arm::t32
Passing any other argument to the `instruction_set` attribute triggers this
error. Example:
```
#![feature(isa_attribute)]
#[cfg_attr(target_arch="arm", instruction_set(arm::a32))] // ok!
pub fn something() {}
fn main() {}
```
For more information see the [`instruction_set` attribute][isa-attribute]
section of the Reference.
[isa-attribute]: https://doc.rust-lang.org/reference/attributes/codegen.html

View file

@ -121,11 +121,6 @@ impl Diagnostic {
self.level == Level::Cancelled self.level == Level::Cancelled
} }
/// Set the sorting span.
pub fn set_sort_span(&mut self, sp: Span) {
self.sort_span = sp;
}
/// Adds a span/label to be included in the resulting snippet. /// Adds a span/label to be included in the resulting snippet.
/// ///
/// This is pushed onto the [`MultiSpan`] that was created when the diagnostic /// This is pushed onto the [`MultiSpan`] that was created when the diagnostic
@ -535,14 +530,6 @@ impl Diagnostic {
&self.message &self.message
} }
/// Used by a lint. Copies over all details *but* the "main
/// message".
pub fn copy_details_not_message(&mut self, from: &Diagnostic) {
self.span = from.span.clone();
self.code = from.code.clone();
self.children.extend(from.children.iter().cloned())
}
/// Convenience function for internal use, clients should use one of the /// Convenience function for internal use, clients should use one of the
/// public methods above. /// public methods above.
pub fn sub( pub fn sub(

View file

@ -510,8 +510,6 @@ impl Emitter for SilentEmitter {
fn emit_diagnostic(&mut self, _: &Diagnostic) {} fn emit_diagnostic(&mut self, _: &Diagnostic) {}
} }
/// Maximum number of lines we will print for each error; arbitrary.
pub const MAX_HIGHLIGHT_LINES: usize = 6;
/// Maximum number of lines we will print for a multiline suggestion; arbitrary. /// Maximum number of lines we will print for a multiline suggestion; arbitrary.
/// ///
/// This should be replaced with a more involved mechanism to output multiline suggestions that /// This should be replaced with a more involved mechanism to output multiline suggestions that

View file

@ -148,17 +148,6 @@ impl Annotatable {
} }
} }
pub fn map_item_or<F, G>(self, mut f: F, mut or: G) -> Annotatable
where
F: FnMut(P<ast::Item>) -> P<ast::Item>,
G: FnMut(Annotatable) -> Annotatable,
{
match self {
Annotatable::Item(i) => Annotatable::Item(f(i)),
_ => or(self),
}
}
pub fn expect_trait_item(self) -> P<ast::AssocItem> { pub fn expect_trait_item(self) -> P<ast::AssocItem> {
match self { match self {
Annotatable::TraitItem(i) => i, Annotatable::TraitItem(i) => i,
@ -1052,9 +1041,6 @@ impl<'a> ExtCtxt<'a> {
.chain(components.iter().map(|&s| Ident::with_dummy_span(s))) .chain(components.iter().map(|&s| Ident::with_dummy_span(s)))
.collect() .collect()
} }
pub fn name_of(&self, st: &str) -> Symbol {
Symbol::intern(st)
}
pub fn check_unused_macros(&mut self) { pub fn check_unused_macros(&mut self) {
self.resolver.check_unused_macros(); self.resolver.check_unused_macros();

View file

@ -139,24 +139,6 @@ impl<'a> ExtCtxt<'a> {
ast::Lifetime { id: ast::DUMMY_NODE_ID, ident: ident.with_span_pos(span) } ast::Lifetime { id: ast::DUMMY_NODE_ID, ident: ident.with_span_pos(span) }
} }
pub fn lifetime_def(
&self,
span: Span,
ident: Ident,
attrs: Vec<ast::Attribute>,
bounds: ast::GenericBounds,
) -> ast::GenericParam {
let lifetime = self.lifetime(span, ident);
ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
attrs: attrs.into(),
bounds,
kind: ast::GenericParamKind::Lifetime,
is_placeholder: false,
}
}
pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt { pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
ast::Stmt { ast::Stmt {
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
@ -465,24 +447,6 @@ impl<'a> ExtCtxt<'a> {
self.pat_tuple_struct(span, path, vec![pat]) self.pat_tuple_struct(span, path, vec![pat])
} }
pub fn pat_none(&self, span: Span) -> P<ast::Pat> {
let some = self.std_path(&[sym::option, sym::Option, sym::None]);
let path = self.path_global(span, some);
self.pat_path(span, path)
}
pub fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
let some = self.std_path(&[sym::result, sym::Result, sym::Ok]);
let path = self.path_global(span, some);
self.pat_tuple_struct(span, path, vec![pat])
}
pub fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
let some = self.std_path(&[sym::result, sym::Result, sym::Err]);
let path = self.path_global(span, some);
self.pat_tuple_struct(span, path, vec![pat])
}
pub fn arm(&self, span: Span, pat: P<ast::Pat>, expr: P<ast::Expr>) -> ast::Arm { pub fn arm(&self, span: Span, pat: P<ast::Pat>, expr: P<ast::Expr>) -> ast::Arm {
ast::Arm { ast::Arm {
attrs: vec![], attrs: vec![],
@ -514,26 +478,6 @@ impl<'a> ExtCtxt<'a> {
self.expr(span, ast::ExprKind::If(cond, self.block_expr(then), els)) self.expr(span, ast::ExprKind::If(cond, self.block_expr(then), els))
} }
pub fn lambda_fn_decl(
&self,
span: Span,
fn_decl: P<ast::FnDecl>,
body: P<ast::Expr>,
fn_decl_span: Span,
) -> P<ast::Expr> {
self.expr(
span,
ast::ExprKind::Closure(
ast::CaptureBy::Ref,
ast::Async::No,
ast::Movability::Movable,
fn_decl,
body,
fn_decl_span,
),
)
}
pub fn lambda(&self, span: Span, ids: Vec<Ident>, body: P<ast::Expr>) -> P<ast::Expr> { pub fn lambda(&self, span: Span, ids: Vec<Ident>, body: P<ast::Expr>) -> P<ast::Expr> {
let fn_decl = self.fn_decl( let fn_decl = self.fn_decl(
ids.iter().map(|id| self.param(span, *id, self.ty(span, ast::TyKind::Infer))).collect(), ids.iter().map(|id| self.param(span, *id, self.ty(span, ast::TyKind::Infer))).collect(),
@ -610,47 +554,6 @@ impl<'a> ExtCtxt<'a> {
}) })
} }
pub fn variant(&self, span: Span, ident: Ident, tys: Vec<P<ast::Ty>>) -> ast::Variant {
let vis_span = span.shrink_to_lo();
let fields: Vec<_> = tys
.into_iter()
.map(|ty| ast::StructField {
span: ty.span,
ty,
ident: None,
vis: ast::Visibility {
span: vis_span,
kind: ast::VisibilityKind::Inherited,
tokens: None,
},
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
is_placeholder: false,
})
.collect();
let vdata = if fields.is_empty() {
ast::VariantData::Unit(ast::DUMMY_NODE_ID)
} else {
ast::VariantData::Tuple(fields, ast::DUMMY_NODE_ID)
};
ast::Variant {
attrs: Vec::new(),
data: vdata,
disr_expr: None,
id: ast::DUMMY_NODE_ID,
ident,
vis: ast::Visibility {
span: vis_span,
kind: ast::VisibilityKind::Inherited,
tokens: None,
},
span,
is_placeholder: false,
}
}
pub fn item_static( pub fn item_static(
&self, &self,
span: Span, span: Span,

View file

@ -20,7 +20,7 @@ use rustc_data_structures::map_in_place::MapInPlace;
use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_errors::{struct_span_err, Applicability, PResult}; use rustc_errors::{struct_span_err, Applicability, PResult};
use rustc_feature::Features; use rustc_feature::Features;
use rustc_parse::parser::Parser; use rustc_parse::parser::{AttemptLocalParseRecovery, Parser};
use rustc_parse::validate_attr; use rustc_parse::validate_attr;
use rustc_session::lint::builtin::UNUSED_DOC_COMMENTS; use rustc_session::lint::builtin::UNUSED_DOC_COMMENTS;
use rustc_session::lint::BuiltinLintDiagnostics; use rustc_session::lint::BuiltinLintDiagnostics;
@ -921,7 +921,7 @@ pub fn parse_ast_fragment<'a>(
let mut stmts = SmallVec::new(); let mut stmts = SmallVec::new();
// Won't make progress on a `}`. // Won't make progress on a `}`.
while this.token != token::Eof && this.token != token::CloseDelim(token::Brace) { while this.token != token::Eof && this.token != token::CloseDelim(token::Brace) {
if let Some(stmt) = this.parse_full_stmt()? { if let Some(stmt) = this.parse_full_stmt(AttemptLocalParseRecovery::Yes)? {
stmts.push(stmt); stmts.push(stmt);
} }
} }

View file

@ -270,6 +270,9 @@ declare_features! (
(accepted, track_caller, "1.46.0", Some(47809), None), (accepted, track_caller, "1.46.0", Some(47809), None),
/// Allows `#[doc(alias = "...")]`. /// Allows `#[doc(alias = "...")]`.
(accepted, doc_alias, "1.48.0", Some(50146), None), (accepted, doc_alias, "1.48.0", Some(50146), None),
/// Allows patterns with concurrent by-move and by-ref bindings.
/// For example, you can write `Foo(a, ref b)` where `a` is by-move and `b` is by-ref.
(accepted, move_ref_pattern, "1.48.0", Some(68354), None),
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// feature-group-end: accepted features // feature-group-end: accepted features
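With `move_ref_pattern` accepted above, patterns that mix by-move and by-ref bindings compile without a feature gate on compilers that include this change; a small illustration with a made-up tuple struct:

```rust
struct Foo(String, String);

fn main() {
    let foo = Foo("moved".to_string(), "borrowed".to_string());
    // `a` binds by move while `b` binds by reference in the same pattern,
    // which is exactly what the accepted `move_ref_pattern` feature permits.
    let Foo(a, ref b) = foo;
    println!("{} {}", a, b);
}
```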

View file

@ -526,10 +526,6 @@ declare_features! (
/// For example, you can write `x @ Some(y)`. /// For example, you can write `x @ Some(y)`.
(active, bindings_after_at, "1.41.0", Some(65490), None), (active, bindings_after_at, "1.41.0", Some(65490), None),
/// Allows patterns with concurrent by-move and by-ref bindings.
/// For example, you can write `Foo(a, ref b)` where `a` is by-move and `b` is by-ref.
(active, move_ref_pattern, "1.42.0", Some(68354), None),
/// Allows `impl const Trait for T` syntax. /// Allows `impl const Trait for T` syntax.
(active, const_trait_impl, "1.42.0", Some(67792), None), (active, const_trait_impl, "1.42.0", Some(67792), None),
@ -596,6 +592,12 @@ declare_features! (
/// Allows rustc to inject a default alloc_error_handler /// Allows rustc to inject a default alloc_error_handler
(active, default_alloc_error_handler, "1.48.0", Some(66741), None), (active, default_alloc_error_handler, "1.48.0", Some(66741), None),
/// Allows argument and return position `impl Trait` in a `const fn`.
(active, const_impl_trait, "1.48.0", Some(77463), None),
/// Allows `#[instruction_set(_)]` attribute
(active, isa_attribute, "1.48.0", Some(74727), None),
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// feature-group-end: actual feature gates // feature-group-end: actual feature gates
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------

View file

@ -336,6 +336,8 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
optimize, AssumedUsed, template!(List: "size|speed"), optimize_attribute, optimize, AssumedUsed, template!(List: "size|speed"), optimize_attribute,
experimental!(optimize), experimental!(optimize),
), ),
// RFC 2867
gated!(instruction_set, AssumedUsed, template!(List: "set"), isa_attribute, experimental!(instruction_set)),
gated!(ffi_returns_twice, AssumedUsed, template!(Word), experimental!(ffi_returns_twice)), gated!(ffi_returns_twice, AssumedUsed, template!(Word), experimental!(ffi_returns_twice)),
gated!(ffi_pure, AssumedUsed, template!(Word), experimental!(ffi_pure)), gated!(ffi_pure, AssumedUsed, template!(Word), experimental!(ffi_pure)),

View file

@ -75,33 +75,6 @@ pub fn link_or_copy<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<Li
} }
} }
#[derive(Debug)]
pub enum RenameOrCopyRemove {
Rename,
CopyRemove,
}
/// Rename `p` into `q`, preferring to use `rename` if possible.
/// If `rename` fails (rename may fail for reasons such as crossing
/// filesystem), fallback to copy & remove
pub fn rename_or_copy_remove<P: AsRef<Path>, Q: AsRef<Path>>(
p: P,
q: Q,
) -> io::Result<RenameOrCopyRemove> {
let p = p.as_ref();
let q = q.as_ref();
match fs::rename(p, q) {
Ok(()) => Ok(RenameOrCopyRemove::Rename),
Err(_) => match fs::copy(p, q) {
Ok(_) => {
fs::remove_file(p)?;
Ok(RenameOrCopyRemove::CopyRemove)
}
Err(e) => Err(e),
},
}
}
#[cfg(unix)] #[cfg(unix)]
pub fn path_to_c_string(p: &Path) -> CString { pub fn path_to_c_string(p: &Path) -> CString {
use std::ffi::OsStr; use std::ffi::OsStr;

View file

@ -118,7 +118,7 @@ impl DefKey {
let DisambiguatedDefPathData { ref data, disambiguator } = self.disambiguated_data; let DisambiguatedDefPathData { ref data, disambiguator } = self.disambiguated_data;
::std::mem::discriminant(data).hash(&mut hasher); std::mem::discriminant(data).hash(&mut hasher);
if let Some(name) = data.get_opt_name() { if let Some(name) = data.get_opt_name() {
// Get a stable hash by considering the symbol chars rather than // Get a stable hash by considering the symbol chars rather than
// the symbol index. // the symbol index.
@ -188,10 +188,6 @@ pub struct DefPath {
} }
impl DefPath { impl DefPath {
pub fn is_local(&self) -> bool {
self.krate == LOCAL_CRATE
}
pub fn make<FN>(krate: CrateNum, start_index: DefIndex, mut get_key: FN) -> DefPath pub fn make<FN>(krate: CrateNum, start_index: DefIndex, mut get_key: FN) -> DefPath
where where
FN: FnMut(DefIndex) -> DefKey, FN: FnMut(DefIndex) -> DefKey,

View file

@ -3,7 +3,6 @@ use crate::def_id::DefId;
crate use crate::hir_id::HirId; crate use crate::hir_id::HirId;
use crate::{itemlikevisit, LangItem}; use crate::{itemlikevisit, LangItem};
use rustc_ast::node_id::NodeMap;
use rustc_ast::util::parser::ExprPrecedence; use rustc_ast::util::parser::ExprPrecedence;
use rustc_ast::{self as ast, CrateSugar, LlvmAsmDialect}; use rustc_ast::{self as ast, CrateSugar, LlvmAsmDialect};
use rustc_ast::{AttrVec, Attribute, FloatTy, IntTy, Label, LitKind, StrStyle, UintTy}; use rustc_ast::{AttrVec, Attribute, FloatTy, IntTy, Label, LitKind, StrStyle, UintTy};
@ -306,10 +305,6 @@ impl GenericArgs<'_> {
Self { args: &[], bindings: &[], parenthesized: false } Self { args: &[], bindings: &[], parenthesized: false }
} }
pub fn is_empty(&self) -> bool {
self.args.is_empty() && self.bindings.is_empty() && !self.parenthesized
}
pub fn inputs(&self) -> &[Ty<'_>] { pub fn inputs(&self) -> &[Ty<'_>] {
if self.parenthesized { if self.parenthesized {
for arg in self.args { for arg in self.args {
@ -467,23 +462,6 @@ impl Generics<'hir> {
} }
} }
pub fn own_counts(&self) -> GenericParamCount {
// We could cache this as a property of `GenericParamCount`, but
// the aim is to refactor this away entirely eventually and the
// presence of this method will be a constant reminder.
let mut own_counts: GenericParamCount = Default::default();
for param in self.params {
match param.kind {
GenericParamKind::Lifetime { .. } => own_counts.lifetimes += 1,
GenericParamKind::Type { .. } => own_counts.types += 1,
GenericParamKind::Const { .. } => own_counts.consts += 1,
};
}
own_counts
}
pub fn get_named(&self, name: Symbol) -> Option<&GenericParam<'_>> { pub fn get_named(&self, name: Symbol) -> Option<&GenericParam<'_>> {
for param in self.params { for param in self.params {
if name == param.name.ident().name { if name == param.name.ident().name {
@ -2679,8 +2657,6 @@ pub struct Upvar {
pub span: Span, pub span: Span,
} }
pub type CaptureModeMap = NodeMap<CaptureBy>;
// The TraitCandidate's import_ids is empty if the trait is defined in the same module, and // The TraitCandidate's import_ids is empty if the trait is defined in the same module, and
// has length > 0 if the trait is found through a chain of imports, starting with the // has length > 0 if the trait is found through a chain of imports, starting with the
// import/use statement in the scope where the trait is used. // import/use statement in the scope where the trait is used.

View file

@ -45,5 +45,3 @@ pub const CRATE_HIR_ID: HirId = HirId {
owner: LocalDefId { local_def_index: CRATE_DEF_INDEX }, owner: LocalDefId { local_def_index: CRATE_DEF_INDEX },
local_id: ItemLocalId::from_u32(0), local_id: ItemLocalId::from_u32(0),
}; };
pub const DUMMY_ITEM_LOCAL_ID: ItemLocalId = ItemLocalId::MAX;

View file

@ -58,25 +58,6 @@ impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
} }
impl hir::Pat<'_> { impl hir::Pat<'_> {
pub fn is_refutable(&self) -> bool {
match self.kind {
PatKind::Lit(_)
| PatKind::Range(..)
| PatKind::Path(hir::QPath::Resolved(Some(..), _) | hir::QPath::TypeRelative(..)) => {
true
}
PatKind::Path(hir::QPath::Resolved(_, ref path))
| PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..)
| PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => match path.res {
Res::Def(DefKind::Variant, _) => true,
_ => false,
},
PatKind::Slice(..) => true,
_ => false,
}
}
/// Call `f` on every "binding" in a pattern, e.g., on `a` in /// Call `f` on every "binding" in a pattern, e.g., on `a` in
/// `match foo() { Some(a) => (), None => () }` /// `match foo() { Some(a) => (), None => () }`
pub fn each_binding(&self, mut f: impl FnMut(hir::BindingAnnotation, HirId, Span, Ident)) { pub fn each_binding(&self, mut f: impl FnMut(hir::BindingAnnotation, HirId, Span, Ident)) {
@ -117,15 +98,6 @@ impl hir::Pat<'_> {
}) })
} }
/// Checks if the pattern contains any patterns that bind something to
/// an ident or wildcard, e.g., `foo`, or `Foo(_)`, `foo @ Bar(..)`,
pub fn contains_bindings_or_wild(&self) -> bool {
self.satisfies(|p| match p.kind {
PatKind::Binding(..) | PatKind::Wild => true,
_ => false,
})
}
/// Checks if the pattern satisfies the given predicate on some sub-pattern. /// Checks if the pattern satisfies the given predicate on some sub-pattern.
fn satisfies(&self, pred: impl Fn(&hir::Pat<'_>) -> bool) -> bool { fn satisfies(&self, pred: impl Fn(&hir::Pat<'_>) -> bool) -> bool {
let mut satisfies = false; let mut satisfies = false;

View file

@@ -44,9 +44,6 @@ pub trait PpAnn {
     fn nested(&self, _state: &mut State<'_>, _nested: Nested) {}
     fn pre(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
     fn post(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
-    fn try_fetch_item(&self, _: hir::HirId) -> Option<&hir::Item<'_>> {
-        None
-    }
 }
 pub struct NoAnn;
@@ -54,9 +51,6 @@ impl PpAnn for NoAnn {}
 pub const NO_ANN: &dyn PpAnn = &NoAnn;
 impl PpAnn for hir::Crate<'_> {
-    fn try_fetch_item(&self, item: hir::HirId) -> Option<&hir::Item<'_>> {
-        Some(self.item(item))
-    }
     fn nested(&self, state: &mut State<'_>, nested: Nested) {
         match nested {
             Nested::Item(id) => state.print_item(self.item(id.id)),
@@ -141,6 +135,9 @@ impl std::ops::DerefMut for State<'_> {
 }
 impl<'a> PrintState<'a> for State<'a> {
+    fn insert_extra_parens(&self) -> bool {
+        true
+    }
     fn comments(&mut self) -> &mut Option<Comments<'a>> {
         &mut self.comments
     }

View file

@@ -113,13 +113,6 @@ impl Default for RegionckMode {
 }
 impl RegionckMode {
-    pub fn suppressed(self) -> bool {
-        match self {
-            Self::Solve => false,
-            Self::Erase { suppress_errors } => suppress_errors,
-        }
-    }
     /// Indicates that the MIR borrowck will repeat these region
     /// checks, so we should ignore errors if NLL is (unconditionally)
     /// enabled.
@@ -420,15 +413,6 @@ pub enum SubregionOrigin<'tcx> {
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(SubregionOrigin<'_>, 32);
-/// Places that type/region parameters can appear.
-#[derive(Clone, Copy, Debug)]
-pub enum ParameterOrigin {
-    Path,               // foo::bar
-    MethodCall,         // foo.bar() <-- parameters on impl providing bar()
-    OverloadedOperator, // a + b when overloaded
-    OverloadedDeref,    // *a when overloaded
-}
 /// Times when we replace late-bound regions with variables:
 #[derive(Clone, Copy, Debug)]
 pub enum LateBoundRegionConversionTime {
@@ -508,21 +492,6 @@ pub enum NLLRegionVariableOrigin {
     },
 }
-impl NLLRegionVariableOrigin {
-    pub fn is_universal(self) -> bool {
-        match self {
-            NLLRegionVariableOrigin::FreeRegion => true,
-            NLLRegionVariableOrigin::Placeholder(..) => true,
-            NLLRegionVariableOrigin::Existential { .. } => false,
-            NLLRegionVariableOrigin::RootEmptyRegion => false,
-        }
-    }
-    pub fn is_existential(self) -> bool {
-        !self.is_universal()
-    }
-}
 // FIXME(eddyb) investigate overlap between this and `TyOrConstInferVar`.
 #[derive(Copy, Clone, Debug)]
 pub enum FixupError<'tcx> {

View file

@@ -28,7 +28,6 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_middle::ty::error::TypeError;
 use rustc_middle::ty::fold::{TypeFoldable, TypeVisitor};
 use rustc_middle::ty::relate::{self, Relate, RelateResult, TypeRelation};
-use rustc_middle::ty::subst::GenericArg;
 use rustc_middle::ty::{self, InferConst, Ty, TyCtxt};
 use std::fmt::Debug;
@@ -119,12 +118,6 @@ pub trait TypeRelatingDelegate<'tcx> {
     fn forbid_inference_vars() -> bool;
 }
-#[derive(Clone, Debug)]
-struct ScopesAndKind<'tcx> {
-    scopes: Vec<BoundRegionScope<'tcx>>,
-    kind: GenericArg<'tcx>,
-}
 #[derive(Clone, Debug, Default)]
 struct BoundRegionScope<'tcx> {
     map: FxHashMap<ty::BoundRegion, ty::Region<'tcx>>,
@@ -341,7 +334,7 @@ where
         // been fully instantiated and hence the set of scopes we have
         // doesn't matter -- just to be sure, put an empty vector
         // in there.
-        let old_a_scopes = ::std::mem::take(pair.vid_scopes(self));
+        let old_a_scopes = std::mem::take(pair.vid_scopes(self));
         // Relate the generalized kind to the original one.
         let result = pair.relate_generalized_ty(self, generalized_ty);
@@ -680,7 +673,7 @@ where
         // itself occurs. Note that `'b` and `'c` must both
         // include P. At the point, the call works because of
         // subtyping (i.e., `&'b u32 <: &{P} u32`).
-        let variance = ::std::mem::replace(&mut self.ambient_variance, ty::Variance::Covariant);
+        let variance = std::mem::replace(&mut self.ambient_variance, ty::Variance::Covariant);
         self.relate(a.skip_binder(), b.skip_binder())?;
@@ -709,7 +702,7 @@ where
         // Reset ambient variance to contravariance. See the
         // covariant case above for an explanation.
         let variance =
-            ::std::mem::replace(&mut self.ambient_variance, ty::Variance::Contravariant);
+            std::mem::replace(&mut self.ambient_variance, ty::Variance::Contravariant);
         self.relate(a.skip_binder(), b.skip_binder())?;
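The `std::mem::replace` calls above are the usual save-and-restore idiom: install a new value in a field, do some work, then put the old value back. A minimal standalone sketch of that pattern (toy types, not rustc's):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum Variance {
    Covariant,
    Contravariant,
}

struct Relator {
    ambient_variance: Variance,
}

impl Relator {
    fn with_variance(&mut self, v: Variance, f: impl FnOnce(&mut Self)) {
        // Save the old value while installing the new one...
        let old = std::mem::replace(&mut self.ambient_variance, v);
        f(self);
        // ...and restore it afterwards.
        self.ambient_variance = old;
    }
}

fn main() {
    let mut r = Relator { ambient_variance: Variance::Covariant };
    r.with_variance(Variance::Contravariant, |inner| {
        assert_eq!(inner.ambient_variance, Variance::Contravariant);
    });
    assert_eq!(r.ambient_variance, Variance::Covariant);
}
```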

View file

@@ -110,7 +110,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
     /// Trait queries just want to pass back type obligations "as is"
     pub fn take_registered_region_obligations(&self) -> Vec<(hir::HirId, RegionObligation<'tcx>)> {
-        ::std::mem::take(&mut self.inner.borrow_mut().region_obligations)
+        std::mem::take(&mut self.inner.borrow_mut().region_obligations)
     }
     /// Process the region obligations that must be proven (during
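Here `std::mem::take` drains the accumulated obligations out of the shared state without cloning, leaving an empty `Vec` behind. A minimal standalone sketch of that idiom (the types below are stand-ins, not rustc's):

```rust
use std::cell::RefCell;

// Stand-in for the inference context's interior-mutable state.
struct Inner {
    region_obligations: Vec<(u32, String)>,
}

struct InferCtxt {
    inner: RefCell<Inner>,
}

impl InferCtxt {
    fn register(&self, id: u32, obligation: &str) {
        self.inner.borrow_mut().region_obligations.push((id, obligation.to_string()));
    }

    /// Move the registered obligations out, leaving an empty `Vec` in place.
    fn take_registered_region_obligations(&self) -> Vec<(u32, String)> {
        std::mem::take(&mut self.inner.borrow_mut().region_obligations)
    }
}

fn main() {
    let cx = InferCtxt { inner: RefCell::new(Inner { region_obligations: Vec::new() }) };
    cx.register(1, "'a: 'b");
    let taken = cx.take_registered_region_obligations();
    assert_eq!(taken.len(), 1);
    assert!(cx.inner.borrow().region_obligations.is_empty());
}
```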

View file

@@ -59,9 +59,7 @@ pub type TraitObligation<'tcx> = Obligation<'tcx, ty::PolyTraitPredicate<'tcx>>;
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(PredicateObligation<'_>, 32);
-pub type Obligations<'tcx, O> = Vec<Obligation<'tcx, O>>;
 pub type PredicateObligations<'tcx> = Vec<PredicateObligation<'tcx>>;
-pub type TraitObligations<'tcx> = Vec<TraitObligation<'tcx>>;
 pub type Selection<'tcx> = ImplSource<'tcx, PredicateObligation<'tcx>>;

View file

@@ -551,6 +551,10 @@ fn write_out_deps(
         .map(|fmap| escape_dep_filename(&fmap.unmapped_path.as_ref().unwrap_or(&fmap.name)))
         .collect();
+    if let Some(ref backend) = sess.opts.debugging_opts.codegen_backend {
+        files.push(backend.to_string());
+    }
     if sess.binary_dep_depinfo() {
         boxed_resolver.borrow().borrow_mut().access(|resolver| {
             for cnum in resolver.cstore().crates_untracked() {

View file

@@ -3,6 +3,7 @@ use crate::passes::{self, BoxedResolver, QueryContext};
 use rustc_ast as ast;
 use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_data_structures::svh::Svh;
 use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
 use rustc_errors::ErrorReported;
 use rustc_hir::def_id::LOCAL_CRATE;
@@ -13,7 +14,8 @@ use rustc_middle::arena::Arena;
 use rustc_middle::dep_graph::DepGraph;
 use rustc_middle::ty::steal::Steal;
 use rustc_middle::ty::{GlobalCtxt, ResolverOutputs, TyCtxt};
-use rustc_session::config::{OutputFilenames, OutputType};
+use rustc_serialize::json;
+use rustc_session::config::{self, OutputFilenames, OutputType};
 use rustc_session::{output::find_crate_name, Session};
 use rustc_span::symbol::sym;
 use std::any::Any;
@@ -331,6 +333,7 @@ impl<'tcx> Queries<'tcx> {
     pub fn linker(&'tcx self) -> Result<Linker> {
         let dep_graph = self.dep_graph()?;
         let prepare_outputs = self.prepare_outputs()?;
+        let crate_hash = self.global_ctxt()?.peek_mut().enter(|tcx| tcx.crate_hash(LOCAL_CRATE));
         let ongoing_codegen = self.ongoing_codegen()?;
         let sess = self.session().clone();
@@ -340,6 +343,7 @@ impl<'tcx> Queries<'tcx> {
             sess,
             dep_graph: dep_graph.peek().clone(),
             prepare_outputs: prepare_outputs.take(),
+            crate_hash,
             ongoing_codegen: ongoing_codegen.take(),
             codegen_backend,
         })
@@ -350,18 +354,31 @@ pub struct Linker {
     sess: Lrc<Session>,
     dep_graph: DepGraph,
     prepare_outputs: OutputFilenames,
+    crate_hash: Svh,
     ongoing_codegen: Box<dyn Any>,
     codegen_backend: Lrc<Box<dyn CodegenBackend>>,
 }
 impl Linker {
     pub fn link(self) -> Result<()> {
-        let codegen_results =
-            self.codegen_backend.join_codegen(self.ongoing_codegen, &self.sess, &self.dep_graph)?;
-        let prof = self.sess.prof.clone();
+        let (codegen_results, work_products) =
+            self.codegen_backend.join_codegen(self.ongoing_codegen, &self.sess)?;
+        self.sess.compile_status()?;
+        let sess = &self.sess;
         let dep_graph = self.dep_graph;
+        sess.time("serialize_work_products", || {
+            rustc_incremental::save_work_product_index(&sess, &dep_graph, work_products)
+        });
+        let prof = self.sess.prof.clone();
         prof.generic_activity("drop_dep_graph").run(move || drop(dep_graph));
+        // Now that we won't touch anything in the incremental compilation directory
+        // any more, we can finalize it (which involves renaming it)
+        rustc_incremental::finalize_session_directory(&self.sess, self.crate_hash);
         if !self
             .sess
             .opts
@@ -371,6 +388,19 @@ impl Linker {
         {
             return Ok(());
         }
+        if sess.opts.debugging_opts.no_link {
+            // FIXME: use a binary format to encode the `.rlink` file
+            let rlink_data = json::encode(&codegen_results).map_err(|err| {
+                sess.fatal(&format!("failed to encode rlink: {}", err));
+            })?;
+            let rlink_file = self.prepare_outputs.with_extension(config::RLINK_EXT);
+            std::fs::write(&rlink_file, rlink_data).map_err(|err| {
+                sess.fatal(&format!("failed to write file {}: {}", rlink_file.display(), err));
+            })?;
+            return Ok(());
+        }
         self.codegen_backend.link(&self.sess, codegen_results, &self.prepare_outputs)
     }
 }
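The `no_link` branch above writes the codegen results to a `<crate>.rlink` file so linking can happen in a later invocation. A rough standalone sketch of that "serialize now, link later" idea, using a toy text encoding and illustrative names (the real compiler serializes its `CodegenResults` as JSON and has a FIXME to switch to a binary format):

```rust
use std::path::{Path, PathBuf};

// Stand-in for rustc's `CodegenResults`; the real type carries object files, link args, etc.
#[derive(Debug, PartialEq)]
struct CodegenResults {
    crate_name: String,
    object_files: Vec<String>,
}

// Toy encoding: one field per line. Illustrative only.
fn write_rlink(out: &Path, results: &CodegenResults) -> std::io::Result<()> {
    let mut data = results.crate_name.clone();
    for obj in &results.object_files {
        data.push('\n');
        data.push_str(obj);
    }
    std::fs::write(out, data)
}

fn read_rlink(path: &Path) -> std::io::Result<CodegenResults> {
    let text = std::fs::read_to_string(path)?;
    let mut lines = text.lines();
    let crate_name = lines.next().unwrap_or_default().to_string();
    let object_files = lines.map(str::to_string).collect();
    Ok(CodegenResults { crate_name, object_files })
}

fn main() -> std::io::Result<()> {
    let results = CodegenResults {
        crate_name: "demo".into(),
        object_files: vec!["demo.o".into(), "demo.1.o".into()],
    };
    // Mirrors `prepare_outputs.with_extension(RLINK_EXT)`: the output stem plus ".rlink".
    let path: PathBuf = std::env::temp_dir().join("demo.rlink");
    write_rlink(&path, &results)?;
    assert_eq!(read_rlink(&path)?, results);
    Ok(())
}
```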

View file

@@ -585,6 +585,7 @@ fn test_debugging_options_tracking_hash() {
     tracked!(symbol_mangling_version, SymbolManglingVersion::V0);
     tracked!(teach, true);
     tracked!(thinlto, Some(true));
+    tracked!(tune_cpu, Some(String::from("abc")));
     tracked!(tls_model, Some(TlsModel::GeneralDynamic));
     tracked!(treat_err_as_bug, Some(1));
     tracked!(unleash_the_miri_inside_of_you, true);

View file

@@ -187,7 +187,7 @@ pub fn setup_callbacks_and_run_in_thread_pool_with_globals<F: FnOnce() -> R + Se
         config = config.stack_size(size);
     }
-    let with_pool = move |pool: &rayon::ThreadPool| pool.install(move || f());
+    let with_pool = move |pool: &rayon::ThreadPool| pool.install(f);
     rustc_span::with_session_globals(edition, || {
         rustc_span::SESSION_GLOBALS.with(|session_globals| {

View file

@@ -48,6 +48,7 @@ impl Token {
 }
 /// Enum representing common lexeme types.
+// perf note: Changing all `usize` to `u32` doesn't change performance. See #77629
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub enum TokenKind {
     // Multi-char tokens:
@@ -160,6 +161,7 @@ pub enum LiteralKind {
 /// - `r##~"abcde"##`: `InvalidStarter`
 /// - `r###"abcde"##`: `NoTerminator { expected: 3, found: 2, possible_terminator_offset: Some(11)`
 /// - Too many `#`s (>65535): `TooManyDelimiters`
+// perf note: It doesn't matter that this makes `Token` 36 bytes bigger. See #77629
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub enum RawStrError {
     /// Non `#` characters exist between `r` and `"` eg. `r#~"..`
@@ -689,7 +691,12 @@ impl Cursor<'_> {
         let mut max_hashes = 0;
         // Count opening '#' symbols.
-        let n_start_hashes = self.eat_while(|c| c == '#');
+        let mut eaten = 0;
+        while self.first() == '#' {
+            eaten += 1;
+            self.bump();
+        }
+        let n_start_hashes = eaten;
         // Check that string is started.
         match self.bump() {
@@ -724,16 +731,11 @@ impl Cursor<'_> {
         // Note that this will not consume extra trailing `#` characters:
         // `r###"abcde"####` is lexed as a `RawStr { n_hashes: 3 }`
         // followed by a `#` token.
-        let mut hashes_left = n_start_hashes;
-        let is_closing_hash = |c| {
-            if c == '#' && hashes_left != 0 {
-                hashes_left -= 1;
-                true
-            } else {
-                false
-            }
-        };
-        let n_end_hashes = self.eat_while(is_closing_hash);
+        let mut n_end_hashes = 0;
+        while self.first() == '#' && n_end_hashes < n_start_hashes {
+            n_end_hashes += 1;
+            self.bump();
+        }
         if n_end_hashes == n_start_hashes {
             return (n_start_hashes, None);
@@ -807,17 +809,9 @@ impl Cursor<'_> {
     }
     /// Eats symbols while predicate returns true or until the end of file is reached.
-    /// Returns amount of eaten symbols.
-    fn eat_while<F>(&mut self, mut predicate: F) -> usize
-    where
-        F: FnMut(char) -> bool,
-    {
-        let mut eaten: usize = 0;
+    fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
         while predicate(self.first()) && !self.is_eof() {
-            eaten += 1;
             self.bump();
         }
-        eaten
     }
 }
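A standalone sketch of the hash-counting scheme the comments above describe (simplified: the real `rustc_lexer` works on a `Cursor` with `first`/`bump`, not on a `&str`): count the opening `#`s, then at a closing quote consume at most that many `#`s, so `r###"abcde"####` ends after three closing hashes and leaves the fourth `#` for the next token.

```rust
/// Given the text after `r`, count opening hashes and, once the closing quote is found,
/// how much input is consumed. Returns (n_hashes, consumed_len) on success.
fn raw_str_hashes(s: &str) -> Option<(usize, usize)> {
    let bytes = s.as_bytes();
    let n_start_hashes = bytes.iter().take_while(|&&b| b == b'#').count();
    // Expect the opening quote right after the hashes.
    if bytes.get(n_start_hashes) != Some(&b'"') {
        return None;
    }
    let mut i = n_start_hashes + 1;
    while i < bytes.len() {
        if bytes[i] == b'"' {
            // Count closing hashes, but never more than we opened with.
            let mut n_end_hashes = 0;
            while n_end_hashes < n_start_hashes && bytes.get(i + 1 + n_end_hashes) == Some(&b'#') {
                n_end_hashes += 1;
            }
            if n_end_hashes == n_start_hashes {
                // Trailing extra `#`s are *not* consumed.
                return Some((n_start_hashes, i + 1 + n_end_hashes));
            }
        }
        i += 1;
    }
    None
}

fn main() {
    // Text after `r` in `r###"abcde"####`: 3 hashes, and the fourth trailing `#` is left over.
    let after_r = r#####"###"abcde"####"#####;
    let (n_hashes, consumed) = raw_str_hashes(after_r).unwrap();
    assert_eq!(n_hashes, 3);
    assert_eq!(&after_r[consumed..], "#");
}
```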

View file

@@ -711,10 +711,6 @@ impl<'tcx> LateContext<'tcx> {
         }
     }
-    pub fn current_lint_root(&self) -> hir::HirId {
-        self.last_node_with_lint_attrs
-    }
     /// Check if a `DefId`'s path matches the given absolute type path usage.
     ///
     /// Anonymous scopes such as `extern` imports are matched with `kw::Invalid`;

View file

@@ -64,8 +64,8 @@ use rustc_middle::ty::query::Providers;
 use rustc_middle::ty::TyCtxt;
 use rustc_session::lint::builtin::{
     BARE_TRAIT_OBJECTS, BROKEN_INTRA_DOC_LINKS, ELIDED_LIFETIMES_IN_PATHS,
-    EXPLICIT_OUTLIVES_REQUIREMENTS, INVALID_CODEBLOCK_ATTRIBUTES, MISSING_DOC_CODE_EXAMPLES,
-    PRIVATE_DOC_TESTS,
+    EXPLICIT_OUTLIVES_REQUIREMENTS, INVALID_CODEBLOCK_ATTRIBUTES, INVALID_HTML_TAGS,
+    MISSING_DOC_CODE_EXAMPLES, PRIVATE_DOC_TESTS,
 };
 use rustc_span::symbol::{Ident, Symbol};
 use rustc_span::Span;
@@ -311,7 +311,8 @@ fn register_builtins(store: &mut LintStore, no_interleave_lints: bool) {
     PRIVATE_INTRA_DOC_LINKS,
     INVALID_CODEBLOCK_ATTRIBUTES,
     MISSING_DOC_CODE_EXAMPLES,
-    PRIVATE_DOC_TESTS
+    PRIVATE_DOC_TESTS,
+    INVALID_HTML_TAGS
 );
 // Register renamed and removed lints.

View file

@@ -304,7 +304,7 @@ fn lint_int_literal<'tcx>(
     t: ast::IntTy,
     v: u128,
 ) {
-    let int_type = t.normalize(cx.sess().target.ptr_width);
+    let int_type = t.normalize(cx.sess().target.pointer_width);
     let (min, max) = int_ty_range(int_type);
     let max = max as u128;
     let negative = type_limits.negated_expr_id == Some(e.hir_id);
@@ -352,7 +352,7 @@ fn lint_uint_literal<'tcx>(
     lit: &hir::Lit,
     t: ast::UintTy,
 ) {
-    let uint_type = t.normalize(cx.sess().target.ptr_width);
+    let uint_type = t.normalize(cx.sess().target.pointer_width);
     let (min, max) = uint_ty_range(uint_type);
     let lit_val: u128 = match lit.node {
         // _v is u8, within range by definition
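These lints normalize `isize`/`usize` to a fixed-width type using the target's pointer width and then range-check the literal. A rough standalone sketch of that normalization and range check (rustc's own `int_ty_range` works on its `ty`/`ast` types, so the enum and function below are illustrative):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum IntTy {
    Isize,
    I8,
    I16,
    I32,
    I64,
}

impl IntTy {
    /// Resolve `isize` to a fixed-width type using the target pointer width in bits.
    fn normalize(self, pointer_width: u32) -> IntTy {
        match (self, pointer_width) {
            (IntTy::Isize, 16) => IntTy::I16,
            (IntTy::Isize, 32) => IntTy::I32,
            (IntTy::Isize, 64) => IntTy::I64,
            _ => self,
        }
    }

    fn range(self) -> (i128, i128) {
        match self {
            IntTy::I8 => (i8::MIN as i128, i8::MAX as i128),
            IntTy::I16 => (i16::MIN as i128, i16::MAX as i128),
            IntTy::I32 => (i32::MIN as i128, i32::MAX as i128),
            IntTy::I64 => (i64::MIN as i128, i64::MAX as i128),
            IntTy::Isize => unreachable!("normalize before asking for a range"),
        }
    }
}

fn overflows(ty: IntTy, pointer_width: u32, value: i128) -> bool {
    let (min, max) = ty.normalize(pointer_width).range();
    value < min || value > max
}

fn main() {
    // 40_000 fits in isize on a 64-bit target, but not in i16.
    assert!(!overflows(IntTy::Isize, 64, 40_000));
    assert!(overflows(IntTy::I16, 64, 40_000));
}
```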

View file

@@ -70,7 +70,7 @@ fn main() {
     let host = env::var("HOST").expect("HOST was not set");
     let is_crossed = target != host;
-    let mut optional_components = vec![
+    let optional_components = &[
         "x86",
         "arm",
         "aarch64",
@@ -85,6 +85,7 @@ fn main() {
         "sparc",
         "nvptx",
         "hexagon",
+        "riscv",
     ];
     let mut version_cmd = Command::new(&llvm_config);
@@ -94,13 +95,9 @@ fn main() {
     let (major, _minor) = if let (Some(major), Some(minor)) = (parts.next(), parts.next()) {
         (major, minor)
     } else {
-        (6, 0)
+        (8, 0)
     };
-    if major > 6 {
-        optional_components.push("riscv");
-    }
     let required_components = &[
         "ipo",
         "bitreader",

Some files were not shown because too many files have changed in this diff.