Merge remote-tracking branch 'upstream/master' into impl-16351-nightly
Signed-off-by: Sean Cross <sean@xobs.io>
commit f9d390d14a
4238 changed files with 96221 additions and 81870 deletions

.gitattributes (vendored): 6 changes

@@ -7,10 +7,12 @@
*.fixed linguist-language=Rust
*.mir linguist-language=Rust
src/etc/installer/gfx/* binary
*.woff binary
src/vendor/** -text
Cargo.lock linguist-generated=false
config.toml.example linguist-language=TOML

# Older git versions try to fix line endings on images, this prevents it.
# Older git versions try to fix line endings on images and fonts, this prevents it.
*.png binary
*.ico binary
*.woff binary
*.woff2 binary

.github/workflows/ci.yml (vendored): 6 changes

@@ -43,7 +43,7 @@ jobs:
- name: mingw-check
os: ubuntu-latest-xl
env: {}
- name: x86_64-gnu-llvm-9
- name: x86_64-gnu-llvm-10
os: ubuntu-latest-xl
env: {}
- name: x86_64-gnu-tools

@@ -265,7 +265,7 @@ jobs:
- name: x86_64-gnu-distcheck
os: ubuntu-latest-xl
env: {}
- name: x86_64-gnu-llvm-9
- name: x86_64-gnu-llvm-10
env:
RUST_BACKTRACE: 1
os: ubuntu-latest-xl

@@ -622,7 +622,7 @@ jobs:
if: "github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository == 'rust-lang-ci/rust'"
steps:
- name: checkout the source code
uses: actions/checkout@v1
uses: actions/checkout@v2
with:
fetch-depth: 2
- name: publish toolstate

.gitignore (vendored): 72 changes

@@ -5,51 +5,71 @@
# created during manual debugging and many people like to clean up instead of
# having git ignore such leftovers. You can use `.git/info/exclude` to
# configure your local ignore list.
# FIXME: This needs cleanup.
*~
.#*

## File system
.DS_Store
desktop.ini

## Editor
*.swp
*.swo
Session.vim
.cproject
.hg/
.hgignore
.idea
*.iml
__pycache__/
*.py[cod]
*$py.class
.project
.settings/
.valgrindrc
.vscode
.project
.favorites.json
/Makefile
/build/
.settings/

## Tool
.valgrindrc
.cargo
# Included because it is part of the test case
!/src/test/run-make/thumb-none-qemu/example/.cargo

## Configuration
/config.toml
/dist/
/Makefile
config.mk
config.stamp
no_llvm_build

## Build
/dl/
/doc/
/inst/
/llvm/
/mingw-build/
/src/tools/x/target
# Created by default with `src/ci/docker/run.sh`:
/obj/
/build/
/dist/
/unicode-downloads
/target
# Generated by compiletest for incremental:
/src/tools/x/target
# Generated by compiletest for incremental
/tmp/
# Created by default with `src/ci/docker/run.sh`
/obj/

## Temporary files
*~
\#*
\#*\#
.#*

## Tags
tags
tags.*
TAGS
TAGS.*
\#*
\#*\#
config.mk
config.stamp
Session.vim
.cargo
!/src/test/run-make/thumb-none-qemu/example/.cargo
no_llvm_build

## Python
__pycache__/
*.py[cod]
*$py.class

## Node
**node_modules
**package-lock.json

# Before adding new lines, see the comment at the top.

.gitmodules (vendored): 2 changes

@@ -37,7 +37,7 @@
[submodule "src/llvm-project"]
path = src/llvm-project
url = https://github.com/rust-lang/llvm-project.git
branch = rustc/12.0-2021-02-03
branch = rustc/12.0-2021-04-15
[submodule "src/doc/embedded-book"]
path = src/doc/embedded-book
url = https://github.com/rust-embedded/book.git

.mailmap: 3 changes

@@ -235,6 +235,7 @@ Philipp Matthias Schäfer <philipp.matthias.schaefer@posteo.de>
Przemysław Wesołek <jest@go.art.pl> Przemek Wesołek <jest@go.art.pl>
Rafael Ávila de Espíndola <respindola@mozilla.com> Rafael Avila de Espindola <espindola@dream.(none)>
Ralph Giles <giles@thaumas.net> Ralph Giles <giles@mozilla.com>
Ramkumar Ramachandra <r@artagnon.com> <artagnon@gmail.com>
Renato Riccieri Santos Zannon <renato@rrsz.com.br>
Richard Diamond <wichard@vitalitystudios.com> <wichard@hahbee.co>
Rob Arnold <robarnold@cs.cmu.edu>

@@ -285,7 +286,7 @@ Xuefeng Wu <benewu@gmail.com> Xuefeng Wu <xfwu@thoughtworks.com>
Xuefeng Wu <benewu@gmail.com> XuefengWu <benewu@gmail.com>
York Xiang <bombless@126.com>
Youngsoo Son <ysson83@gmail.com> <ysoo.son@samsung.com>
Yuki Okushi <huyuumi.dev@gmail.com>
Yuki Okushi <jtitor@2k36.org> <huyuumi.dev@gmail.com>
Zach Pomerantz <zmp@umich.edu>
Zack Corr <zack@z0w0.me> <zackcorr95@gmail.com>
Zack Slayton <zack.slayton@gmail.com>

CONTRIBUTING.md

@@ -1,8 +1,31 @@
# Contributing to Rust

Thank you for your interest in contributing to Rust!
Thank you for your interest in contributing to Rust! There are many ways to contribute
and we appreciate all of them.

To get started, read the [Contributing to Rust] chapter of the [rustc-dev-guide].
Documentation for contributing to Rust is located in the [Guide to Rustc Development](https://rustc-dev-guide.rust-lang.org/),
commonly known as the [rustc-dev-guide]. Despite the name, this guide documents
not just how to develop rustc (the Rust compiler), but also how to contribute to any part
of the Rust project.

To get started with contributing, please read the [Contributing to Rust] chapter of the guide.
That chapter explains how to get your development environment set up and how to get help.

## About the [rustc-dev-guide]

The [rustc-dev-guide] is meant to help document how rustc –the Rust compiler– works,
as well as to help new contributors get involved in rustc development. It is recommend
to read and understand the [rustc-dev-guide] before making a contribution. This guide
talks about the different bots in the Rust ecosystem, the Rust development tools,
bootstrapping, the compiler architecture, source code representation, and more.

## [Getting help](https://rustc-dev-guide.rust-lang.org/getting-started.html#asking-questions)

There are many ways you can get help when you're stuck. Rust has many platforms for this:
[internals], [rust-zulip], and [rust-discord]. It is recommended to ask for help on
the [rust-zulip], but any of these platforms are a great way to seek help and even
find a mentor! You can learn more about asking questions and getting help in the
[Asking Questions](https://rustc-dev-guide.rust-lang.org/getting-started.html#asking-questions) chapter of the [rustc-dev-guide].

## Bug reports

@@ -13,3 +36,6 @@ refer to [this section][contributing-bug-reports] and [open an issue][issue temp
[rustc-dev-guide]: https://rustc-dev-guide.rust-lang.org/
[contributing-bug-reports]: https://rustc-dev-guide.rust-lang.org/contributing.html#bug-reports
[issue template]: https://github.com/rust-lang/rust/issues/new/choose
[internals]: https://internals.rust-lang.org
[rust-discord]: http://discord.gg/rust-lang
[rust-zulip]: https://rust-lang.zulipchat.com

Cargo.lock: 634 changes (file diff suppressed because it is too large)

Cargo.toml

@@ -45,6 +45,8 @@ exclude = [
# not all `Cargo.toml` files are available, so we exclude the `x` binary,
# so it can be invoked before the current checkout is set up.
"src/tools/x",
# stdarch has its own Cargo workspace
"library/stdarch",
]

[profile.release.package.compiler_builtins]

@@ -88,6 +90,7 @@ object.debug = 0
# vendored copy.
[patch."https://github.com/rust-lang/cargo"]
cargo = { path = "src/tools/cargo" }
cargo-util = { path = "src/tools/cargo/crates/cargo-util" }

[patch."https://github.com/rust-lang/rustfmt"]
# Similar to Cargo above we want the RLS to use a vendored version of `rustfmt`

README.md

@@ -31,7 +31,7 @@ by running `./x.py --help` or reading the [rustc dev guide][rustcguidebuild].
* `g++` 5.1 or later or `clang++` 3.5 or later
* `python` 3 or 2.7
* GNU `make` 3.81 or later
* `cmake` 3.4.3 or later
* `cmake` 3.13.4 or later
* `ninja`
* `curl`
* `git`

@@ -90,7 +90,7 @@ build.

[MSYS2][msys2] can be used to easily build Rust on Windows:

[msys2]: https://msys2.github.io/
[msys2]: https://www.msys2.org/

1. Grab the latest [MSYS2 installer][msys2] and go through the installer.

RELEASES.md: 206 changes

@@ -1,3 +1,181 @@
Version 1.51.0 (2021-03-25)
============================

Language
--------
- [You can now parameterize items such as functions, traits, and `struct`s by constant
values in addition to by types and lifetimes.][79135] Also known as "const generics"
E.g. you can now write the following. Note: Only values of primitive integers,
`bool`, or `char` types are currently permitted.
```rust
struct GenericArray<T, const LENGTH: usize> {
    inner: [T; LENGTH]
}

impl<T, const LENGTH: usize> GenericArray<T, LENGTH> {
    const fn last(&self) -> Option<&T> {
        if LENGTH == 0 {
            None
        } else {
            Some(&self.inner[LENGTH - 1])
        }
    }
}
```


Compiler
--------

- [Added the `-Csplit-debuginfo` codegen option for macOS platforms.][79570]
This option controls whether debug information is split across multiple files
or packed into a single file. **Note** This option is unstable on other platforms.
- [Added tier 3\* support for `aarch64_be-unknown-linux-gnu`,
`aarch64-unknown-linux-gnu_ilp32`, and `aarch64_be-unknown-linux-gnu_ilp32` targets.][81455]
- [Added tier 3 support for `i386-unknown-linux-gnu` and `i486-unknown-linux-gnu` targets.][80662]
- [The `target-cpu=native` option will now detect individual features of CPUs.][80749]

\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries
---------

- [`Box::downcast` is now also implemented for any `dyn Any + Send + Sync` object.][80945]
- [`str` now implements `AsMut<str>`.][80279]
- [`u64` and `u128` now implement `From<char>`.][79502]
- [`Error` is now implemented for `&T` where `T` implements `Error`.][75180]
- [`Poll::{map_ok, map_err}` are now implemented for `Poll<Option<Result<T, E>>>`.][80968]
- [`unsigned_abs` is now implemented for all signed integer types.][80959]
- [`io::Empty` now implements `io::Seek`.][78044]
- [`rc::Weak<T>` and `sync::Weak<T>`'s methods such as `as_ptr` are now implemented for
`T: ?Sized` types.][80764]
- [`Div` and `Rem` by their `NonZero` variant is now implemented for all unsigned integers.][79134]


Stabilized APIs
---------------

- [`Arc::decrement_strong_count`]
- [`Arc::increment_strong_count`]
- [`Once::call_once_force`]
- [`Peekable::next_if_eq`]
- [`Peekable::next_if`]
- [`Seek::stream_position`]
- [`array::IntoIter`]
- [`panic::panic_any`]
- [`ptr::addr_of!`]
- [`ptr::addr_of_mut!`]
- [`slice::fill_with`]
- [`slice::split_inclusive_mut`]
- [`slice::split_inclusive`]
- [`slice::strip_prefix`]
- [`slice::strip_suffix`]
- [`str::split_inclusive`]
- [`sync::OnceState`]
- [`task::Wake`]
- [`VecDeque::range`]
- [`VecDeque::range_mut`]
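
The stabilized-API list above only names the new items. The following standalone snippet is written for this write-up (it is not taken from the release notes) and shows a few of them in use: `Peekable::next_if`, `slice::fill_with`, the signed integers' `unsigned_abs`, and `ptr::addr_of!`.

```rust
fn main() {
    // Peekable::next_if: consume the next item only while it satisfies a predicate.
    let mut it = [1, 2, 3, 10].iter().peekable();
    let mut small = Vec::new();
    while let Some(&n) = it.next_if(|&&n| n < 10) {
        small.push(n);
    }
    assert_eq!(small, [1, 2, 3]);

    // slice::fill_with: fill a slice by repeatedly calling a closure.
    let mut buf = [0u8; 4];
    let mut next = 0u8;
    buf.fill_with(|| {
        next += 1;
        next
    });
    assert_eq!(buf, [1, 2, 3, 4]);

    // unsigned_abs: absolute value that cannot overflow, even for i32::MIN.
    assert_eq!((-5i32).unsigned_abs(), 5u32);
    assert_eq!(i32::MIN.unsigned_abs(), 2147483648u32);

    // ptr::addr_of!: create a raw pointer without going through a reference.
    let value = 7u32;
    let p = std::ptr::addr_of!(value);
    assert_eq!(unsafe { *p }, 7);
}
```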

Cargo
-----
- [Added the `split-debuginfo` profile option to control the -Csplit-debuginfo
codegen option.][cargo/9112]
- [Added the `resolver` field to `Cargo.toml` to enable the new feature resolver
and CLI option behavior.][cargo/8997] Version 2 of the feature resolver will try
to avoid unifying features of dependencies where that unification could be unwanted.
Such as using the same dependency with a `std` feature in a build scripts and
proc-macros, while using the `no-std` feature in the final binary. See the
[Cargo book documentation][feature-resolver@2.0] for more information on the feature.

Rustdoc
-------

- [Rustdoc will now include documentation for methods available from _nested_ `Deref` traits.][80653]
- [You can now provide a `--default-theme` flag which sets the default theme to use for
documentation.][79642]

Various improvements to intra-doc links:

- [You can link to non-path primitives such as `slice`.][80181]
- [You can link to associated items.][74489]
- [You can now include generic parameters when linking to items, like `Vec<T>`.][76934]

Misc
----
- [You can now pass `--include-ignored` to tests (e.g. with
`cargo test -- --include-ignored`) to include testing tests marked `#[ignore]`.][80053]

Compatibility Notes
-------------------

- [WASI platforms no longer use the `wasm-bindgen` ABI, and instead use the wasm32 ABI.][79998]
- [`rustc` no longer promotes division, modulo and indexing operations to `const` that
could fail.][80579]
- [The minimum version of glibc for the following platforms has been bumped to version 2.31
for the distributed artifacts.][81521]
- `armv5te-unknown-linux-gnueabi`
- `sparc64-unknown-linux-gnu`
- `thumbv7neon-unknown-linux-gnueabihf`
- `armv7-unknown-linux-gnueabi`
- `x86_64-unknown-linux-gnux32`
- [`atomic::spin_loop_hint` has been deprecated.][80966] It's recommended to use `hint::spin_loop` instead.

Internal Only
-------------

- [Consistently avoid constructing optimized MIR when not doing codegen][80718]

[79135]: https://github.com/rust-lang/rust/pull/79135
[74489]: https://github.com/rust-lang/rust/pull/74489
[76934]: https://github.com/rust-lang/rust/pull/76934
[79570]: https://github.com/rust-lang/rust/pull/79570
[80181]: https://github.com/rust-lang/rust/pull/80181
[79642]: https://github.com/rust-lang/rust/pull/79642
[80945]: https://github.com/rust-lang/rust/pull/80945
[80279]: https://github.com/rust-lang/rust/pull/80279
[80053]: https://github.com/rust-lang/rust/pull/80053
[79502]: https://github.com/rust-lang/rust/pull/79502
[75180]: https://github.com/rust-lang/rust/pull/75180
[79135]: https://github.com/rust-lang/rust/pull/79135
[81521]: https://github.com/rust-lang/rust/pull/81521
[80968]: https://github.com/rust-lang/rust/pull/80968
[80959]: https://github.com/rust-lang/rust/pull/80959
[80718]: https://github.com/rust-lang/rust/pull/80718
[80653]: https://github.com/rust-lang/rust/pull/80653
[80579]: https://github.com/rust-lang/rust/pull/80579
[79998]: https://github.com/rust-lang/rust/pull/79998
[78044]: https://github.com/rust-lang/rust/pull/78044
[81455]: https://github.com/rust-lang/rust/pull/81455
[80764]: https://github.com/rust-lang/rust/pull/80764
[80749]: https://github.com/rust-lang/rust/pull/80749
[80662]: https://github.com/rust-lang/rust/pull/80662
[79134]: https://github.com/rust-lang/rust/pull/79134
[80966]: https://github.com/rust-lang/rust/pull/80966
[cargo/8997]: https://github.com/rust-lang/cargo/pull/8997
[cargo/9112]: https://github.com/rust-lang/cargo/pull/9112
[feature-resolver@2.0]: https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2
[`Once::call_once_force`]: https://doc.rust-lang.org/stable/std/sync/struct.Once.html#method.call_once_force
[`sync::OnceState`]: https://doc.rust-lang.org/stable/std/sync/struct.OnceState.html
[`panic::panic_any`]: https://doc.rust-lang.org/stable/std/panic/fn.panic_any.html
[`slice::strip_prefix`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.strip_prefix
[`slice::strip_suffix`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.strip_prefix
[`Arc::increment_strong_count`]: https://doc.rust-lang.org/nightly/std/sync/struct.Arc.html#method.increment_strong_count
[`Arc::decrement_strong_count`]: https://doc.rust-lang.org/nightly/std/sync/struct.Arc.html#method.decrement_strong_count
[`slice::fill_with`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.fill_with
[`ptr::addr_of!`]: https://doc.rust-lang.org/nightly/std/ptr/macro.addr_of.html
[`ptr::addr_of_mut!`]: https://doc.rust-lang.org/nightly/std/ptr/macro.addr_of_mut.html
[`array::IntoIter`]: https://doc.rust-lang.org/nightly/std/array/struct.IntoIter.html
[`slice::split_inclusive`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.split_inclusive
[`slice::split_inclusive_mut`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.split_inclusive_mut
[`str::split_inclusive`]: https://doc.rust-lang.org/nightly/std/primitive.str.html#method.split_inclusive
[`task::Wake`]: https://doc.rust-lang.org/nightly/std/task/trait.Wake.html
[`Seek::stream_position`]: https://doc.rust-lang.org/nightly/std/io/trait.Seek.html#method.stream_position
[`Peekable::next_if`]: https://doc.rust-lang.org/nightly/std/iter/struct.Peekable.html#method.next_if
[`Peekable::next_if_eq`]: https://doc.rust-lang.org/nightly/std/iter/struct.Peekable.html#method.next_if_eq
[`VecDeque::range`]: https://doc.rust-lang.org/nightly/std/collections/struct.VecDeque.html#method.range
[`VecDeque::range_mut`]: https://doc.rust-lang.org/nightly/std/collections/struct.VecDeque.html#method.range_mut

Version 1.50.0 (2021-02-11)
============================


@@ -14,7 +192,7 @@ Compiler
- [The `x86_64-unknown-freebsd` is now built with the full toolset.][79484]
- [Dropped support for all cloudabi targets.][78439]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries

@@ -191,7 +369,7 @@ Compiler
- [Output from threads spawned in tests is now captured.][78227]
- [Change os and vendor values to "none" and "unknown" for some targets][78951]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries

@@ -296,7 +474,7 @@ Compiler
Note: If you're using cargo you must explicitly pass the `--target` flag.
- [Added tier 2\* support for `aarch64-unknown-linux-musl`.][76420]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries

@@ -435,7 +613,7 @@ Compiler
- [Upgrade the FreeBSD toolchain to version 11.4][75204]
- [`RUST_BACKTRACE`'s output is now more compact.][75048]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries

@@ -720,7 +898,7 @@ Compiler
- [Added tier 3 support for the `thumbv7a-uwp-windows-msvc` target.][72133]
- [Upgraded to LLVM 10.][67759]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.


@@ -1229,7 +1407,7 @@ Compiler
pointing to the location where they were called, rather than
`core`'s internals. ][67887]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries

@@ -1327,7 +1505,7 @@ Compiler
- [You can now provide `--extern` flag without a path, indicating that it is
available from the search path or specified with an `-L` flag.][64882]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

[argfile-docs]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#path-load-command-line-flags-from-a-path

@@ -1451,7 +1629,7 @@ Compiler
- [Added tier 3 support for the `mips64-unknown-linux-muslabi64`, and
`mips64el-unknown-linux-muslabi64` targets.][65843]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries

@@ -1601,7 +1779,7 @@ Compiler
output of successful tests.][62600]


\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries

@@ -1695,7 +1873,7 @@ Compiler
- [Added tier 3 support for the `riscv32i-unknown-none-elf` target.][62784]
- [Upgraded to LLVM 9.][62592]

\* Refer to Rust's [platform support page][forge-platform-support] for more
\* Refer to Rust's [platform support page][platform-support-doc] for more
information on Rust's tiered platform support.

Libraries

@@ -1737,6 +1915,11 @@ Cargo
- [You can now pass the `--features` option multiple times to enable
multiple features.][cargo/7084]

Rustdoc
-------

- [Documentation on `pub use` statements is prepended to the documentation of the re-exported item][63048]

Misc
----
- [`rustc` will now warn about some incorrect uses of

@@ -1771,6 +1954,7 @@ Compatibility Notes
[63421]: https://github.com/rust-lang/rust/pull/63421/
[cargo/7084]: https://github.com/rust-lang/cargo/pull/7084/
[cargo/7143]: https://github.com/rust-lang/cargo/pull/7143/
[63048]: https://github.com/rust-lang/rust/pull/63048
[`<*const T>::cast`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.cast
[`<*mut T>::cast`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.cast
[`Duration::as_secs_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.as_secs_f32

@@ -1782,7 +1966,7 @@ Compatibility Notes
[`Duration::mul_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.mul_f32
[`Duration::mul_f64`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.mul_f64
[`any::type_name`]: https://doc.rust-lang.org/std/any/fn.type_name.html
[forge-platform-support]: https://forge.rust-lang.org/release/platform-support.html
[platform-support-doc]: https://doc.rust-lang.org/nightly/rustc/platform-support.html
[pipeline-internals]: https://internals.rust-lang.org/t/evaluating-pipelined-rustc-compilation/10199

Version 1.37.0 (2019-08-15)

compiler/rustc/Cargo.toml

@@ -11,12 +11,16 @@ rustc_driver = { path = "../rustc_driver" }
# crate is intended to be used by codegen backends, which may not be in-tree.
rustc_codegen_ssa = { path = "../rustc_codegen_ssa" }

[dependencies.jemalloc-sys]
version = '0.3.0'
[dependencies.tikv-jemalloc-sys]
version = '0.4.0'
optional = true
features = ['unprefixed_malloc_on_supported_platforms']

[dependencies.tikv-jemallocator]
version = '0.4.0'
optional = true

[features]
jemalloc = ['jemalloc-sys']
jemalloc = ['tikv-jemalloc-sys', 'tikv-jemallocator']
llvm = ['rustc_driver/llvm']
max_level_info = ['rustc_driver/max_level_info']

compiler/rustc/src/main.rs

@@ -1,13 +1,26 @@
// Configure jemalloc as the `global_allocator` when configured. This is
// so that we use the sized deallocation apis jemalloc provides
// (namely `sdallocx`).
//
// The symbol overrides documented below are also performed so that we can
// ensure that we use a consistent allocator across the rustc <-> llvm boundary
#[cfg(feature = "jemalloc")]
#[global_allocator]
static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

#[cfg(feature = "tikv-jemalloc-sys")]
use tikv_jemalloc_sys as jemalloc_sys;

fn main() {
// Pull in jemalloc when enabled.
//
// Note that we're pulling in a static copy of jemalloc which means that to
// pull it in we need to actually reference its symbols for it to get
// linked. The two crates we link to here, std and rustc_driver, are both
// dynamic libraries. That means to pull in jemalloc we need to actually
// dynamic libraries. That means to pull in jemalloc we actually need to
// reference allocation symbols one way or another (as this file is the only
// object code in the rustc executable).
#[cfg(feature = "jemalloc-sys")]
#[cfg(feature = "tikv-jemalloc-sys")]
{
use std::os::raw::{c_int, c_void};


@@ -24,6 +37,20 @@ fn main() {
static _F5: unsafe extern "C" fn(*mut c_void, usize) -> *mut c_void = jemalloc_sys::realloc;
#[used]
static _F6: unsafe extern "C" fn(*mut c_void) = jemalloc_sys::free;

// On OSX, jemalloc doesn't directly override malloc/free, but instead
// registers itself with the allocator's zone APIs in a ctor. However,
// the linker doesn't seem to consider ctors as "used" when statically
// linking, so we need to explicitly depend on the function.
#[cfg(target_os = "macos")]
{
extern "C" {
fn _rjem_je_zone_register();
}

#[used]
static _F7: unsafe extern "C" fn() = _rjem_je_zone_register;
}
}

rustc_driver::set_sigpipe_handler();
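
The hunks above switch rustc's optional jemalloc support from the `jemalloc-sys`/`jemallocator` crates to the `tikv-` forks. Outside the compiler, the `#[global_allocator]` half of that setup looks like the following minimal sketch (it assumes a `tikv-jemallocator = "0.4"` dependency in Cargo.toml and is not part of this commit):

```rust
// Route every Rust allocation in this binary through jemalloc,
// using the same Jemalloc type the diff above wires into rustc.
#[global_allocator]
static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;

fn main() {
    // These allocations are served by jemalloc rather than the system allocator.
    let v: Vec<u64> = (0..1_000).collect();
    println!("allocated {} elements through jemalloc", v.len());
}
```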

compiler/rustc_apfloat/src/ieee.rs

@@ -2273,6 +2273,7 @@ impl Loss {
mod sig {
use super::{limbs_for_bits, ExpInt, Limb, Loss, LIMB_BITS};
use core::cmp::Ordering;
use core::iter;
use core::mem;

pub(super) fn is_all_zeros(limbs: &[Limb]) -> bool {

@@ -2483,7 +2484,7 @@ mod sig {
pub(super) fn add(a: &mut [Limb], b: &[Limb], mut c: Limb) -> Limb {
assert!(c <= 1);

for (a, &b) in a.iter_mut().zip(b) {
for (a, &b) in iter::zip(a, b) {
let (r, overflow) = a.overflowing_add(b);
let (r, overflow2) = r.overflowing_add(c);
*a = r;

@@ -2497,7 +2498,7 @@ mod sig {
pub(super) fn sub(a: &mut [Limb], b: &[Limb], mut c: Limb) -> Limb {
assert!(c <= 1);

for (a, &b) in a.iter_mut().zip(b) {
for (a, &b) in iter::zip(a, b) {
let (r, overflow) = a.overflowing_sub(b);
let (r, overflow2) = r.overflowing_sub(c);
*a = r;

compiler/rustc_apfloat/src/lib.rs

@@ -33,8 +33,9 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![no_std]
#![forbid(unsafe_code)]
#![feature(iter_zip)]
#![feature(nll)]
#![feature(or_patterns)]
#![cfg_attr(bootstrap, feature(or_patterns))]

#[macro_use]
extern crate alloc;
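
Both loops above switch from `a.iter_mut().zip(b)` to `iter::zip(a, b)`, which is why `#![feature(iter_zip)]` is added to the crate root. A small standalone illustration of the same rewrite (`std::iter::zip` was nightly-only behind `iter_zip` on the toolchain in this diff; it has since been stabilized):

```rust
fn add_assign(acc: &mut [u64], rhs: &[u64]) {
    // Before: acc.iter_mut().zip(rhs.iter()).
    // After: the free function keeps both sides of the zip symmetric.
    for (a, &b) in std::iter::zip(acc, rhs) {
        *a = a.wrapping_add(b);
    }
}

fn main() {
    let mut acc = [1u64, 2, 3];
    add_assign(&mut acc, &[10, 20, 30]);
    assert_eq!(acc, [11, 22, 33]);
}
```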

compiler/rustc_arena/Cargo.toml

@@ -5,4 +5,5 @@ version = "0.0.0"
edition = "2018"

[dependencies]
rustc_data_structures = { path = "../rustc_data_structures" }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }

compiler/rustc_arena/src/lib.rs

@@ -17,6 +17,7 @@
#![feature(min_specialization)]
#![cfg_attr(test, feature(test))]

use rustc_data_structures::sync;
use smallvec::SmallVec;

use std::alloc::Layout;

@@ -235,26 +236,6 @@ impl<T> TypedArena<T> {
start_ptr
}

/// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
/// reference to it. Will panic if passed a zero-sized types.
///
/// Panics:
///
/// - Zero-sized types
/// - Zero-length slices
#[inline]
pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
where
T: Copy,
{
unsafe {
let len = slice.len();
let start_ptr = self.alloc_raw_slice(len);
slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
slice::from_raw_parts_mut(start_ptr, len)
}
}

#[inline]
pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
assert!(mem::size_of::<T>() != 0);

@@ -297,22 +278,6 @@ impl<T> TypedArena<T> {
}
}

/// Clears the arena. Deallocates all but the longest chunk which may be reused.
pub fn clear(&mut self) {
unsafe {
// Clear the last chunk, which is partially filled.
let mut chunks_borrow = self.chunks.borrow_mut();
if let Some(mut last_chunk) = chunks_borrow.last_mut() {
self.clear_last_chunk(&mut last_chunk);
let len = chunks_borrow.len();
// If `T` is ZST, code below has no effect.
for mut chunk in chunks_borrow.drain(..len - 1) {
chunk.destroy(chunk.entries);
}
}
}
}

// Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
// chunks.
fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {

@@ -556,8 +521,19 @@ struct DropType {
obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
// SAFETY: we require `T: Send` before type-erasing into `DropType`.
#[cfg(parallel_compiler)]
unsafe impl sync::Send for DropType {}

impl DropType {
#[inline]
unsafe fn new<T: sync::Send>(obj: *mut T) -> Self {
unsafe fn drop_for_type<T>(to_drop: *mut u8) {
std::ptr::drop_in_place(to_drop as *mut T)
}

DropType { drop_fn: drop_for_type::<T>, obj: obj as *mut u8 }
}
}

impl Drop for DropType {

@@ -585,21 +561,26 @@ pub struct DropArena {

impl DropArena {
#[inline]
pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
pub unsafe fn alloc<T>(&self, object: T) -> &mut T
where
T: sync::Send,
{
let mem = self.arena.alloc_raw(Layout::new::<T>()) as *mut T;
// Write into uninitialized memory.
ptr::write(mem, object);
let result = &mut *mem;
// Record the destructor after doing the allocation as that may panic
// and would cause `object`'s destructor to run twice if it was recorded before.
self.destructors
.borrow_mut()
.push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
self.destructors.borrow_mut().push(DropType::new(result));
result
}

#[inline]
pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
pub unsafe fn alloc_from_iter<T, I>(&self, iter: I) -> &mut [T]
where
T: sync::Send,
I: IntoIterator<Item = T>,
{
let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
if vec.is_empty() {
return &mut [];

@@ -620,8 +601,7 @@ impl DropArena {
// Record the destructors after doing the allocation as that may panic
// and would cause `object`'s destructor to run twice if it was recorded before.
for i in 0..len {
destructors
.push(DropType { drop_fn: drop_for_type::<T>, obj: start_ptr.add(i) as *mut u8 });
destructors.push(DropType::new(start_ptr.add(i)));
}

slice::from_raw_parts_mut(start_ptr, len)
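
The refactor above moves the type-erased drop function inside `DropType::new`, which pairs an `unsafe fn(*mut u8)` that remembers the concrete type with the pointer it should drop. A standalone sketch of that pattern, using a made-up `Noisy` type rather than rustc's arena internals:

```rust
// Type-erased destructor record: a fn pointer that knows the concrete type,
// plus the raw pointer it should drop.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

impl DropType {
    unsafe fn new<T>(obj: *mut T) -> Self {
        unsafe fn drop_for_type<T>(to_drop: *mut u8) {
            std::ptr::drop_in_place(to_drop as *mut T)
        }
        DropType { drop_fn: drop_for_type::<T>, obj: obj as *mut u8 }
    }
}

impl Drop for DropType {
    fn drop(&mut self) {
        // Run the recorded destructor on the erased pointer.
        unsafe { (self.drop_fn)(self.obj) }
    }
}

fn main() {
    struct Noisy;
    impl Drop for Noisy {
        fn drop(&mut self) {
            println!("Noisy dropped through the erased fn pointer");
        }
    }

    let raw = Box::into_raw(Box::new(Noisy));
    // Dropping the record runs Noisy's destructor via the erased pointer.
    drop(unsafe { DropType::new(raw) });
    // Free the Box allocation without running the destructor a second time.
    drop(unsafe { Box::from_raw(raw as *mut std::mem::ManuallyDrop<Noisy>) });
}
```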

compiler/rustc_arena/src/tests.rs

@@ -11,6 +11,24 @@ struct Point {
z: i32,
}

impl<T> TypedArena<T> {
/// Clears the arena. Deallocates all but the longest chunk which may be reused.
fn clear(&mut self) {
unsafe {
// Clear the last chunk, which is partially filled.
let mut chunks_borrow = self.chunks.borrow_mut();
if let Some(mut last_chunk) = chunks_borrow.last_mut() {
self.clear_last_chunk(&mut last_chunk);
let len = chunks_borrow.len();
// If `T` is ZST, code below has no effect.
for mut chunk in chunks_borrow.drain(..len - 1) {
chunk.destroy(chunk.entries);
}
}
}
}
}

#[test]
pub fn test_unused() {
let arena: TypedArena<Point> = TypedArena::default();

compiler/rustc_ast/src/ast.rs

@@ -100,6 +100,7 @@ pub struct Path {
}

impl PartialEq<Symbol> for Path {
#[inline]
fn eq(&self, symbol: &Symbol) -> bool {
self.segments.len() == 1 && { self.segments[0].ident.name == *symbol }
}

@@ -149,9 +150,17 @@ impl PathSegment {
pub fn from_ident(ident: Ident) -> Self {
PathSegment { ident, id: DUMMY_NODE_ID, args: None }
}

pub fn path_root(span: Span) -> Self {
PathSegment::from_ident(Ident::new(kw::PathRoot, span))
}

pub fn span(&self) -> Span {
match &self.args {
Some(args) => self.ident.span.to(args.span()),
None => self.ident.span,
}
}
}

/// The arguments of a path segment.

@@ -647,7 +656,7 @@ impl Pat {
/// are treated the same as `x: x, y: ref y, z: ref mut z`,
/// except when `is_shorthand` is true.
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct FieldPat {
pub struct PatField {
/// The identifier for the field.
pub ident: Ident,
/// The pattern the field is destructured to.

@@ -692,7 +701,7 @@ pub enum PatKind {

/// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`).
/// The `bool` is `true` in the presence of a `..`.
Struct(Path, Vec<FieldPat>, /* recovered */ bool),
Struct(Path, Vec<PatField>, /* recovered */ bool),

/// A tuple struct/variant pattern (`Variant(x, y, .., z)`).
TupleStruct(Path, Vec<P<Pat>>),

@@ -754,14 +763,6 @@ pub enum Mutability {
}

impl Mutability {
/// Returns `MutMutable` only if both `self` and `other` are mutable.
pub fn and(self, other: Self) -> Self {
match self {
Mutability::Mut => other,
Mutability::Not => Mutability::Not,
}
}

pub fn invert(self) -> Self {
match self {
Mutability::Mut => Mutability::Not,

@@ -915,16 +916,6 @@ impl Stmt {
}
}

pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> {
match self.kind {
StmtKind::Local(ref mut local) => local.tokens.as_mut(),
StmtKind::Item(ref mut item) => item.tokens.as_mut(),
StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens.as_mut(),
StmtKind::Empty => None,
StmtKind::MacCall(ref mut mac) => mac.tokens.as_mut(),
}
}

pub fn has_trailing_semicolon(&self) -> bool {
match &self.kind {
StmtKind::Semi(_) => true,

@@ -1037,9 +1028,9 @@ pub struct Arm {
pub is_placeholder: bool,
}

/// Access of a named (e.g., `obj.foo`) or unnamed (e.g., `obj.0`) struct field.
/// A single field in a struct expression, e.g. `x: value` and `y` in `Foo { x: value, y }`.
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct Field {
pub struct ExprField {
pub attrs: AttrVec,
pub id: NodeId,
pub span: Span,

@@ -1083,8 +1074,8 @@ pub struct Expr {
}

// `Expr` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
rustc_data_structures::static_assert_size!(Expr, 120);
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Expr, 104);

impl Expr {
/// Returns `true` if this expression would be valid somewhere that expects a value;

@@ -1139,6 +1130,14 @@ impl Expr {
}
}

pub fn peel_parens(&self) -> &Expr {
let mut expr = self;
while let ExprKind::Paren(inner) = &expr.kind {
expr = &inner;
}
expr
}

/// Attempts to reparse as `Ty` (for diagnostic purposes).
pub fn to_ty(&self) -> Option<P<Ty>> {
let kind = match &self.kind {

@@ -1246,6 +1245,13 @@ pub enum StructRest {
None,
}

#[derive(Clone, Encodable, Decodable, Debug)]
pub struct StructExpr {
pub path: Path,
pub fields: Vec<ExprField>,
pub rest: StructRest,
}

#[derive(Clone, Encodable, Decodable, Debug)]
pub enum ExprKind {
/// A `box x` expression.

@@ -1340,7 +1346,7 @@ pub enum ExprKind {
Field(P<Expr>, Ident),
/// An indexing operation (e.g., `foo[2]`).
Index(P<Expr>, P<Expr>),
/// A range (e.g., `1..2`, `1..`, `..2`, `1..=2`, `..=2`; and `..` in destructuring assingment).
/// A range (e.g., `1..2`, `1..`, `..2`, `1..=2`, `..=2`; and `..` in destructuring assignment).
Range(Option<P<Expr>>, Option<P<Expr>>, RangeLimits),
/// An underscore, used in destructuring assignment to ignore a value.
Underscore,

@@ -1371,7 +1377,7 @@ pub enum ExprKind {
/// A struct literal expression.
///
/// E.g., `Foo {x: 1, y: 2}`, or `Foo {x: 1, .. rest}`.
Struct(Path, Vec<Field>, StructRest),
Struct(P<StructExpr>),

/// An array literal constructed from one repeated element.
///

@@ -1709,13 +1715,6 @@ impl FloatTy {
FloatTy::F64 => sym::f64,
}
}

pub fn bit_width(self) -> u64 {
match self {
FloatTy::F32 => 32,
FloatTy::F64 => 64,
}
}
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]

@@ -1751,29 +1750,6 @@ impl IntTy {
IntTy::I128 => sym::i128,
}
}

pub fn bit_width(&self) -> Option<u64> {
Some(match *self {
IntTy::Isize => return None,
IntTy::I8 => 8,
IntTy::I16 => 16,
IntTy::I32 => 32,
IntTy::I64 => 64,
IntTy::I128 => 128,
})
}

pub fn normalize(&self, target_width: u32) -> Self {
match self {
IntTy::Isize => match target_width {
16 => IntTy::I16,
32 => IntTy::I32,
64 => IntTy::I64,
_ => unreachable!(),
},
_ => *self,
}
}
}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Debug)]

@@ -1809,29 +1785,6 @@ impl UintTy {
UintTy::U128 => sym::u128,
}
}

pub fn bit_width(&self) -> Option<u64> {
Some(match *self {
UintTy::Usize => return None,
UintTy::U8 => 8,
UintTy::U16 => 16,
UintTy::U32 => 32,
UintTy::U64 => 64,
UintTy::U128 => 128,
})
}

pub fn normalize(&self, target_width: u32) -> Self {
match self {
UintTy::Usize => match target_width {
16 => UintTy::U16,
32 => UintTy::U32,
64 => UintTy::U64,
_ => unreachable!(),
},
_ => *self,
}
}
}

/// A constraint on an associated type (e.g., `A = Bar` in `Foo<A = Bar>` or

@@ -1951,7 +1904,7 @@ impl TyKind {
}

/// Syntax used to declare a trait object.
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)]
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum TraitObjectSyntax {
Dyn,
None,

@@ -2046,7 +1999,7 @@ pub enum InlineAsmOperand {
out_expr: Option<P<Expr>>,
},
Const {
expr: P<Expr>,
anon_const: AnonConst,
},
Sym {
expr: P<Expr>,

@@ -2202,9 +2155,6 @@ pub struct FnDecl {
}

impl FnDecl {
pub fn get_self(&self) -> Option<ExplicitSelf> {
self.inputs.get(0).and_then(Param::to_self)
}
pub fn has_self(&self) -> bool {
self.inputs.get(0).map_or(false, Param::is_self)
}

@@ -2299,7 +2249,7 @@ impl FnRetTy {
}
}

#[derive(Clone, PartialEq, Encodable, Decodable, Debug)]
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)]
pub enum Inline {
Yes,
No,

@@ -2521,11 +2471,11 @@ impl VisibilityKind {
}
}

/// Field of a struct.
/// Field definition in a struct, variant or union.
///
/// E.g., `bar: usize` as in `struct Foo { bar: usize }`.
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct StructField {
pub struct FieldDef {
pub attrs: Vec<Attribute>,
pub id: NodeId,
pub span: Span,

@@ -2542,11 +2492,11 @@ pub enum VariantData {
/// Struct variant.
///
/// E.g., `Bar { .. }` as in `enum Foo { Bar { .. } }`.
Struct(Vec<StructField>, bool),
Struct(Vec<FieldDef>, bool),
/// Tuple variant.
///
/// E.g., `Bar(..)` as in `enum Foo { Bar(..) }`.
Tuple(Vec<StructField>, NodeId),
Tuple(Vec<FieldDef>, NodeId),
/// Unit variant.
///
/// E.g., `Bar = ..` as in `enum Foo { Bar = .. }`.

@@ -2555,7 +2505,7 @@ pub enum VariantData {

impl VariantData {
/// Return the fields of this variant.
pub fn fields(&self) -> &[StructField] {
pub fn fields(&self) -> &[FieldDef] {
match *self {
VariantData::Struct(ref fields, ..) | VariantData::Tuple(ref fields, _) => fields,
_ => &[],

@@ -2757,7 +2707,7 @@ pub enum ItemKind {
MacroDef(MacroDef),
}

#[cfg(target_arch = "x86_64")]
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(ItemKind, 112);

impl ItemKind {

@@ -2831,7 +2781,7 @@ pub enum AssocItemKind {
MacCall(MacCall),
}

#[cfg(target_arch = "x86_64")]
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(AssocItemKind, 72);

impl AssocItemKind {

@@ -2883,7 +2833,7 @@ pub enum ForeignItemKind {
MacCall(MacCall),
}

#[cfg(target_arch = "x86_64")]
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(ForeignItemKind, 72);

impl From<ForeignItemKind> for ItemKind {

compiler/rustc_ast/src/ast_like.rs

@@ -1,34 +1,89 @@
use super::ptr::P;
use super::token::Nonterminal;
use super::tokenstream::LazyTokenStream;
use super::{Arm, Field, FieldPat, GenericParam, Param, StructField, Variant};
use super::{AssocItem, Expr, ForeignItem, Item, Local};
use super::{Arm, ExprField, FieldDef, GenericParam, Param, PatField, Variant};
use super::{AssocItem, Expr, ForeignItem, Item, Local, MacCallStmt};
use super::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
use super::{AttrVec, Attribute, Stmt, StmtKind};

use std::fmt::Debug;

/// An `AstLike` represents an AST node (or some wrapper around
/// and AST node) which stores some combination of attributes
/// and tokens.
pub trait AstLike: Sized {
pub trait AstLike: Sized + Debug {
/// This is `true` if this `AstLike` might support 'custom' (proc-macro) inner
/// attributes. Attributes like `#![cfg]` and `#![cfg_attr]` are not
/// considered 'custom' attributes
///
/// If this is `false`, then this `AstLike` definitely does
/// not support 'custom' inner attributes, which enables some optimizations
/// during token collection.
const SUPPORTS_CUSTOM_INNER_ATTRS: bool;
fn attrs(&self) -> &[Attribute];
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>));
/// Called by `Parser::collect_tokens` to store the collected
/// tokens inside an AST node
fn finalize_tokens(&mut self, _tokens: LazyTokenStream) {
// This default impl makes this trait easier to implement
// in tools like `rust-analyzer`
panic!("`finalize_tokens` is not supported!")
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>>;
}

impl<T: AstLike + 'static> AstLike for P<T> {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = T::SUPPORTS_CUSTOM_INNER_ATTRS;
fn attrs(&self) -> &[Attribute] {
(**self).attrs()
}
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
(**self).visit_attrs(f);
}
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
(**self).finalize_tokens(tokens)
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
(**self).tokens_mut()
}
}

impl AstLike for crate::token::Nonterminal {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
fn attrs(&self) -> &[Attribute] {
match self {
Nonterminal::NtItem(item) => item.attrs(),
Nonterminal::NtStmt(stmt) => stmt.attrs(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.attrs(),
Nonterminal::NtPat(_)
| Nonterminal::NtTy(_)
| Nonterminal::NtMeta(_)
| Nonterminal::NtPath(_)
| Nonterminal::NtVis(_)
| Nonterminal::NtTT(_)
| Nonterminal::NtBlock(_)
| Nonterminal::NtIdent(..)
| Nonterminal::NtLifetime(_) => &[],
}
}
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
match self {
Nonterminal::NtItem(item) => item.visit_attrs(f),
Nonterminal::NtStmt(stmt) => stmt.visit_attrs(f),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.visit_attrs(f),
Nonterminal::NtPat(_)
| Nonterminal::NtTy(_)
| Nonterminal::NtMeta(_)
| Nonterminal::NtPath(_)
| Nonterminal::NtVis(_)
| Nonterminal::NtTT(_)
| Nonterminal::NtBlock(_)
| Nonterminal::NtIdent(..)
| Nonterminal::NtLifetime(_) => {}
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
match self {
Nonterminal::NtItem(item) => item.tokens_mut(),
Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens_mut(),
Nonterminal::NtPat(pat) => pat.tokens_mut(),
Nonterminal::NtTy(ty) => ty.tokens_mut(),
Nonterminal::NtMeta(attr_item) => attr_item.tokens_mut(),
Nonterminal::NtPath(path) => path.tokens_mut(),
Nonterminal::NtVis(vis) => vis.tokens_mut(),
_ => panic!("Called tokens_mut on {:?}", self),
}
}
}


@@ -41,13 +96,17 @@ fn visit_attrvec(attrs: &mut AttrVec, f: impl FnOnce(&mut Vec<Attribute>)) {
}

impl AstLike for StmtKind {
// This might be an `StmtKind::Item`, which contains
// an item that supports inner attrs
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;

fn attrs(&self) -> &[Attribute] {
match *self {
StmtKind::Local(ref local) => local.attrs(),
StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.attrs(),
StmtKind::Item(ref item) => item.attrs(),
match self {
StmtKind::Local(local) => local.attrs(),
StmtKind::Expr(expr) | StmtKind::Semi(expr) => expr.attrs(),
StmtKind::Item(item) => item.attrs(),
StmtKind::Empty => &[],
StmtKind::MacCall(ref mac) => &*mac.attrs,
StmtKind::MacCall(mac) => &mac.attrs,
}
}


@@ -60,21 +119,20 @@ impl AstLike for StmtKind {
StmtKind::MacCall(mac) => visit_attrvec(&mut mac.attrs, f),
}
}
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
let stmt_tokens = match self {
StmtKind::Local(ref mut local) => &mut local.tokens,
StmtKind::Item(ref mut item) => &mut item.tokens,
StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => &mut expr.tokens,
StmtKind::Empty => return,
StmtKind::MacCall(ref mut mac) => &mut mac.tokens,
};
if stmt_tokens.is_none() {
*stmt_tokens = Some(tokens);
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(match self {
StmtKind::Local(local) => &mut local.tokens,
StmtKind::Item(item) => &mut item.tokens,
StmtKind::Expr(expr) | StmtKind::Semi(expr) => &mut expr.tokens,
StmtKind::Empty => return None,
StmtKind::MacCall(mac) => &mut mac.tokens,
})
}
}

impl AstLike for Stmt {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = StmtKind::SUPPORTS_CUSTOM_INNER_ATTRS;

fn attrs(&self) -> &[Attribute] {
self.kind.attrs()
}

@@ -82,31 +140,31 @@ impl AstLike for Stmt {
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
self.kind.visit_attrs(f);
}
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
self.kind.finalize_tokens(tokens)
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
self.kind.tokens_mut()
}
}

impl AstLike for Attribute {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;

fn attrs(&self) -> &[Attribute] {
&[]
}
fn visit_attrs(&mut self, _f: impl FnOnce(&mut Vec<Attribute>)) {}
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
match &mut self.kind {
AttrKind::Normal(_, attr_tokens) => {
if attr_tokens.is_none() {
*attr_tokens = Some(tokens);
}
}
AttrKind::DocComment(..) => {
panic!("Called finalize_tokens on doc comment attr {:?}", self)
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(match &mut self.kind {
AttrKind::Normal(_, tokens) => tokens,
kind @ AttrKind::DocComment(..) => {
panic!("Called tokens_mut on doc comment attr {:?}", kind)
}
})
}
}

impl<T: AstLike> AstLike for Option<T> {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = T::SUPPORTS_CUSTOM_INNER_ATTRS;

fn attrs(&self) -> &[Attribute] {
self.as_ref().map(|inner| inner.attrs()).unwrap_or(&[])
}

@@ -115,10 +173,8 @@ impl<T: AstLike> AstLike for Option<T> {
inner.visit_attrs(f);
}
}
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
if let Some(inner) = self {
inner.finalize_tokens(tokens);
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
self.as_mut().and_then(|inner| inner.tokens_mut())
}
}


@@ -142,8 +198,13 @@ impl VecOrAttrVec for AttrVec {
}

macro_rules! derive_has_tokens_and_attrs {
($($ty:path),*) => { $(
(
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = $inner_attrs:literal;
$($ty:path),*
) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = $inner_attrs;

fn attrs(&self) -> &[Attribute] {
&self.attrs
}

@@ -152,19 +213,19 @@ macro_rules! derive_has_tokens_and_attrs {
VecOrAttrVec::visit(&mut self.attrs, f)
}

fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
if self.tokens.is_none() {
self.tokens = Some(tokens);
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(&mut self.tokens)
}

}
}
)* }
}

macro_rules! derive_has_attrs_no_tokens {
($($ty:path),*) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;

fn attrs(&self) -> &[Attribute] {
&self.attrs
}

@@ -173,7 +234,9 @@ macro_rules! derive_has_attrs_no_tokens {
VecOrAttrVec::visit(&mut self.attrs, f)
}

fn finalize_tokens(&mut self, _tokens: LazyTokenStream) {}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
None
}
}
)* }
}

@@ -181,34 +244,38 @@ macro_rules! derive_has_attrs_no_tokens {
macro_rules! derive_has_tokens_no_attrs {
($($ty:path),*) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;

fn attrs(&self) -> &[Attribute] {
&[]
}

fn visit_attrs(&mut self, _f: impl FnOnce(&mut Vec<Attribute>)) {
}

fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
if self.tokens.is_none() {
self.tokens = Some(tokens);
}

fn visit_attrs(&mut self, _f: impl FnOnce(&mut Vec<Attribute>)) {}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(&mut self.tokens)
}
}
)* }
}

// These AST nodes support both inert and active
// attributes, so they also have tokens.
// These ast nodes support both active and inert attributes,
// so they have tokens collected to pass to proc macros
derive_has_tokens_and_attrs! {
Item, Expr, Local, AssocItem, ForeignItem
// Both `Item` and `AssocItem` can have bodies, which
// can contain inner attributes
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
Item, AssocItem, ForeignItem
}

derive_has_tokens_and_attrs! {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
Local, MacCallStmt, Expr
}

// These ast nodes only support inert attributes, so they don't
// store tokens (since nothing can observe them)
derive_has_attrs_no_tokens! {
StructField, Arm,
Field, FieldPat, Variant, Param, GenericParam
FieldDef, Arm, ExprField, PatField, Variant, Param, GenericParam
}

// These AST nodes don't support attributes, but can
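
The refactor above replaces the per-node `finalize_tokens` method with a single `tokens_mut` accessor, so the "store the collected tokens if the slot is empty" logic can live in one place. A standalone, simplified sketch of that shape, using stand-in types instead of rustc's real `LazyTokenStream` and `Attribute`:

```rust
#[derive(Debug, Clone, PartialEq)]
struct TokenStream(String); // stand-in for rustc's LazyTokenStream

trait AstLike {
    const SUPPORTS_CUSTOM_INNER_ATTRS: bool;
    // Every node exposes its (optional) token slot instead of implementing
    // its own finalize logic.
    fn tokens_mut(&mut self) -> Option<&mut Option<TokenStream>>;
}

// What `finalize_tokens` used to do on every node, written once generically:
// store the collected tokens only if the node has a slot and it is still empty.
fn finalize_tokens<T: AstLike>(node: &mut T, tokens: TokenStream) {
    if let Some(slot) = node.tokens_mut() {
        if slot.is_none() {
            *slot = Some(tokens);
        }
    }
}

struct Expr {
    tokens: Option<TokenStream>,
}

impl AstLike for Expr {
    const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
    fn tokens_mut(&mut self) -> Option<&mut Option<TokenStream>> {
        Some(&mut self.tokens)
    }
}

fn main() {
    let mut e = Expr { tokens: None };
    finalize_tokens(&mut e, TokenStream("1 + 1".to_string()));
    assert_eq!(e.tokens, Some(TokenStream("1 + 1".to_string())));
    println!("supports custom inner attrs: {}", Expr::SUPPORTS_CUSTOM_INNER_ATTRS);
}
```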
@ -6,7 +6,9 @@ use crate::ast::{Lit, LitKind};
|
|||
use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
|
||||
use crate::ast::{Path, PathSegment};
|
||||
use crate::token::{self, CommentKind, Token};
|
||||
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree, TreeAndSpacing};
|
||||
use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
|
||||
use crate::tokenstream::{DelimSpan, Spacing, TokenTree, TreeAndSpacing};
|
||||
use crate::tokenstream::{LazyTokenStream, TokenStream};
|
||||
|
||||
use rustc_index::bit_set::GrowableBitSet;
|
||||
use rustc_span::source_map::BytePos;
|
||||
|
@ -33,10 +35,6 @@ impl MarkedAttrs {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn is_known_lint_tool(m_item: Ident) -> bool {
|
||||
[sym::clippy, sym::rustc, sym::rustdoc].contains(&m_item.name)
|
||||
}
|
||||
|
||||
impl NestedMetaItem {
|
||||
/// Returns the `MetaItem` if `self` is a `NestedMetaItem::MetaItem`.
|
||||
pub fn meta_item(&self) -> Option<&MetaItem> {
|
||||
|
@ -104,22 +102,14 @@ impl NestedMetaItem {
|
|||
self.meta_item().map_or(false, |meta_item| meta_item.is_word())
|
||||
}
|
||||
|
||||
/// Returns `true` if `self` is a `MetaItem` and the meta item is a `ValueString`.
|
||||
pub fn is_value_str(&self) -> bool {
|
||||
self.value_str().is_some()
|
||||
}
|
||||
|
||||
/// Returns `true` if `self` is a `MetaItem` and the meta item is a list.
|
||||
pub fn is_meta_item_list(&self) -> bool {
|
||||
self.meta_item_list().is_some()
|
||||
}
|
||||
|
||||
/// See [`MetaItem::name_value_literal_span`].
|
||||
pub fn name_value_literal_span(&self) -> Option<Span> {
|
||||
self.meta_item()?.name_value_literal_span()
|
||||
}
|
||||
}
|
||||
|
||||
impl Attribute {
|
||||
#[inline]
|
||||
pub fn has_name(&self, name: Symbol) -> bool {
|
||||
match self.kind {
|
||||
AttrKind::Normal(ref item, _) => item.path == name,
|
||||
|
@ -168,31 +158,6 @@ impl Attribute {
|
|||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_meta_item_list(&self) -> bool {
|
||||
self.meta_item_list().is_some()
|
||||
}
|
||||
|
||||
/// Indicates if the attribute is a `ValueString`.
|
||||
pub fn is_value_str(&self) -> bool {
|
||||
self.value_str().is_some()
|
||||
}
|
||||
|
||||
/// This is used in case you want the value span instead of the whole attribute. Example:
|
||||
///
|
||||
/// ```text
|
||||
/// #[doc(alias = "foo")]
|
||||
/// ```
|
||||
///
|
||||
/// In here, it'll return a span for `"foo"`.
|
||||
pub fn name_value_literal_span(&self) -> Option<Span> {
|
||||
match self.kind {
|
||||
AttrKind::Normal(ref item, _) => {
|
||||
item.meta(self.span).and_then(|meta| meta.name_value_literal_span())
|
||||
}
|
||||
AttrKind::DocComment(..) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MetaItem {
|
||||
|
@ -239,10 +204,6 @@ impl MetaItem {
|
|||
self.path == name
|
||||
}
|
||||
|
||||
pub fn is_value_str(&self) -> bool {
|
||||
self.value_str().is_some()
|
||||
}
|
||||
|
||||
/// This is used in case you want the value span instead of the whole attribute. Example:
|
||||
///
|
||||
/// ```text
|
||||
|
@ -309,14 +270,18 @@ impl Attribute {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn tokens(&self) -> TokenStream {
|
||||
pub fn tokens(&self) -> AttrAnnotatedTokenStream {
|
||||
match self.kind {
|
||||
AttrKind::Normal(_, ref tokens) => tokens
|
||||
.as_ref()
|
||||
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
|
||||
.create_token_stream(),
|
||||
AttrKind::DocComment(comment_kind, data) => TokenStream::from(TokenTree::Token(
|
||||
Token::new(token::DocComment(comment_kind, self.style, data), self.span),
|
||||
AttrKind::DocComment(comment_kind, data) => AttrAnnotatedTokenStream::from((
|
||||
AttrAnnotatedTokenTree::Token(Token::new(
|
||||
token::DocComment(comment_kind, self.style, data),
|
||||
self.span,
|
||||
)),
|
||||
Spacing::Alone,
|
||||
)),
|
||||
}
|
||||
}
|
||||
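// Editor's note: a small sketch (toy types, not the real rustc_ast API) of
// the behaviour in the hunk above: a normal attribute returns the
// attribute-annotated stream it captured at parse time, while a doc comment
// synthesizes a single token paired with `Spacing::Alone` on demand.

#[derive(Clone, Debug)]
enum Spacing {
    Alone,
}

#[derive(Clone, Debug)]
struct Token(String);

#[derive(Clone, Debug, Default)]
struct AnnotatedStream(Vec<(Token, Spacing)>);

enum AttrKind {
    Normal { captured: Option<AnnotatedStream> },
    DocComment { text: String },
}

struct Attr {
    kind: AttrKind,
}

impl Attr {
    fn tokens(&self) -> AnnotatedStream {
        match &self.kind {
            AttrKind::Normal { captured } => {
                captured.clone().expect("attribute is missing tokens")
            }
            AttrKind::DocComment { text } => AnnotatedStream(vec![(
                Token(format!("doc-comment: {}", text)),
                Spacing::Alone,
            )]),
        }
    }
}

fn main() {
    let doc = Attr { kind: AttrKind::DocComment { text: "Hello".into() } };
    assert_eq!(doc.tokens().0.len(), 1);
}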
@ -1,3 +1,3 @@
|
|||
//! Definitions shared by macros / syntax extensions and e.g. librustc_middle.
|
||||
//! Definitions shared by macros / syntax extensions and e.g. `rustc_middle`.
|
||||
|
||||
pub mod allocator;
|
||||
@ -14,9 +14,10 @@
|
|||
#![feature(const_fn_transmute)]
|
||||
#![feature(const_panic)]
|
||||
#![feature(crate_visibility_modifier)]
|
||||
#![feature(iter_zip)]
|
||||
#![feature(label_break_value)]
|
||||
#![feature(nll)]
|
||||
#![feature(or_patterns)]
|
||||
#![cfg_attr(bootstrap, feature(or_patterns))]
|
||||
#![recursion_limit = "256"]
|
||||
|
||||
#[macro_use]
|
||||
|
@ -42,7 +43,6 @@ pub mod util {
|
|||
pub mod ast;
|
||||
pub mod ast_like;
|
||||
pub mod attr;
|
||||
pub mod crate_disambiguator;
|
||||
pub mod entry;
|
||||
pub mod expand;
|
||||
pub mod mut_visit;
|
||||
|
@ -59,7 +59,7 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
|||
|
||||
/// Requirements for a `StableHashingContext` to be used in this crate.
|
||||
/// This is a hack to allow using the `HashStable_Generic` derive macro
|
||||
/// instead of implementing everything in librustc_middle.
|
||||
/// instead of implementing everything in `rustc_middle`.
|
||||
pub trait HashStableContext: rustc_span::HashStableContext {
|
||||
fn hash_attr(&mut self, _: &ast::Attribute, hasher: &mut StableHasher);
|
||||
}
|
||||
@ -102,8 +102,8 @@ pub trait MutVisitor: Sized {
|
|||
noop_visit_fn_header(header, self);
|
||||
}
|
||||
|
||||
fn flat_map_struct_field(&mut self, sf: StructField) -> SmallVec<[StructField; 1]> {
|
||||
noop_flat_map_struct_field(sf, self)
|
||||
fn flat_map_field_def(&mut self, fd: FieldDef) -> SmallVec<[FieldDef; 1]> {
|
||||
noop_flat_map_field_def(fd, self)
|
||||
}
|
||||
|
||||
fn visit_item_kind(&mut self, i: &mut ItemKind) {
|
||||
|
@ -254,8 +254,8 @@ pub trait MutVisitor: Sized {
|
|||
noop_visit_mt(mt, self);
|
||||
}
|
||||
|
||||
fn flat_map_field(&mut self, f: Field) -> SmallVec<[Field; 1]> {
|
||||
noop_flat_map_field(f, self)
|
||||
fn flat_map_expr_field(&mut self, f: ExprField) -> SmallVec<[ExprField; 1]> {
|
||||
noop_flat_map_expr_field(f, self)
|
||||
}
|
||||
|
||||
fn visit_where_clause(&mut self, where_clause: &mut WhereClause) {
|
||||
|
@ -278,8 +278,8 @@ pub trait MutVisitor: Sized {
|
|||
// Do nothing.
|
||||
}
|
||||
|
||||
fn flat_map_field_pattern(&mut self, fp: FieldPat) -> SmallVec<[FieldPat; 1]> {
|
||||
noop_flat_map_field_pattern(fp, self)
|
||||
fn flat_map_pat_field(&mut self, fp: PatField) -> SmallVec<[PatField; 1]> {
|
||||
noop_flat_map_pat_field(fp, self)
|
||||
}
|
||||
}
|
||||
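// Editor's note: a self-contained sketch (simplified stand-ins, not the real
// rustc_ast trait) of the renamed hooks above: a visitor now overrides
// `flat_map_field_def` instead of the old `flat_map_struct_field`, returning
// zero, one, or several replacement fields.

#[derive(Clone, Debug)]
struct FieldDef {
    name: String,
}

trait MutVisitorLike: Sized {
    fn flat_map_field_def(&mut self, fd: FieldDef) -> Vec<FieldDef> {
        vec![fd] // default: keep the field unchanged
    }
}

struct DropUnderscoreFields;

impl MutVisitorLike for DropUnderscoreFields {
    fn flat_map_field_def(&mut self, fd: FieldDef) -> Vec<FieldDef> {
        if fd.name.starts_with('_') { Vec::new() } else { vec![fd] }
    }
}

fn visit_struct_fields<V: MutVisitorLike>(fields: &mut Vec<FieldDef>, vis: &mut V) {
    // Analogous in spirit to `fields.flat_map_in_place(|f| vis.flat_map_field_def(f))`.
    *fields = std::mem::take(fields)
        .into_iter()
        .flat_map(|f| vis.flat_map_field_def(f))
        .collect();
}

fn main() {
    let mut fields = vec![
        FieldDef { name: "_hidden".into() },
        FieldDef { name: "visible".into() },
    ];
    visit_struct_fields(&mut fields, &mut DropUnderscoreFields);
    assert_eq!(fields.len(), 1);
}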
|
||||
|
@ -385,11 +385,11 @@ pub fn visit_delim_span<T: MutVisitor>(dspan: &mut DelimSpan, vis: &mut T) {
|
|||
vis.visit_span(&mut dspan.close);
|
||||
}
|
||||
|
||||
pub fn noop_flat_map_field_pattern<T: MutVisitor>(
|
||||
mut fp: FieldPat,
|
||||
pub fn noop_flat_map_pat_field<T: MutVisitor>(
|
||||
mut fp: PatField,
|
||||
vis: &mut T,
|
||||
) -> SmallVec<[FieldPat; 1]> {
|
||||
let FieldPat { attrs, id, ident, is_placeholder: _, is_shorthand: _, pat, span } = &mut fp;
|
||||
) -> SmallVec<[PatField; 1]> {
|
||||
let PatField { attrs, id, ident, is_placeholder: _, is_shorthand: _, pat, span } = &mut fp;
|
||||
vis.visit_id(id);
|
||||
vis.visit_ident(ident);
|
||||
vis.visit_pat(pat);
|
||||
|
@ -630,6 +630,33 @@ pub fn noop_flat_map_param<T: MutVisitor>(mut param: Param, vis: &mut T) -> Smal
|
|||
smallvec![param]
|
||||
}
|
||||
|
||||
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
|
||||
pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, vis: &mut T) {
|
||||
match tt {
|
||||
AttrAnnotatedTokenTree::Token(token) => {
|
||||
visit_token(token, vis);
|
||||
}
|
||||
AttrAnnotatedTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
|
||||
vis.visit_span(open);
|
||||
vis.visit_span(close);
|
||||
visit_attr_annotated_tts(tts, vis);
|
||||
}
|
||||
AttrAnnotatedTokenTree::Attributes(data) => {
|
||||
for attr in &mut *data.attrs {
|
||||
match &mut attr.kind {
|
||||
AttrKind::Normal(_, attr_tokens) => {
|
||||
visit_lazy_tts(attr_tokens, vis);
|
||||
}
|
||||
AttrKind::DocComment(..) => {
|
||||
vis.visit_span(&mut attr.span);
|
||||
}
|
||||
}
|
||||
}
|
||||
visit_lazy_tts_opt_mut(Some(&mut data.tokens), vis);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
|
||||
pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
|
||||
match tt {
|
||||
|
@ -652,16 +679,30 @@ pub fn visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T)
|
|||
}
|
||||
}
|
||||
|
||||
pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
|
||||
if vis.token_visiting_enabled() {
|
||||
visit_opt(lazy_tts, |lazy_tts| {
|
||||
let mut tts = lazy_tts.create_token_stream();
|
||||
visit_tts(&mut tts, vis);
|
||||
*lazy_tts = LazyTokenStream::new(tts);
|
||||
})
|
||||
pub fn visit_attr_annotated_tts<T: MutVisitor>(
|
||||
AttrAnnotatedTokenStream(tts): &mut AttrAnnotatedTokenStream,
|
||||
vis: &mut T,
|
||||
) {
|
||||
if vis.token_visiting_enabled() && !tts.is_empty() {
|
||||
let tts = Lrc::make_mut(tts);
|
||||
visit_vec(tts, |(tree, _is_joint)| visit_attr_annotated_tt(tree, vis));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStream>, vis: &mut T) {
|
||||
if vis.token_visiting_enabled() {
|
||||
if let Some(lazy_tts) = lazy_tts {
|
||||
let mut tts = lazy_tts.create_token_stream();
|
||||
visit_attr_annotated_tts(&mut tts, vis);
|
||||
*lazy_tts = LazyTokenStream::new(tts);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
|
||||
visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
|
||||
}
|
||||
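// Editor's note: a simplified, self-contained sketch of the recursion in
// `visit_attr_annotated_tt` above (the `Tree` enum and string tokens here are
// hypothetical): plain tokens get the token callback, delimited groups
// recurse into their contents, and the `Attributes` variant visits both the
// attributes and the captured tokens of the attribute target.

#[derive(Clone, Debug)]
enum Tree {
    Token(String),
    Delimited(Vec<Tree>),
    Attributes { attrs: Vec<String>, target: Vec<Tree> },
}

fn visit_tree(tree: &mut Tree, on_token: fn(&mut String)) {
    match tree {
        Tree::Token(tok) => on_token(tok),
        Tree::Delimited(trees) => {
            for t in trees {
                visit_tree(t, on_token);
            }
        }
        Tree::Attributes { attrs, target } => {
            // Visit the attributes themselves, then recurse into the
            // captured tokens of the attribute target.
            for a in attrs {
                on_token(a);
            }
            for t in target {
                visit_tree(t, on_token);
            }
        }
    }
}

fn main() {
    let mut tree = Tree::Attributes {
        attrs: vec!["#[cfg(unix)]".into()],
        target: vec![Tree::Delimited(vec![Tree::Token("struct".into())])],
    };
    visit_tree(&mut tree, |tok: &mut String| tok.make_ascii_uppercase());
}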
|
||||
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
|
||||
// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
|
||||
// In practice the ident part is not actually used by specific visitors right now,
|
||||
|
@ -842,10 +883,10 @@ pub fn noop_visit_where_predicate<T: MutVisitor>(pred: &mut WherePredicate, vis:
|
|||
pub fn noop_visit_variant_data<T: MutVisitor>(vdata: &mut VariantData, vis: &mut T) {
|
||||
match vdata {
|
||||
VariantData::Struct(fields, ..) => {
|
||||
fields.flat_map_in_place(|field| vis.flat_map_struct_field(field));
|
||||
fields.flat_map_in_place(|field| vis.flat_map_field_def(field));
|
||||
}
|
||||
VariantData::Tuple(fields, id) => {
|
||||
fields.flat_map_in_place(|field| vis.flat_map_struct_field(field));
|
||||
fields.flat_map_in_place(|field| vis.flat_map_field_def(field));
|
||||
vis.visit_id(id);
|
||||
}
|
||||
VariantData::Unit(id) => vis.visit_id(id),
|
||||
|
@ -864,22 +905,25 @@ pub fn noop_visit_poly_trait_ref<T: MutVisitor>(p: &mut PolyTraitRef, vis: &mut
|
|||
vis.visit_span(span);
|
||||
}
|
||||
|
||||
pub fn noop_flat_map_struct_field<T: MutVisitor>(
|
||||
mut sf: StructField,
|
||||
pub fn noop_flat_map_field_def<T: MutVisitor>(
|
||||
mut fd: FieldDef,
|
||||
visitor: &mut T,
|
||||
) -> SmallVec<[StructField; 1]> {
|
||||
let StructField { span, ident, vis, id, ty, attrs, is_placeholder: _ } = &mut sf;
|
||||
) -> SmallVec<[FieldDef; 1]> {
|
||||
let FieldDef { span, ident, vis, id, ty, attrs, is_placeholder: _ } = &mut fd;
|
||||
visitor.visit_span(span);
|
||||
visit_opt(ident, |ident| visitor.visit_ident(ident));
|
||||
visitor.visit_vis(vis);
|
||||
visitor.visit_id(id);
|
||||
visitor.visit_ty(ty);
|
||||
visit_attrs(attrs, visitor);
|
||||
smallvec![sf]
|
||||
smallvec![fd]
|
||||
}
|
||||
|
||||
pub fn noop_flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> SmallVec<[Field; 1]> {
|
||||
let Field { ident, expr, span, is_shorthand: _, attrs, id, is_placeholder: _ } = &mut f;
|
||||
pub fn noop_flat_map_expr_field<T: MutVisitor>(
|
||||
mut f: ExprField,
|
||||
vis: &mut T,
|
||||
) -> SmallVec<[ExprField; 1]> {
|
||||
let ExprField { ident, expr, span, is_shorthand: _, attrs, id, is_placeholder: _ } = &mut f;
|
||||
vis.visit_ident(ident);
|
||||
vis.visit_expr(expr);
|
||||
vis.visit_id(id);
|
||||
|
@ -1102,7 +1146,7 @@ pub fn noop_visit_pat<T: MutVisitor>(pat: &mut P<Pat>, vis: &mut T) {
|
|||
}
|
||||
PatKind::Struct(path, fields, _etc) => {
|
||||
vis.visit_path(path);
|
||||
fields.flat_map_in_place(|field| vis.flat_map_field_pattern(field));
|
||||
fields.flat_map_in_place(|field| vis.flat_map_pat_field(field));
|
||||
}
|
||||
PatKind::Box(inner) => vis.visit_pat(inner),
|
||||
PatKind::Ref(inner, _mutbl) => vis.visit_pat(inner),
|
||||
|
@ -1249,7 +1293,6 @@ pub fn noop_visit_expr<T: MutVisitor>(
|
|||
match op {
|
||||
InlineAsmOperand::In { expr, .. }
|
||||
| InlineAsmOperand::InOut { expr, .. }
|
||||
| InlineAsmOperand::Const { expr, .. }
|
||||
| InlineAsmOperand::Sym { expr, .. } => vis.visit_expr(expr),
|
||||
InlineAsmOperand::Out { expr, .. } => {
|
||||
if let Some(expr) = expr {
|
||||
|
@ -1262,6 +1305,7 @@ pub fn noop_visit_expr<T: MutVisitor>(
|
|||
vis.visit_expr(out_expr);
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::Const { anon_const, .. } => vis.visit_anon_const(anon_const),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1283,10 +1327,11 @@ pub fn noop_visit_expr<T: MutVisitor>(
|
|||
visit_vec(inputs, |(_c, expr)| vis.visit_expr(expr));
|
||||
}
|
||||
ExprKind::MacCall(mac) => vis.visit_mac_call(mac),
|
||||
ExprKind::Struct(path, fields, expr) => {
|
||||
ExprKind::Struct(se) => {
|
||||
let StructExpr { path, fields, rest } = se.deref_mut();
|
||||
vis.visit_path(path);
|
||||
fields.flat_map_in_place(|field| vis.flat_map_field(field));
|
||||
match expr {
|
||||
fields.flat_map_in_place(|field| vis.flat_map_expr_field(field));
|
||||
match rest {
|
||||
StructRest::Base(expr) => vis.visit_expr(expr),
|
||||
StructRest::Rest(_span) => {}
|
||||
StructRest::None => {}
|
||||
@ -11,11 +11,9 @@ use crate::tokenstream::TokenTree;
|
|||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_macros::HashStable_Generic;
|
||||
use rustc_span::hygiene::ExpnKind;
|
||||
use rustc_span::source_map::SourceMap;
|
||||
use rustc_span::symbol::{kw, sym};
|
||||
use rustc_span::symbol::{Ident, Symbol};
|
||||
use rustc_span::{self, edition::Edition, FileName, RealFileName, Span, DUMMY_SP};
|
||||
use rustc_span::{self, edition::Edition, Span, DUMMY_SP};
|
||||
use std::borrow::Cow;
|
||||
use std::{fmt, mem};
|
||||
|
||||
|
@ -244,7 +242,7 @@ pub enum TokenKind {
|
|||
}
|
||||
|
||||
// `TokenKind` is used a lot. Make sure it doesn't unintentionally get bigger.
|
||||
#[cfg(target_arch = "x86_64")]
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
rustc_data_structures::static_assert_size!(TokenKind, 16);
|
||||
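// Editor's note: a compact illustration of the size-assertion idiom above,
// written with plain `std::mem::size_of` rather than rustc's internal
// `static_assert_size!` macro. Gating on both the architecture and a 64-bit
// pointer width (as the hunk does) keeps the check from firing on targets
// such as x86_64-unknown-linux-gnux32, where `target_arch` is "x86_64" but
// pointers are 32 bits wide.

#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
const _: () = assert!(std::mem::size_of::<Option<Box<u64>>>() == 8);

fn main() {}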
|
||||
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
|
||||
|
@ -682,7 +680,7 @@ pub enum Nonterminal {
|
|||
}
|
||||
|
||||
// `Nonterminal` is used a lot. Make sure it doesn't unintentionally get bigger.
|
||||
#[cfg(target_arch = "x86_64")]
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
rustc_data_structures::static_assert_size!(Nonterminal, 48);
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable)]
|
||||
|
@ -690,13 +688,13 @@ pub enum NonterminalKind {
|
|||
Item,
|
||||
Block,
|
||||
Stmt,
|
||||
Pat2018 {
|
||||
/// Keep track of whether the user used `:pat2018` or `:pat` and we inferred it from the
|
||||
Pat2015 {
|
||||
/// Keep track of whether the user used `:pat2015` or `:pat` and we inferred it from the
|
||||
/// edition of the span. This is used for diagnostics.
|
||||
inferred: bool,
|
||||
},
|
||||
Pat2021 {
|
||||
/// Keep track of whether the user used `:pat2018` or `:pat` and we inferred it from the
|
||||
/// Keep track of whether the user used `:pat2015` or `:pat` and we inferred it from the
|
||||
/// edition of the span. This is used for diagnostics.
|
||||
inferred: bool,
|
||||
},
|
||||
|
@ -724,11 +722,11 @@ impl NonterminalKind {
|
|||
sym::stmt => NonterminalKind::Stmt,
|
||||
sym::pat => match edition() {
|
||||
Edition::Edition2015 | Edition::Edition2018 => {
|
||||
NonterminalKind::Pat2018 { inferred: true }
|
||||
NonterminalKind::Pat2015 { inferred: true }
|
||||
}
|
||||
Edition::Edition2021 => NonterminalKind::Pat2021 { inferred: true },
|
||||
},
|
||||
sym::pat2018 => NonterminalKind::Pat2018 { inferred: false },
|
||||
sym::pat2015 => NonterminalKind::Pat2015 { inferred: false },
|
||||
sym::pat2021 => NonterminalKind::Pat2021 { inferred: false },
|
||||
sym::expr => NonterminalKind::Expr,
|
||||
sym::ty => NonterminalKind::Ty,
|
||||
|
@ -747,9 +745,9 @@ impl NonterminalKind {
|
|||
NonterminalKind::Item => sym::item,
|
||||
NonterminalKind::Block => sym::block,
|
||||
NonterminalKind::Stmt => sym::stmt,
|
||||
NonterminalKind::Pat2018 { inferred: false } => sym::pat2018,
|
||||
NonterminalKind::Pat2015 { inferred: false } => sym::pat2015,
|
||||
NonterminalKind::Pat2021 { inferred: false } => sym::pat2021,
|
||||
NonterminalKind::Pat2018 { inferred: true }
|
||||
NonterminalKind::Pat2015 { inferred: true }
|
||||
| NonterminalKind::Pat2021 { inferred: true } => sym::pat,
|
||||
NonterminalKind::Expr => sym::expr,
|
||||
NonterminalKind::Ty => sym::ty,
|
||||
|
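// Editor's note: a hypothetical, self-contained sketch of the edition
// dispatch above: a bare `:pat` fragment resolves to the 2015/2018 behaviour
// or to the 2021 behaviour depending on the edition of the relevant span,
// while `:pat2015` / `:pat2021` pin the behaviour explicitly.

#[derive(Clone, Copy, Debug, PartialEq)]
enum Edition { E2015, E2018, E2021 }

#[derive(Clone, Copy, Debug, PartialEq)]
enum PatKindSpec {
    Pat2015 { inferred: bool },
    Pat2021 { inferred: bool },
}

fn resolve_pat_spec(spec: &str, edition: Edition) -> Option<PatKindSpec> {
    Some(match spec {
        "pat" => match edition {
            Edition::E2015 | Edition::E2018 => PatKindSpec::Pat2015 { inferred: true },
            Edition::E2021 => PatKindSpec::Pat2021 { inferred: true },
        },
        "pat2015" => PatKindSpec::Pat2015 { inferred: false },
        "pat2021" => PatKindSpec::Pat2021 { inferred: false },
        _ => return None,
    })
}

fn main() {
    assert_eq!(
        resolve_pat_spec("pat", Edition::E2018),
        Some(PatKindSpec::Pat2015 { inferred: true })
    );
    assert_eq!(
        resolve_pat_spec("pat", Edition::E2021),
        Some(PatKindSpec::Pat2021 { inferred: true })
    );
}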
@ -786,79 +784,6 @@ impl Nonterminal {
|
|||
NtTT(tt) => tt.span(),
|
||||
}
|
||||
}
|
||||
|
||||
/// This nonterminal looks like some specific enums from
|
||||
/// `proc-macro-hack` and `procedural-masquerade` crates.
|
||||
/// We need to maintain some special pretty-printing behavior for them due to incorrect
|
||||
/// asserts in old versions of those crates and their wide use in the ecosystem.
|
||||
/// See issue #73345 for more details.
|
||||
/// FIXME(#73933): Remove this eventually.
|
||||
pub fn pretty_printing_compatibility_hack(&self) -> bool {
|
||||
let item = match self {
|
||||
NtItem(item) => item,
|
||||
NtStmt(stmt) => match &stmt.kind {
|
||||
ast::StmtKind::Item(item) => item,
|
||||
_ => return false,
|
||||
},
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
let name = item.ident.name;
|
||||
if name == sym::ProceduralMasqueradeDummyType || name == sym::ProcMacroHack {
|
||||
if let ast::ItemKind::Enum(enum_def, _) = &item.kind {
|
||||
if let [variant] = &*enum_def.variants {
|
||||
return variant.ident.name == sym::Input;
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
// See issue #74616 for details
|
||||
pub fn ident_name_compatibility_hack(
|
||||
&self,
|
||||
orig_span: Span,
|
||||
source_map: &SourceMap,
|
||||
) -> Option<(Ident, bool)> {
|
||||
if let NtIdent(ident, is_raw) = self {
|
||||
if let ExpnKind::Macro(_, macro_name) = orig_span.ctxt().outer_expn_data().kind {
|
||||
let filename = source_map.span_to_filename(orig_span);
|
||||
if let FileName::Real(RealFileName::Named(path)) = filename {
|
||||
let matches_prefix = |prefix, filename| {
|
||||
// Check for a path that ends with 'prefix*/src/<filename>'
|
||||
let mut iter = path.components().rev();
|
||||
iter.next().and_then(|p| p.as_os_str().to_str()) == Some(filename)
|
||||
&& iter.next().and_then(|p| p.as_os_str().to_str()) == Some("src")
|
||||
&& iter
|
||||
.next()
|
||||
.and_then(|p| p.as_os_str().to_str())
|
||||
.map_or(false, |p| p.starts_with(prefix))
|
||||
};
|
||||
|
||||
if (macro_name == sym::impl_macros
|
||||
&& matches_prefix("time-macros-impl", "lib.rs"))
|
||||
|| (macro_name == sym::arrays && matches_prefix("js-sys", "lib.rs"))
|
||||
{
|
||||
let snippet = source_map.span_to_snippet(orig_span);
|
||||
if snippet.as_deref() == Ok("$name") {
|
||||
return Some((*ident, *is_raw));
|
||||
}
|
||||
}
|
||||
|
||||
if macro_name == sym::tuple_from_req
|
||||
&& (matches_prefix("actix-web", "extract.rs")
|
||||
|| matches_prefix("actori-web", "extract.rs"))
|
||||
{
|
||||
let snippet = source_map.span_to_snippet(orig_span);
|
||||
if snippet.as_deref() == Ok("$T") {
|
||||
return Some((*ident, *is_raw));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Nonterminal {
|
||||
@ -14,6 +14,7 @@
|
|||
//! ownership of the original.
|
||||
|
||||
use crate::token::{self, DelimToken, Token, TokenKind};
|
||||
use crate::AttrVec;
|
||||
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||
use rustc_data_structures::sync::{self, Lrc};
|
||||
|
@ -89,10 +90,6 @@ impl TokenTree {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn joint(self) -> TokenStream {
|
||||
TokenStream::new(vec![(self, Spacing::Joint)])
|
||||
}
|
||||
|
||||
pub fn token(kind: TokenKind, span: Span) -> TokenTree {
|
||||
TokenTree::Token(Token::new(kind, span))
|
||||
}
|
||||
|
@ -127,11 +124,11 @@ where
|
|||
}
|
||||
|
||||
pub trait CreateTokenStream: sync::Send + sync::Sync {
|
||||
fn create_token_stream(&self) -> TokenStream;
|
||||
fn create_token_stream(&self) -> AttrAnnotatedTokenStream;
|
||||
}
|
||||
|
||||
impl CreateTokenStream for TokenStream {
|
||||
fn create_token_stream(&self) -> TokenStream {
|
||||
impl CreateTokenStream for AttrAnnotatedTokenStream {
|
||||
fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
@ -147,14 +144,14 @@ impl LazyTokenStream {
|
|||
LazyTokenStream(Lrc::new(Box::new(inner)))
|
||||
}
|
||||
|
||||
pub fn create_token_stream(&self) -> TokenStream {
|
||||
pub fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
|
||||
self.0.create_token_stream()
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for LazyTokenStream {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Debug::fmt("LazyTokenStream", f)
|
||||
write!(f, "LazyTokenStream({:?})", self.create_token_stream())
|
||||
}
|
||||
}
|
||||
|
||||
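// Editor's note: a simplified sketch (hypothetical types, `Arc` in place of
// the compiler's `Lrc`) of the lazy-capture pattern above: callers hold a
// cheap, clonable handle whose `create_stream` recomputes or clones the
// captured stream only when cfg-expansion or a proc macro actually needs it.

use std::sync::Arc;

#[derive(Clone, Debug, Default)]
struct Stream(Vec<String>);

trait CreateStream: Send + Sync {
    fn create_stream(&self) -> Stream;
}

// The simplest source of tokens: an already materialized stream.
impl CreateStream for Stream {
    fn create_stream(&self) -> Stream {
        self.clone()
    }
}

#[derive(Clone)]
struct LazyStream(Arc<dyn CreateStream>);

impl LazyStream {
    fn new(inner: impl CreateStream + 'static) -> Self {
        LazyStream(Arc::new(inner))
    }
    fn create_stream(&self) -> Stream {
        self.0.create_stream()
    }
}

fn main() {
    let lazy = LazyStream::new(Stream(vec!["struct".into(), "Foo".into()]));
    assert_eq!(lazy.create_stream().0.len(), 2);
}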
|
@ -177,19 +174,158 @@ impl<CTX> HashStable<CTX> for LazyTokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
/// An `AttrAnnotatedTokenStream` is similar to a `TokenStream`, but with extra
|
||||
/// information about the tokens for attribute targets. This is used
|
||||
/// during expansion to perform early cfg-expansion, and to process attributes
|
||||
/// during proc-macro invocations.
|
||||
#[derive(Clone, Debug, Default, Encodable, Decodable)]
|
||||
pub struct AttrAnnotatedTokenStream(pub Lrc<Vec<(AttrAnnotatedTokenTree, Spacing)>>);
|
||||
|
||||
/// Like `TokenTree`, but for `AttrAnnotatedTokenStream`
|
||||
#[derive(Clone, Debug, Encodable, Decodable)]
|
||||
pub enum AttrAnnotatedTokenTree {
|
||||
Token(Token),
|
||||
Delimited(DelimSpan, DelimToken, AttrAnnotatedTokenStream),
|
||||
/// Stores the attributes for an attribute target,
|
||||
/// along with the tokens for that attribute target.
|
||||
/// See `AttributesData` for more information
|
||||
Attributes(AttributesData),
|
||||
}
|
||||
|
||||
impl AttrAnnotatedTokenStream {
|
||||
pub fn new(tokens: Vec<(AttrAnnotatedTokenTree, Spacing)>) -> AttrAnnotatedTokenStream {
|
||||
AttrAnnotatedTokenStream(Lrc::new(tokens))
|
||||
}
|
||||
|
||||
    /// Converts this `AttrAnnotatedTokenStream` to a plain `TokenStream`.
|
||||
/// During conversion, `AttrAnnotatedTokenTree::Attributes` get 'flattened'
|
||||
/// back to a `TokenStream` of the form `outer_attr attr_target`.
|
||||
/// If there are inner attributes, they are inserted into the proper
|
||||
/// place in the attribute target tokens.
|
||||
pub fn to_tokenstream(&self) -> TokenStream {
|
||||
let trees: Vec<_> = self
|
||||
.0
|
||||
.iter()
|
||||
.flat_map(|tree| match &tree.0 {
|
||||
AttrAnnotatedTokenTree::Token(inner) => {
|
||||
smallvec![(TokenTree::Token(inner.clone()), tree.1)].into_iter()
|
||||
}
|
||||
AttrAnnotatedTokenTree::Delimited(span, delim, stream) => smallvec![(
|
||||
TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),
|
||||
tree.1,
|
||||
)]
|
||||
.into_iter(),
|
||||
AttrAnnotatedTokenTree::Attributes(data) => {
|
||||
let mut outer_attrs = Vec::new();
|
||||
let mut inner_attrs = Vec::new();
|
||||
let attrs: Vec<_> = data.attrs.clone().into();
|
||||
for attr in attrs {
|
||||
match attr.style {
|
||||
crate::AttrStyle::Outer => {
|
||||
assert!(
|
||||
inner_attrs.len() == 0,
|
||||
"Found outer attribute {:?} after inner attrs {:?}",
|
||||
attr,
|
||||
inner_attrs
|
||||
);
|
||||
outer_attrs.push(attr);
|
||||
}
|
||||
crate::AttrStyle::Inner => {
|
||||
inner_attrs.push(attr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut target_tokens: Vec<_> = data
|
||||
.tokens
|
||||
.create_token_stream()
|
||||
.to_tokenstream()
|
||||
.0
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect();
|
||||
if !inner_attrs.is_empty() {
|
||||
let mut found = false;
|
||||
// Check the last two trees (to account for a trailing semi)
|
||||
for (tree, _) in target_tokens.iter_mut().rev().take(2) {
|
||||
if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
|
||||
// Inner attributes are only supported on extern blocks, functions, impls,
|
||||
// and modules. All of these have their inner attributes placed at
|
||||
// the beginning of the rightmost outermost braced group:
|
||||
                            // e.g. fn foo() { #![my_attr] }
|
||||
//
|
||||
// Therefore, we can insert them back into the right location
|
||||
// without needing to do any extra position tracking.
|
||||
//
|
||||
// Note: Outline modules are an exception - they can
|
||||
// have attributes like `#![my_attr]` at the start of a file.
|
||||
// Support for custom attributes in this position is not
|
||||
// properly implemented - we always synthesize fake tokens,
|
||||
// so we never reach this code.
|
||||
|
||||
let mut builder = TokenStreamBuilder::new();
|
||||
for inner_attr in &inner_attrs {
|
||||
builder.push(inner_attr.tokens().to_tokenstream());
|
||||
}
|
||||
builder.push(delim_tokens.clone());
|
||||
*tree = TokenTree::Delimited(*span, *delim, builder.build());
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
assert!(
|
||||
found,
|
||||
"Failed to find trailing delimited group in: {:?}",
|
||||
target_tokens
|
||||
);
|
||||
}
|
||||
let mut flat: SmallVec<[_; 1]> = SmallVec::new();
|
||||
for attr in outer_attrs {
|
||||
// FIXME: Make this more efficient
|
||||
flat.extend(attr.tokens().to_tokenstream().0.clone().iter().cloned());
|
||||
}
|
||||
flat.extend(target_tokens);
|
||||
flat.into_iter()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
TokenStream::new(trees)
|
||||
}
|
||||
}
|
||||
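// Editor's note: a deliberately simplified sketch (plain strings instead of
// token trees) of the flattening performed by `to_tokenstream` above: outer
// attributes are emitted in front of the target's tokens, while inner
// attributes would be spliced into the target's final braced group (that part
// is elided here; see the `TokenStreamBuilder` logic in the hunk).

fn flatten(outer_attrs: &[String], target_tokens: &[String]) -> Vec<String> {
    let mut out: Vec<String> = outer_attrs.to_vec();
    out.extend_from_slice(target_tokens);
    out
}

fn main() {
    let flat = flatten(
        &["#[cfg(unix)]".to_string()],
        &["struct".to_string(), "Foo".to_string(), "{}".to_string()],
    );
    assert_eq!(flat.len(), 4);
}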
|
||||
/// Stores the tokens for an attribute target, along
|
||||
/// with its attributes.
|
||||
///
|
||||
/// This is constructed during parsing when we need to capture
|
||||
/// tokens.
|
||||
///
|
||||
/// For example, `#[cfg(FALSE)] struct Foo {}` would
|
||||
/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
|
||||
/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
|
||||
#[derive(Clone, Debug, Encodable, Decodable)]
|
||||
pub struct AttributesData {
|
||||
/// Attributes, both outer and inner.
|
||||
/// These are stored in the original order that they were parsed in.
|
||||
pub attrs: AttrVec,
|
||||
/// The underlying tokens for the attribute target that `attrs`
|
||||
/// are applied to
|
||||
pub tokens: LazyTokenStream,
|
||||
}
|
||||
|
||||
/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
|
||||
///
|
||||
/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
|
||||
/// instead of a representation of the abstract syntax tree.
|
||||
/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
|
||||
/// backwards compatability.
|
||||
/// backwards compatibility.
|
||||
#[derive(Clone, Debug, Default, Encodable, Decodable)]
|
||||
pub struct TokenStream(pub(crate) Lrc<Vec<TreeAndSpacing>>);
|
||||
|
||||
pub type TreeAndSpacing = (TokenTree, Spacing);
|
||||
|
||||
// `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
|
||||
#[cfg(target_arch = "x86_64")]
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
rustc_data_structures::static_assert_size!(TokenStream, 8);
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable)]
|
||||
|
@ -239,6 +375,12 @@ impl TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<(AttrAnnotatedTokenTree, Spacing)> for AttrAnnotatedTokenStream {
|
||||
fn from((tree, spacing): (AttrAnnotatedTokenTree, Spacing)) -> AttrAnnotatedTokenStream {
|
||||
AttrAnnotatedTokenStream::new(vec![(tree, spacing)])
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TokenTree> for TokenStream {
|
||||
fn from(tree: TokenTree) -> TokenStream {
|
||||
TokenStream::new(vec![(tree, Spacing::Alone)])
|
||||
|
@ -278,14 +420,6 @@ impl TokenStream {
|
|||
self.0.len()
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Option<Span> {
|
||||
match &**self.0 {
|
||||
[] => None,
|
||||
[(tt, _)] => Some(tt.span()),
|
||||
[(tt_start, _), .., (tt_end, _)] => Some(tt_start.span().to(tt_end.span())),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
|
||||
match streams.len() {
|
||||
0 => TokenStream::default(),
|
||||
|
@ -325,10 +459,6 @@ impl TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn trees_ref(&self) -> CursorRef<'_> {
|
||||
CursorRef::new(self)
|
||||
}
|
||||
|
||||
pub fn trees(&self) -> Cursor {
|
||||
self.clone().into_trees()
|
||||
}
|
||||
|
@ -341,7 +471,7 @@ impl TokenStream {
|
|||
pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
|
||||
let mut t1 = self.trees();
|
||||
let mut t2 = other.trees();
|
||||
for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
|
||||
for (t1, t2) in iter::zip(&mut t1, &mut t2) {
|
||||
if !t1.eq_unspanned(&t2) {
|
||||
return false;
|
||||
}
|
||||
|
@ -427,10 +557,6 @@ pub struct CursorRef<'t> {
|
|||
}
|
||||
|
||||
impl<'t> CursorRef<'t> {
|
||||
fn new(stream: &TokenStream) -> CursorRef<'_> {
|
||||
CursorRef { stream, index: 0 }
|
||||
}
|
||||
|
||||
fn next_with_spacing(&mut self) -> Option<&'t TreeAndSpacing> {
|
||||
self.stream.0.get(self.index).map(|tree| {
|
||||
self.index += 1;
|
||||
|
@ -477,6 +603,10 @@ impl Cursor {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn index(&self) -> usize {
|
||||
self.index
|
||||
}
|
||||
|
||||
pub fn append(&mut self, new_stream: TokenStream) {
|
||||
if new_stream.is_empty() {
|
||||
return;
|
||||
@ -151,8 +151,8 @@ pub trait Visitor<'ast>: Sized {
|
|||
fn visit_variant_data(&mut self, s: &'ast VariantData) {
|
||||
walk_struct_def(self, s)
|
||||
}
|
||||
fn visit_struct_field(&mut self, s: &'ast StructField) {
|
||||
walk_struct_field(self, s)
|
||||
fn visit_field_def(&mut self, s: &'ast FieldDef) {
|
||||
walk_field_def(self, s)
|
||||
}
|
||||
fn visit_enum_def(
|
||||
&mut self,
|
||||
|
@ -208,11 +208,11 @@ pub trait Visitor<'ast>: Sized {
|
|||
fn visit_fn_header(&mut self, _header: &'ast FnHeader) {
|
||||
// Nothing to do
|
||||
}
|
||||
fn visit_field(&mut self, f: &'ast Field) {
|
||||
walk_field(self, f)
|
||||
fn visit_expr_field(&mut self, f: &'ast ExprField) {
|
||||
walk_expr_field(self, f)
|
||||
}
|
||||
fn visit_field_pattern(&mut self, fp: &'ast FieldPat) {
|
||||
walk_field_pattern(self, fp)
|
||||
fn visit_pat_field(&mut self, fp: &'ast PatField) {
|
||||
walk_pat_field(self, fp)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -364,13 +364,13 @@ where
|
|||
walk_list!(visitor, visit_attribute, &variant.attrs);
|
||||
}
|
||||
|
||||
pub fn walk_field<'a, V: Visitor<'a>>(visitor: &mut V, f: &'a Field) {
|
||||
pub fn walk_expr_field<'a, V: Visitor<'a>>(visitor: &mut V, f: &'a ExprField) {
|
||||
visitor.visit_expr(&f.expr);
|
||||
visitor.visit_ident(f.ident);
|
||||
walk_list!(visitor, visit_attribute, f.attrs.iter());
|
||||
}
|
||||
|
||||
pub fn walk_field_pattern<'a, V: Visitor<'a>>(visitor: &mut V, fp: &'a FieldPat) {
|
||||
pub fn walk_pat_field<'a, V: Visitor<'a>>(visitor: &mut V, fp: &'a PatField) {
|
||||
visitor.visit_ident(fp.ident);
|
||||
visitor.visit_pat(&fp.pat);
|
||||
walk_list!(visitor, visit_attribute, fp.attrs.iter());
|
||||
|
@ -509,7 +509,7 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) {
|
|||
}
|
||||
PatKind::Struct(ref path, ref fields, _) => {
|
||||
visitor.visit_path(path, pattern.id);
|
||||
walk_list!(visitor, visit_field_pattern, fields);
|
||||
walk_list!(visitor, visit_pat_field, fields);
|
||||
}
|
||||
PatKind::Box(ref subpattern)
|
||||
| PatKind::Ref(ref subpattern, _)
|
||||
|
@ -668,16 +668,16 @@ pub fn walk_assoc_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a AssocItem,
|
|||
}
|
||||
|
||||
pub fn walk_struct_def<'a, V: Visitor<'a>>(visitor: &mut V, struct_definition: &'a VariantData) {
|
||||
walk_list!(visitor, visit_struct_field, struct_definition.fields());
|
||||
walk_list!(visitor, visit_field_def, struct_definition.fields());
|
||||
}
|
||||
|
||||
pub fn walk_struct_field<'a, V: Visitor<'a>>(visitor: &mut V, struct_field: &'a StructField) {
|
||||
visitor.visit_vis(&struct_field.vis);
|
||||
if let Some(ident) = struct_field.ident {
|
||||
pub fn walk_field_def<'a, V: Visitor<'a>>(visitor: &mut V, field: &'a FieldDef) {
|
||||
visitor.visit_vis(&field.vis);
|
||||
if let Some(ident) = field.ident {
|
||||
visitor.visit_ident(ident);
|
||||
}
|
||||
visitor.visit_ty(&struct_field.ty);
|
||||
walk_list!(visitor, visit_attribute, &struct_field.attrs);
|
||||
visitor.visit_ty(&field.ty);
|
||||
walk_list!(visitor, visit_attribute, &field.attrs);
|
||||
}
|
||||
|
||||
pub fn walk_block<'a, V: Visitor<'a>>(visitor: &mut V, block: &'a Block) {
|
||||
|
@ -721,10 +721,10 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
|
|||
visitor.visit_expr(element);
|
||||
visitor.visit_anon_const(count)
|
||||
}
|
||||
ExprKind::Struct(ref path, ref fields, ref optional_base) => {
|
||||
visitor.visit_path(path, expression.id);
|
||||
walk_list!(visitor, visit_field, fields);
|
||||
match optional_base {
|
||||
ExprKind::Struct(ref se) => {
|
||||
visitor.visit_path(&se.path, expression.id);
|
||||
walk_list!(visitor, visit_expr_field, &se.fields);
|
||||
match &se.rest {
|
||||
StructRest::Base(expr) => visitor.visit_expr(expr),
|
||||
StructRest::Rest(_span) => {}
|
||||
StructRest::None => {}
|
||||
|
@ -835,7 +835,6 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
|
|||
match op {
|
||||
InlineAsmOperand::In { expr, .. }
|
||||
| InlineAsmOperand::InOut { expr, .. }
|
||||
| InlineAsmOperand::Const { expr, .. }
|
||||
| InlineAsmOperand::Sym { expr, .. } => visitor.visit_expr(expr),
|
||||
InlineAsmOperand::Out { expr, .. } => {
|
||||
if let Some(expr) = expr {
|
||||
|
@ -848,6 +847,9 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
|
|||
visitor.visit_expr(out_expr);
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::Const { anon_const, .. } => {
|
||||
visitor.visit_anon_const(anon_const)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -97,6 +97,23 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
ExprKind::Let(ref pat, ref scrutinee) => {
|
||||
self.lower_expr_if_let(e.span, pat, scrutinee, then, else_opt.as_deref())
|
||||
}
|
||||
ExprKind::Paren(ref paren) => match paren.peel_parens().kind {
|
||||
ExprKind::Let(ref pat, ref scrutinee) => {
|
||||
// A user has written `if (let Some(x) = foo) {`, we want to avoid
|
||||
// confusing them with mentions of nightly features.
|
||||
// If this logic is changed, you will also likely need to touch
|
||||
// `unused::UnusedParens::check_expr`.
|
||||
self.if_let_expr_with_parens(cond, &paren.peel_parens());
|
||||
self.lower_expr_if_let(
|
||||
e.span,
|
||||
pat,
|
||||
scrutinee,
|
||||
then,
|
||||
else_opt.as_deref(),
|
||||
)
|
||||
}
|
||||
_ => self.lower_expr_if(cond, then, else_opt.as_deref()),
|
||||
},
|
||||
_ => self.lower_expr_if(cond, then, else_opt.as_deref()),
|
||||
},
|
||||
ExprKind::While(ref cond, ref body, opt_label) => self
|
||||
|
@ -207,8 +224,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
ExprKind::InlineAsm(ref asm) => self.lower_expr_asm(e.span, asm),
|
||||
ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
|
||||
ExprKind::Struct(ref path, ref fields, ref rest) => {
|
||||
let rest = match rest {
|
||||
ExprKind::Struct(ref se) => {
|
||||
let rest = match &se.rest {
|
||||
StructRest::Base(e) => Some(self.lower_expr(e)),
|
||||
StructRest::Rest(sp) => {
|
||||
self.sess
|
||||
|
@ -223,11 +240,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
self.arena.alloc(self.lower_qpath(
|
||||
e.id,
|
||||
&None,
|
||||
path,
|
||||
&se.path,
|
||||
ParamMode::Optional,
|
||||
ImplTraitContext::disallowed(),
|
||||
)),
|
||||
self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))),
|
||||
self.arena
|
||||
.alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
|
||||
rest,
|
||||
)
|
||||
}
|
||||
|
@ -241,9 +259,18 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
ex.span = e.span;
|
||||
}
|
||||
// Merge attributes into the inner expression.
|
||||
let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
|
||||
attrs.extend::<Vec<_>>(ex.attrs.into());
|
||||
ex.attrs = attrs.into();
|
||||
if !e.attrs.is_empty() {
|
||||
let old_attrs = self.attrs.get(&ex.hir_id).map(|la| *la).unwrap_or(&[]);
|
||||
self.attrs.insert(
|
||||
ex.hir_id,
|
||||
&*self.arena.alloc_from_iter(
|
||||
e.attrs
|
||||
.iter()
|
||||
.map(|a| self.lower_attr(a))
|
||||
.chain(old_attrs.iter().cloned()),
|
||||
),
|
||||
);
|
||||
}
|
||||
return ex;
|
||||
}
|
||||
|
||||
|
@ -255,12 +282,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
|
||||
};
|
||||
|
||||
hir::Expr {
|
||||
hir_id: self.lower_node_id(e.id),
|
||||
kind,
|
||||
span: e.span,
|
||||
attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
|
||||
}
|
||||
let hir_id = self.lower_node_id(e.id);
|
||||
self.lower_attrs(hir_id, &e.attrs);
|
||||
hir::Expr { hir_id, kind, span: e.span }
|
||||
})
|
||||
}
|
||||
|
||||
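// Editor's note: a sketch of the side-table pattern above, using a std
// HashMap in place of the arena-backed map in the compiler: instead of
// storing attributes on every lowered node, nodes keep only an id and the
// attributes live in a map keyed by that id, merged when an outer
// expression's attributes fold into an inner one.

use std::collections::HashMap;

type NodeId = u32;

#[derive(Default)]
struct AttrTable {
    attrs: HashMap<NodeId, Vec<String>>,
}

impl AttrTable {
    // Analogous in spirit to chaining the newly lowered attributes with the
    // already-recorded attributes of the inner expression in the hunk above.
    fn prepend(&mut self, id: NodeId, new_attrs: &[String]) {
        if new_attrs.is_empty() {
            return;
        }
        let old = self.attrs.remove(&id).unwrap_or_default();
        let merged: Vec<String> = new_attrs.iter().cloned().chain(old).collect();
        self.attrs.insert(id, merged);
    }
}

fn main() {
    let mut table = AttrTable::default();
    table.prepend(7, &["#[inline]".to_string()]);
    table.prepend(7, &["#[allow(unused)]".to_string()]);
    assert_eq!(table.attrs[&7].len(), 2);
}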
|
@ -346,6 +370,25 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
hir::ExprKind::Call(f, self.lower_exprs(&real_args))
|
||||
}
|
||||
|
||||
fn if_let_expr_with_parens(&mut self, cond: &Expr, paren: &Expr) {
|
||||
let start = cond.span.until(paren.span);
|
||||
let end = paren.span.shrink_to_hi().until(cond.span.shrink_to_hi());
|
||||
self.sess
|
||||
.struct_span_err(
|
||||
vec![start, end],
|
||||
"invalid parentheses around `let` expression in `if let`",
|
||||
)
|
||||
.multipart_suggestion(
|
||||
"`if let` needs to be written without parentheses",
|
||||
vec![(start, String::new()), (end, String::new())],
|
||||
rustc_errors::Applicability::MachineApplicable,
|
||||
)
|
||||
.emit();
|
||||
// Ideally, we'd remove the feature gating of a `let` expression since we are already
|
||||
// complaining about it here, but `feature_gate::check_crate` has already run by now:
|
||||
// self.sess.parse_sess.gated_spans.ungate_last(sym::let_chains, paren.span);
|
||||
}
|
||||
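// Editor's note: the user-facing shape of the case handled above (the
// function and variable names here are hypothetical). The parenthesized form
// is rejected with the machine-applicable suggestion to drop the parentheses.
//
//     // error: invalid parentheses around `let` expression in `if let`
//     // if (let Some(x) = foo) { ... }

fn example(foo: Option<i32>) {
    // The accepted, suggested form:
    if let Some(x) = foo {
        let _ = x;
    }
}

fn main() {
    example(Some(1));
}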
|
||||
/// Emit an error and lower `ast::ExprKind::Let(pat, scrutinee)` into:
|
||||
/// ```rust
|
||||
/// match scrutinee { pats => true, _ => false }
|
||||
|
@ -356,8 +399,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
if self.sess.opts.unstable_features.is_nightly_build() {
|
||||
self.sess
|
||||
.struct_span_err(span, "`let` expressions are not supported here")
|
||||
.note("only supported directly in conditions of `if`- and `while`-expressions")
|
||||
.note("as well as when nested within `&&` and parenthesis in those conditions")
|
||||
.note(
|
||||
"only supported directly without parentheses in conditions of `if`- and \
|
||||
`while`-expressions, as well as in `let` chains within parentheses",
|
||||
)
|
||||
.emit();
|
||||
} else {
|
||||
self.sess
|
||||
|
@ -580,14 +625,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
hir::Guard::If(self.lower_expr(cond))
|
||||
}
|
||||
});
|
||||
hir::Arm {
|
||||
hir_id: self.next_id(),
|
||||
attrs: self.lower_attrs(&arm.attrs),
|
||||
pat,
|
||||
guard,
|
||||
body: self.lower_expr(&arm.body),
|
||||
span: arm.span,
|
||||
}
|
||||
let hir_id = self.next_id();
|
||||
self.lower_attrs(hir_id, &arm.attrs);
|
||||
hir::Arm { hir_id, pat, guard, body: self.lower_expr(&arm.body), span: arm.span }
|
||||
}
|
||||
|
||||
/// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
|
||||
|
@ -631,7 +671,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
Ident::with_dummy_span(sym::_task_context),
|
||||
hir::BindingAnnotation::Mutable,
|
||||
);
|
||||
let param = hir::Param { attrs: &[], hir_id: self.next_id(), pat, ty_span: span, span };
|
||||
let param = hir::Param { hir_id: self.next_id(), pat, ty_span: span, span };
|
||||
let params = arena_vec![self; param];
|
||||
|
||||
let body_id = self.lower_body(move |this| {
|
||||
|
@ -652,12 +692,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
span,
|
||||
Some(hir::Movability::Static),
|
||||
);
|
||||
let generator = hir::Expr {
|
||||
hir_id: self.lower_node_id(closure_node_id),
|
||||
kind: generator_kind,
|
||||
span,
|
||||
attrs: ThinVec::new(),
|
||||
};
|
||||
let generator =
|
||||
hir::Expr { hir_id: self.lower_node_id(closure_node_id), kind: generator_kind, span };
|
||||
|
||||
// `future::from_generator`:
|
||||
let unstable_span =
|
||||
|
@ -811,7 +847,6 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
hir_id: loop_hir_id,
|
||||
kind: hir::ExprKind::Loop(loop_block, None, hir::LoopSource::Loop, span),
|
||||
span,
|
||||
attrs: ThinVec::new(),
|
||||
});
|
||||
|
||||
// mut pinned => loop { ... }
|
||||
|
@ -988,7 +1023,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
// Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
|
||||
let destructure_let = self.stmt_let_pat(
|
||||
ThinVec::new(),
|
||||
None,
|
||||
whole_span,
|
||||
Some(rhs),
|
||||
pat,
|
||||
|
@ -1076,10 +1111,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
}
|
||||
// Structs.
|
||||
ExprKind::Struct(path, fields, rest) => {
|
||||
let field_pats = self.arena.alloc_from_iter(fields.iter().map(|f| {
|
||||
ExprKind::Struct(se) => {
|
||||
let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
|
||||
let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
|
||||
hir::FieldPat {
|
||||
hir::PatField {
|
||||
hir_id: self.next_id(),
|
||||
ident: f.ident,
|
||||
pat,
|
||||
|
@ -1090,11 +1125,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let qpath = self.lower_qpath(
|
||||
lhs.id,
|
||||
&None,
|
||||
path,
|
||||
&se.path,
|
||||
ParamMode::Optional,
|
||||
ImplTraitContext::disallowed(),
|
||||
);
|
||||
let fields_omitted = match rest {
|
||||
let fields_omitted = match &se.rest {
|
||||
StructRest::Base(e) => {
|
||||
self.sess
|
||||
.struct_span_err(
|
||||
|
@ -1210,7 +1245,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
|
||||
let expr = self.lower_expr(&e);
|
||||
let ident = Ident::new(Symbol::intern(s), e.span);
|
||||
self.field(ident, expr, e.span)
|
||||
self.expr_field(ident, expr, e.span)
|
||||
}),
|
||||
);
|
||||
|
||||
|
@ -1297,104 +1332,97 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
fn lower_expr_asm(&mut self, sp: Span, asm: &InlineAsm) -> hir::ExprKind<'hir> {
|
||||
if self.sess.asm_arch.is_none() {
|
||||
        // Rustdoc needs to support asm! from foreign architectures: don't try
|
||||
        // lowering the register constraints in this case.
|
||||
let asm_arch = if self.sess.opts.actually_rustdoc { None } else { self.sess.asm_arch };
|
||||
if asm_arch.is_none() && !self.sess.opts.actually_rustdoc {
|
||||
struct_span_err!(self.sess, sp, E0472, "asm! is unsupported on this target").emit();
|
||||
}
|
||||
if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
|
||||
&& !matches!(
|
||||
self.sess.asm_arch,
|
||||
Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64)
|
||||
)
|
||||
&& !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
|
||||
&& !self.sess.opts.actually_rustdoc
|
||||
{
|
||||
self.sess
|
||||
.struct_span_err(sp, "the `att_syntax` option is only supported on x86")
|
||||
.emit();
|
||||
}
|
||||
|
||||
// Lower operands to HIR, filter_map skips any operands with invalid
|
||||
// register classes.
|
||||
// Lower operands to HIR. We use dummy register classes if an error
|
||||
// occurs during lowering because we still need to be able to produce a
|
||||
// valid HIR.
|
||||
let sess = self.sess;
|
||||
let operands: Vec<_> = asm
|
||||
.operands
|
||||
.iter()
|
||||
.filter_map(|(op, op_sp)| {
|
||||
let lower_reg = |reg| {
|
||||
Some(match reg {
|
||||
InlineAsmRegOrRegClass::Reg(s) => asm::InlineAsmRegOrRegClass::Reg(
|
||||
.map(|(op, op_sp)| {
|
||||
let lower_reg = |reg| match reg {
|
||||
InlineAsmRegOrRegClass::Reg(s) => {
|
||||
asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
|
||||
asm::InlineAsmReg::parse(
|
||||
sess.asm_arch?,
|
||||
asm_arch,
|
||||
|feature| sess.target_features.contains(&Symbol::intern(feature)),
|
||||
&sess.target,
|
||||
s,
|
||||
)
|
||||
.map_err(|e| {
|
||||
.unwrap_or_else(|e| {
|
||||
let msg = format!("invalid register `{}`: {}", s.as_str(), e);
|
||||
sess.struct_span_err(*op_sp, &msg).emit();
|
||||
asm::InlineAsmReg::Err
|
||||
})
|
||||
.ok()?,
|
||||
),
|
||||
InlineAsmRegOrRegClass::RegClass(s) => {
|
||||
asm::InlineAsmRegOrRegClass::RegClass(
|
||||
asm::InlineAsmRegClass::parse(sess.asm_arch?, s)
|
||||
.map_err(|e| {
|
||||
let msg = format!(
|
||||
"invalid register class `{}`: {}",
|
||||
s.as_str(),
|
||||
e
|
||||
);
|
||||
sess.struct_span_err(*op_sp, &msg).emit();
|
||||
} else {
|
||||
asm::InlineAsmReg::Err
|
||||
})
|
||||
.ok()?,
|
||||
)
|
||||
}
|
||||
InlineAsmRegOrRegClass::RegClass(s) => {
|
||||
asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
|
||||
asm::InlineAsmRegClass::parse(asm_arch, s).unwrap_or_else(|e| {
|
||||
let msg = format!("invalid register class `{}`: {}", s.as_str(), e);
|
||||
sess.struct_span_err(*op_sp, &msg).emit();
|
||||
asm::InlineAsmRegClass::Err
|
||||
})
|
||||
} else {
|
||||
asm::InlineAsmRegClass::Err
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
// lower_reg is executed last because we need to lower all
|
||||
// sub-expressions even if we throw them away later.
|
||||
let op = match *op {
|
||||
InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
|
||||
reg: lower_reg(reg),
|
||||
expr: self.lower_expr_mut(expr),
|
||||
reg: lower_reg(reg)?,
|
||||
},
|
||||
InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
|
||||
reg: lower_reg(reg),
|
||||
late,
|
||||
expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
|
||||
reg: lower_reg(reg)?,
|
||||
},
|
||||
InlineAsmOperand::InOut { reg, late, ref expr } => {
|
||||
hir::InlineAsmOperand::InOut {
|
||||
reg: lower_reg(reg),
|
||||
late,
|
||||
expr: self.lower_expr_mut(expr),
|
||||
reg: lower_reg(reg)?,
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
|
||||
hir::InlineAsmOperand::SplitInOut {
|
||||
reg: lower_reg(reg),
|
||||
late,
|
||||
in_expr: self.lower_expr_mut(in_expr),
|
||||
out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
|
||||
reg: lower_reg(reg)?,
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::Const { ref expr } => {
|
||||
hir::InlineAsmOperand::Const { expr: self.lower_expr_mut(expr) }
|
||||
}
|
||||
InlineAsmOperand::Const { ref anon_const } => hir::InlineAsmOperand::Const {
|
||||
anon_const: self.lower_anon_const(anon_const),
|
||||
},
|
||||
InlineAsmOperand::Sym { ref expr } => {
|
||||
hir::InlineAsmOperand::Sym { expr: self.lower_expr_mut(expr) }
|
||||
}
|
||||
};
|
||||
Some((op, *op_sp))
|
||||
(op, *op_sp)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Stop if there were any errors when lowering the register classes
|
||||
if operands.len() != asm.operands.len() || sess.asm_arch.is_none() {
|
||||
return hir::ExprKind::Err;
|
||||
}
|
||||
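// Editor's note: a toy sketch (not the real rustc_target asm API) of the
// error-recovery strategy above: instead of dropping operands whose register
// string fails to parse, lowering keeps going and records a dedicated `Err`
// class, so later validation can skip it and HIR is still produced.

#[derive(Clone, Copy, Debug, PartialEq)]
enum RegClass {
    Gpr,
    Err,
}

fn parse_reg_class(s: &str, emit_error: &mut impl FnMut(String)) -> RegClass {
    match s {
        "reg" => RegClass::Gpr,
        other => {
            emit_error(format!("invalid register class `{}`", other));
            RegClass::Err
        }
    }
}

fn main() {
    let mut errors = Vec::new();
    let classes: Vec<RegClass> = ["reg", "bogus"]
        .iter()
        .copied()
        .map(|s| parse_reg_class(s, &mut |msg| errors.push(msg)))
        .collect();
    // All operands are still lowered; the invalid one carries `Err`.
    assert_eq!(classes, vec![RegClass::Gpr, RegClass::Err]);
    assert_eq!(errors.len(), 1);
}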
|
||||
// Validate template modifiers against the register classes for the operands
|
||||
let asm_arch = sess.asm_arch.unwrap();
|
||||
for p in &asm.template {
|
||||
if let InlineAsmTemplatePiece::Placeholder {
|
||||
operand_idx,
|
||||
|
@ -1409,7 +1437,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
| hir::InlineAsmOperand::InOut { reg, .. }
|
||||
| hir::InlineAsmOperand::SplitInOut { reg, .. } => {
|
||||
let class = reg.reg_class();
|
||||
let valid_modifiers = class.valid_modifiers(asm_arch);
|
||||
if class == asm::InlineAsmRegClass::Err {
|
||||
continue;
|
||||
}
|
||||
let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
|
||||
if !valid_modifiers.contains(&modifier) {
|
||||
let mut err = sess.struct_span_err(
|
||||
placeholder_span,
|
||||
|
@ -1468,11 +1499,31 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
// previous iteration.
|
||||
required_features.clear();
|
||||
|
||||
let reg_class = reg.reg_class();
|
||||
if reg_class == asm::InlineAsmRegClass::Err {
|
||||
continue;
|
||||
}
|
||||
|
||||
// We ignore target feature requirements for clobbers: if the
|
||||
// feature is disabled then the compiler doesn't care what we
|
||||
// do with the registers.
|
||||
//
|
||||
// Note that this is only possible for explicit register
|
||||
// operands, which cannot be used in the asm string.
|
||||
let is_clobber = matches!(
|
||||
op,
|
||||
hir::InlineAsmOperand::Out {
|
||||
reg: asm::InlineAsmRegOrRegClass::Reg(_),
|
||||
late: _,
|
||||
expr: None
|
||||
}
|
||||
);
|
||||
|
||||
if !is_clobber {
|
||||
// Validate register classes against currently enabled target
|
||||
// features. We check that at least one type is available for
|
||||
// the current target.
|
||||
let reg_class = reg.reg_class();
|
||||
for &(_, feature) in reg_class.supported_types(asm_arch) {
|
||||
for &(_, feature) in reg_class.supported_types(asm_arch.unwrap()) {
|
||||
if let Some(feature) = feature {
|
||||
if self.sess.target_features.contains(&Symbol::intern(feature)) {
|
||||
required_features.clear();
|
||||
|
@ -1507,6 +1558,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
sess.struct_span_err(op_sp, &msg).emit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for conflicts between explicit register operands.
|
||||
if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
|
||||
|
@ -1624,8 +1676,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
|
||||
}
|
||||
|
||||
fn lower_field(&mut self, f: &Field) -> hir::Field<'hir> {
|
||||
hir::Field {
|
||||
fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
|
||||
hir::ExprField {
|
||||
hir_id: self.next_id(),
|
||||
ident: f.ident,
|
||||
expr: self.lower_expr(&f.expr),
|
||||
|
@ -1747,7 +1799,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
// `let mut __next`
|
||||
let next_let = self.stmt_let_pat(
|
||||
ThinVec::new(),
|
||||
None,
|
||||
desugared_span,
|
||||
None,
|
||||
next_pat,
|
||||
|
@ -1757,7 +1809,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
// `let <pat> = __next`
|
||||
let pat = self.lower_pat(pat);
|
||||
let pat_let = self.stmt_let_pat(
|
||||
ThinVec::new(),
|
||||
None,
|
||||
desugared_span,
|
||||
Some(next_expr),
|
||||
pat,
|
||||
|
@ -1781,12 +1833,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
hir::LoopSource::ForLoop,
|
||||
e.span.with_hi(orig_head_span.hi()),
|
||||
);
|
||||
let loop_expr = self.arena.alloc(hir::Expr {
|
||||
hir_id: self.lower_node_id(e.id),
|
||||
kind,
|
||||
span: e.span,
|
||||
attrs: ThinVec::new(),
|
||||
});
|
||||
let loop_expr =
|
||||
self.arena.alloc(hir::Expr { hir_id: self.lower_node_id(e.id), kind, span: e.span });
|
||||
|
||||
// `mut iter => { ... }`
|
||||
let iter_arm = self.arm(iter_pat, loop_expr);
|
||||
|
@ -2121,21 +2169,21 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
kind: hir::ExprKind<'hir>,
|
||||
attrs: AttrVec,
|
||||
) -> hir::Expr<'hir> {
|
||||
hir::Expr { hir_id: self.next_id(), kind, span, attrs }
|
||||
let hir_id = self.next_id();
|
||||
self.lower_attrs(hir_id, &attrs);
|
||||
hir::Expr { hir_id, kind, span }
|
||||
}
|
||||
|
||||
fn field(&mut self, ident: Ident, expr: &'hir hir::Expr<'hir>, span: Span) -> hir::Field<'hir> {
|
||||
hir::Field { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
|
||||
fn expr_field(
|
||||
&mut self,
|
||||
ident: Ident,
|
||||
expr: &'hir hir::Expr<'hir>,
|
||||
span: Span,
|
||||
) -> hir::ExprField<'hir> {
|
||||
hir::ExprField { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
|
||||
}
|
||||
|
||||
fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
|
||||
hir::Arm {
|
||||
hir_id: self.next_id(),
|
||||
attrs: &[],
|
||||
pat,
|
||||
guard: None,
|
||||
span: expr.span,
|
||||
body: expr,
|
||||
}
|
||||
hir::Arm { hir_id: self.next_id(), pat, guard: None, span: expr.span, body: expr }
|
||||
}
|
||||
}
|
||||
@ -18,6 +18,7 @@ use rustc_target::spec::abi;
|
|||
use smallvec::{smallvec, SmallVec};
|
||||
use tracing::debug;
|
||||
|
||||
use std::iter;
|
||||
use std::mem;
|
||||
|
||||
pub(super) struct ItemLowerer<'a, 'lowering, 'hir> {
|
||||
|
@ -206,7 +207,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
UseTreeKind::Glob => {}
|
||||
UseTreeKind::Simple(_, id1, id2) => {
|
||||
for (_, &id) in
|
||||
self.expect_full_res_from_use(base_id).skip(1).zip([id1, id2].iter())
|
||||
iter::zip(self.expect_full_res_from_use(base_id).skip(1), &[id1, id2])
|
||||
{
|
||||
vec.push(id);
|
||||
}
|
||||
|
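// Editor's note: a sketch of the `iter::zip` change above: the free function
// pairs two iterables symmetrically, which the lowering code now uses instead
// of `a.iter().zip(b.iter())`. (`std::iter::zip` needed the `iter_zip`
// feature gate when this diff was written; it has since been stabilized, so
// this standalone example compiles without the gate.)

use std::iter;

fn main() {
    let resolutions = [10_u32, 20, 30];
    let node_ids = [1_u32, 2];
    let mut pairs = Vec::new();
    for (res, &id) in iter::zip(resolutions.iter().skip(1), &node_ids) {
        pairs.push((*res, id));
    }
    assert_eq!(pairs, vec![(20, 1), (30, 2)]);
}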
@ -217,44 +218,41 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
pub fn lower_item(&mut self, i: &Item) -> Option<hir::Item<'hir>> {
|
||||
let mut ident = i.ident;
|
||||
let mut vis = self.lower_visibility(&i.vis, None);
|
||||
let attrs = self.lower_attrs(&i.attrs);
|
||||
|
||||
if let ItemKind::MacroDef(MacroDef { ref body, macro_rules }) = i.kind {
|
||||
if !macro_rules || self.sess.contains_name(&i.attrs, sym::macro_export) {
|
||||
let def_id = self.lower_node_id(i.id).expect_owner();
|
||||
let hir_id = self.lower_node_id(i.id);
|
||||
self.lower_attrs(hir_id, &i.attrs);
|
||||
let body = P(self.lower_mac_args(body));
|
||||
self.exported_macros.push(hir::MacroDef {
|
||||
ident,
|
||||
vis,
|
||||
attrs,
|
||||
def_id,
|
||||
def_id: hir_id.expect_owner(),
|
||||
span: i.span,
|
||||
ast: MacroDef { body, macro_rules },
|
||||
});
|
||||
} else {
|
||||
self.non_exported_macro_attrs.extend(attrs.iter().cloned());
|
||||
for a in i.attrs.iter() {
|
||||
let a = self.lower_attr(a);
|
||||
self.non_exported_macro_attrs.push(a);
|
||||
}
|
||||
}
|
||||
return None;
|
||||
}
|
||||
|
||||
let kind = self.lower_item_kind(i.span, i.id, &mut ident, attrs, &mut vis, &i.kind);
|
||||
|
||||
Some(hir::Item {
|
||||
def_id: self.lower_node_id(i.id).expect_owner(),
|
||||
ident,
|
||||
attrs,
|
||||
kind,
|
||||
vis,
|
||||
span: i.span,
|
||||
})
|
||||
let hir_id = self.lower_node_id(i.id);
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs);
|
||||
let kind = self.lower_item_kind(i.span, i.id, hir_id, &mut ident, attrs, &mut vis, &i.kind);
|
||||
Some(hir::Item { def_id: hir_id.expect_owner(), ident, kind, vis, span: i.span })
|
||||
}
|
||||
|
||||
fn lower_item_kind(
|
||||
&mut self,
|
||||
span: Span,
|
||||
id: NodeId,
|
||||
hir_id: hir::HirId,
|
||||
ident: &mut Ident,
|
||||
attrs: &'hir [Attribute],
|
||||
attrs: Option<&'hir [Attribute]>,
|
||||
vis: &mut hir::Visibility<'hir>,
|
||||
i: &ItemKind,
|
||||
) -> hir::ItemKind<'hir> {
|
||||
|
@ -322,10 +320,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
},
|
||||
ItemKind::ForeignMod(ref fm) => {
|
||||
if fm.abi.is_none() {
|
||||
self.maybe_lint_missing_abi(span, id, abi::Abi::C);
|
||||
self.maybe_lint_missing_abi(span, id, abi::Abi::C { unwind: false });
|
||||
}
|
||||
hir::ItemKind::ForeignMod {
|
||||
abi: fm.abi.map_or(abi::Abi::C, |abi| self.lower_abi(abi)),
|
||||
abi: fm.abi.map_or(abi::Abi::C { unwind: false }, |abi| self.lower_abi(abi)),
|
||||
items: self
|
||||
.arena
|
||||
.alloc_from_iter(fm.items.iter().map(|x| self.lower_foreign_item_ref(x))),
|
||||
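// Editor's note: a toy enum (not the rustc_target type) illustrating the ABI
// change above: `Abi::C` is now a struct-like variant carrying an `unwind`
// flag, and the lowering in the hunk defaults foreign modules to
// `Abi::C { unwind: false }` unless an explicit ABI string is given.

#[derive(Clone, Copy, Debug, PartialEq)]
enum Abi {
    C { unwind: bool },
    Rust,
}

fn default_foreign_abi() -> Abi {
    Abi::C { unwind: false }
}

fn main() {
    assert_eq!(default_foreign_abi(), Abi::C { unwind: false });
    assert_ne!(default_foreign_abi(), Abi::Rust);
}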
|
@ -345,7 +343,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
ty,
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut FxHashSet::default(),
|
||||
origin: hir::OpaqueTyOrigin::Misc,
|
||||
origin: hir::OpaqueTyOrigin::TyAlias,
|
||||
},
|
||||
);
|
||||
let generics = self.lower_generics(gen, ImplTraitContext::disallowed());
|
||||
|
@ -365,14 +363,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
self.lower_generics(generics, ImplTraitContext::disallowed()),
|
||||
),
|
||||
ItemKind::Struct(ref struct_def, ref generics) => {
|
||||
let struct_def = self.lower_variant_data(struct_def);
|
||||
let struct_def = self.lower_variant_data(hir_id, struct_def);
|
||||
hir::ItemKind::Struct(
|
||||
struct_def,
|
||||
self.lower_generics(generics, ImplTraitContext::disallowed()),
|
||||
)
|
||||
}
|
||||
ItemKind::Union(ref vdata, ref generics) => {
|
||||
let vdata = self.lower_variant_data(vdata);
|
||||
let vdata = self.lower_variant_data(hir_id, vdata);
|
||||
hir::ItemKind::Union(
|
||||
vdata,
|
||||
self.lower_generics(generics, ImplTraitContext::disallowed()),
|
||||
|
@ -505,7 +503,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
id: NodeId,
|
||||
vis: &mut hir::Visibility<'hir>,
|
||||
ident: &mut Ident,
|
||||
attrs: &'hir [Attribute],
|
||||
attrs: Option<&'hir [Attribute]>,
|
||||
) -> hir::ItemKind<'hir> {
|
||||
debug!("lower_use_tree(tree={:?})", tree);
|
||||
debug!("lower_use_tree: vis = {:?}", vis);
|
||||
|
@ -540,7 +538,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
// won't be dealing with macros in the rest of the compiler.
|
||||
// Essentially a single `use` which imports two names is desugared into
|
||||
// two imports.
|
||||
for (res, &new_node_id) in resolutions.zip([id1, id2].iter()) {
|
||||
for (res, &new_node_id) in iter::zip(resolutions, &[id1, id2]) {
|
||||
let ident = *ident;
|
||||
let mut path = path.clone();
|
||||
for seg in &mut path.segments {
|
||||
|
@ -554,11 +552,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let path = this.lower_path_extra(res, &path, ParamMode::Explicit, None);
|
||||
let kind = hir::ItemKind::Use(path, hir::UseKind::Single);
|
||||
let vis = this.rebuild_vis(&vis);
|
||||
if let Some(attrs) = attrs {
|
||||
this.attrs.insert(new_id, attrs);
|
||||
}
|
||||
|
||||
this.insert_item(hir::Item {
|
||||
def_id: new_id.expect_owner(),
|
||||
ident,
|
||||
attrs,
|
||||
kind,
|
||||
vis,
|
||||
span,
|
||||
|
@ -626,11 +626,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
let kind =
|
||||
this.lower_use_tree(use_tree, &prefix, id, &mut vis, &mut ident, attrs);
|
||||
if let Some(attrs) = attrs {
|
||||
this.attrs.insert(new_hir_id, attrs);
|
||||
}
|
||||
|
||||
this.insert_item(hir::Item {
|
||||
def_id: new_hir_id.expect_owner(),
|
||||
ident,
|
||||
attrs,
|
||||
kind,
|
||||
vis,
|
||||
span: use_tree.span,
|
||||
|
@ -699,11 +701,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem<'hir> {
|
||||
let def_id = self.resolver.local_def_id(i.id);
|
||||
let hir_id = self.lower_node_id(i.id);
|
||||
let def_id = hir_id.expect_owner();
|
||||
self.lower_attrs(hir_id, &i.attrs);
|
||||
hir::ForeignItem {
|
||||
def_id,
|
||||
ident: i.ident,
|
||||
attrs: self.lower_attrs(&i.attrs),
|
||||
kind: match i.kind {
|
||||
ForeignItemKind::Fn(box FnKind(_, ref sig, ref generics, _)) => {
|
||||
let fdec = &sig.decl;
|
||||
|
@ -748,33 +751,47 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
fn lower_variant(&mut self, v: &Variant) -> hir::Variant<'hir> {
|
||||
let id = self.lower_node_id(v.id);
|
||||
self.lower_attrs(id, &v.attrs);
|
||||
hir::Variant {
|
||||
attrs: self.lower_attrs(&v.attrs),
|
||||
data: self.lower_variant_data(&v.data),
|
||||
id,
|
||||
data: self.lower_variant_data(id, &v.data),
|
||||
disr_expr: v.disr_expr.as_ref().map(|e| self.lower_anon_const(e)),
|
||||
id: self.lower_node_id(v.id),
|
||||
ident: v.ident,
|
||||
span: v.span,
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_variant_data(&mut self, vdata: &VariantData) -> hir::VariantData<'hir> {
|
||||
fn lower_variant_data(
|
||||
&mut self,
|
||||
parent_id: hir::HirId,
|
||||
vdata: &VariantData,
|
||||
) -> hir::VariantData<'hir> {
|
||||
match *vdata {
|
||||
VariantData::Struct(ref fields, recovered) => hir::VariantData::Struct(
|
||||
self.arena
|
||||
.alloc_from_iter(fields.iter().enumerate().map(|f| self.lower_struct_field(f))),
|
||||
.alloc_from_iter(fields.iter().enumerate().map(|f| self.lower_field_def(f))),
|
||||
recovered,
|
||||
),
|
||||
VariantData::Tuple(ref fields, id) => hir::VariantData::Tuple(
|
||||
self.arena
|
||||
.alloc_from_iter(fields.iter().enumerate().map(|f| self.lower_struct_field(f))),
|
||||
self.lower_node_id(id),
|
||||
VariantData::Tuple(ref fields, id) => {
|
||||
let ctor_id = self.lower_node_id(id);
|
||||
self.alias_attrs(ctor_id, parent_id);
|
||||
hir::VariantData::Tuple(
|
||||
self.arena.alloc_from_iter(
|
||||
fields.iter().enumerate().map(|f| self.lower_field_def(f)),
|
||||
),
|
||||
VariantData::Unit(id) => hir::VariantData::Unit(self.lower_node_id(id)),
|
||||
ctor_id,
|
||||
)
|
||||
}
|
||||
VariantData::Unit(id) => {
|
||||
let ctor_id = self.lower_node_id(id);
|
||||
self.alias_attrs(ctor_id, parent_id);
|
||||
hir::VariantData::Unit(ctor_id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField<'hir> {
|
||||
fn lower_field_def(&mut self, (index, f): (usize, &FieldDef)) -> hir::FieldDef<'hir> {
|
||||
let ty = if let TyKind::Path(ref qself, ref path) = f.ty.kind {
|
||||
let t = self.lower_path_ty(
|
||||
&f.ty,
|
||||
|
@ -787,9 +804,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
} else {
|
||||
self.lower_ty(&f.ty, ImplTraitContext::disallowed())
|
||||
};
|
||||
hir::StructField {
|
||||
let hir_id = self.lower_node_id(f.id);
|
||||
self.lower_attrs(hir_id, &f.attrs);
|
||||
hir::FieldDef {
|
||||
span: f.span,
|
||||
hir_id: self.lower_node_id(f.id),
|
||||
hir_id,
|
||||
ident: match f.ident {
|
||||
Some(ident) => ident,
|
||||
// FIXME(jseyfried): positional field hygiene.
|
||||
|
@ -797,12 +816,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
},
|
||||
vis: self.lower_visibility(&f.vis, None),
|
||||
ty,
|
||||
attrs: self.lower_attrs(&f.attrs),
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_trait_item(&mut self, i: &AssocItem) -> hir::TraitItem<'hir> {
|
||||
let trait_item_def_id = self.resolver.local_def_id(i.id);
|
||||
let hir_id = self.lower_node_id(i.id);
|
||||
let trait_item_def_id = hir_id.expect_owner();
|
||||
|
||||
let (generics, kind) = match i.kind {
|
||||
AssocItemKind::Const(_, ref ty, ref default) => {
|
||||
|
@ -817,9 +836,17 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)))
|
||||
}
|
||||
AssocItemKind::Fn(box FnKind(_, ref sig, ref generics, Some(ref body))) => {
|
||||
let body_id = self.lower_fn_body_block(i.span, &sig.decl, Some(body));
|
||||
let (generics, sig) =
|
||||
self.lower_method_sig(generics, sig, trait_item_def_id, false, None, i.id);
|
||||
let asyncness = sig.header.asyncness;
|
||||
let body_id =
|
||||
self.lower_maybe_async_body(i.span, &sig.decl, asyncness, Some(&body));
|
||||
let (generics, sig) = self.lower_method_sig(
|
||||
generics,
|
||||
sig,
|
||||
trait_item_def_id,
|
||||
false,
|
||||
asyncness.opt_return_id(),
|
||||
i.id,
|
||||
);
|
||||
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)))
|
||||
}
|
||||
AssocItemKind::TyAlias(box TyAliasKind(_, ref generics, ref bounds, ref default)) => {
|
||||
|
@ -835,14 +862,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
AssocItemKind::MacCall(..) => panic!("macro item shouldn't exist at this point"),
|
||||
};
|
||||
|
||||
hir::TraitItem {
|
||||
def_id: trait_item_def_id,
|
||||
ident: i.ident,
|
||||
attrs: self.lower_attrs(&i.attrs),
|
||||
generics,
|
||||
kind,
|
||||
span: i.span,
|
||||
}
|
||||
self.lower_attrs(hir_id, &i.attrs);
|
||||
hir::TraitItem { def_id: trait_item_def_id, ident: i.ident, generics, kind, span: i.span }
|
||||
}
|
||||
|
||||
fn lower_trait_item_ref(&mut self, i: &AssocItem) -> hir::TraitItemRef {
|
||||
|
@ -906,7 +927,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
ty,
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut FxHashSet::default(),
|
||||
origin: hir::OpaqueTyOrigin::Misc,
|
||||
origin: hir::OpaqueTyOrigin::TyAlias,
|
||||
},
|
||||
);
|
||||
hir::ImplItemKind::TyAlias(ty)
|
||||
|
@ -920,10 +941,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
// Since `default impl` is not yet implemented, this is always true in impls.
|
||||
let has_value = true;
|
||||
let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value);
|
||||
let hir_id = self.lower_node_id(i.id);
|
||||
self.lower_attrs(hir_id, &i.attrs);
|
||||
hir::ImplItem {
|
||||
def_id: self.lower_node_id(i.id).expect_owner(),
|
||||
def_id: hir_id.expect_owner(),
|
||||
ident: i.ident,
|
||||
attrs: self.lower_attrs(&i.attrs),
|
||||
generics,
|
||||
vis: self.lower_visibility(&i.vis, None),
|
||||
defaultness,
|
||||
|
@ -1024,9 +1046,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
fn lower_param(&mut self, param: &Param) -> hir::Param<'hir> {
|
||||
let hir_id = self.lower_node_id(param.id);
|
||||
self.lower_attrs(hir_id, ¶m.attrs);
|
||||
hir::Param {
|
||||
attrs: self.lower_attrs(¶m.attrs),
|
||||
hir_id: self.lower_node_id(param.id),
|
||||
hir_id,
|
||||
pat: self.lower_pat(¶m.pat),
|
||||
ty_span: param.ty.span,
|
||||
span: param.span,
|
||||
|
@ -1158,11 +1181,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
//
|
||||
// If this is the simple case, this parameter will end up being the same as the
|
||||
// original parameter, but with a different pattern id.
|
||||
let mut stmt_attrs = AttrVec::new();
|
||||
stmt_attrs.extend(parameter.attrs.iter().cloned());
|
||||
let stmt_attrs = this.attrs.get(¶meter.hir_id).copied();
|
||||
let (new_parameter_pat, new_parameter_id) = this.pat_ident(desugared_span, ident);
|
||||
let new_parameter = hir::Param {
|
||||
attrs: parameter.attrs,
|
||||
hir_id: parameter.hir_id,
|
||||
pat: new_parameter_pat,
|
||||
ty_span: parameter.ty_span,
|
||||
|
@ -1205,7 +1226,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
);
|
||||
let move_expr = this.expr_ident(desugared_span, ident, new_parameter_id);
|
||||
let move_stmt = this.stmt_let_pat(
|
||||
AttrVec::new(),
|
||||
None,
|
||||
desugared_span,
|
||||
Some(move_expr),
|
||||
move_pat,
|
||||
|
@ -1322,8 +1343,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
match ext {
|
||||
Extern::None => abi::Abi::Rust,
|
||||
Extern::Implicit => {
|
||||
self.maybe_lint_missing_abi(span, id, abi::Abi::C);
|
||||
abi::Abi::C
|
||||
self.maybe_lint_missing_abi(span, id, abi::Abi::C { unwind: false });
|
||||
abi::Abi::C { unwind: false }
|
||||
}
|
||||
Extern::Explicit(abi) => self.lower_abi(abi),
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@
//! For the simpler lowering steps, IDs and spans should be preserved. Unlike
//! expansion we do not preserve the process of lowering in the spans, so spans
//! should not be modified here. When creating a new node (as opposed to
//! 'folding' an existing one), then you create a new ID using `next_id()`.
//! "folding" an existing one), create a new ID using `next_id()`.
//!
//! You must ensure that IDs are unique. That means that you should only use the
//! ID from an AST node in a single HIR node (you can assume that AST node-IDs
@ -26,18 +26,19 @@
//! span and spans don't need to be kept in order, etc. Where code is preserved
//! by lowering, it should have the same span as in the AST. Where HIR nodes are
//! new it is probably best to give a span for the whole AST node being lowered.
//! All nodes should have real spans, don't use dummy spans. Tools are likely to
//! All nodes should have real spans; don't use dummy spans. Tools are likely to
//! get confused if the spans from leaf AST nodes occur in multiple places
//! in the HIR, especially for multiple identifiers.

#![feature(crate_visibility_modifier)]
#![feature(or_patterns)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(box_patterns)]
#![feature(iter_zip)]
#![recursion_limit = "256"]

use rustc_ast::node_id::NodeMap;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, DelimSpan, TokenStream, TokenTree};
use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
use rustc_ast::visit::{self, AssocCtxt, Visitor};
use rustc_ast::walk_list;
use rustc_ast::{self as ast, *};
@ -55,7 +56,7 @@ use rustc_hir::{ConstArg, GenericArg, ParamName};
|
|||
use rustc_index::vec::{Idx, IndexVec};
|
||||
use rustc_session::lint::builtin::{BARE_TRAIT_OBJECTS, MISSING_ABI};
|
||||
use rustc_session::lint::{BuiltinLintDiagnostics, LintBuffer};
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_session::utils::{FlattenNonterminals, NtToTokenstream};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::hygiene::ExpnId;
|
||||
use rustc_span::source_map::{respan, DesugaringKind};
|
||||
|
@ -92,10 +93,10 @@ struct LoweringContext<'a, 'hir: 'a> {
|
|||
|
||||
/// HACK(Centril): there is a cyclic dependency between the parser and lowering
|
||||
/// if we don't have this function pointer. To avoid that dependency so that
|
||||
/// librustc_middle is independent of the parser, we use dynamic dispatch here.
|
||||
/// `rustc_middle` is independent of the parser, we use dynamic dispatch here.
|
||||
nt_to_tokenstream: NtToTokenstream,
|
||||
|
||||
/// Used to allocate HIR nodes
|
||||
/// Used to allocate HIR nodes.
|
||||
arena: &'hir Arena<'hir>,
|
||||
|
||||
/// The items being lowered are collected here.
|
||||
|
@ -114,6 +115,8 @@ struct LoweringContext<'a, 'hir: 'a> {
|
|||
|
||||
generator_kind: Option<hir::GeneratorKind>,
|
||||
|
||||
attrs: BTreeMap<hir::HirId, &'hir [Attribute]>,
|
||||
|
||||
/// When inside an `async` context, this is the `HirId` of the
|
||||
/// `task_context` local bound to the resume argument of the generator.
|
||||
task_context: Option<hir::HirId>,
|
||||
|
@ -128,7 +131,7 @@ struct LoweringContext<'a, 'hir: 'a> {
|
|||
is_in_trait_impl: bool,
|
||||
is_in_dyn_type: bool,
|
||||
|
||||
/// What to do when we encounter either an "anonymous lifetime
|
||||
/// What to do when we encounter an "anonymous lifetime
|
||||
/// reference". The term "anonymous" is meant to encompass both
|
||||
/// `'_` lifetimes as well as fully elided cases where nothing is
|
||||
/// written at all (e.g., `&T` or `std::cell::Ref<T>`).
|
||||
|
@ -210,8 +213,6 @@ pub trait ResolverAstLowering {
|
|||
) -> LocalDefId;
|
||||
}
|
||||
|
||||
type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
|
||||
|
||||
/// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
|
||||
/// and if so, what meaning it has.
|
||||
#[derive(Debug)]
|
||||
|
@ -221,7 +222,7 @@ enum ImplTraitContext<'b, 'a> {
|
|||
/// equivalent to a fresh universal parameter like `fn foo<T: Debug>(x: T)`.
|
||||
///
|
||||
/// Newly generated parameters should be inserted into the given `Vec`.
|
||||
Universal(&'b mut Vec<hir::GenericParam<'a>>),
|
||||
Universal(&'b mut Vec<hir::GenericParam<'a>>, LocalDefId),
|
||||
|
||||
/// Treat `impl Trait` as shorthand for a new opaque type.
|
||||
/// Example: `fn foo() -> impl Debug`, where `impl Debug` is conceptually
|
||||
|
@ -238,11 +239,13 @@ enum ImplTraitContext<'b, 'a> {
|
|||
OtherOpaqueTy {
|
||||
/// Set of lifetimes that this opaque type can capture, if it uses
|
||||
/// them. This includes lifetimes bound since we entered this context.
|
||||
/// For example, in
|
||||
/// For example:
|
||||
///
|
||||
/// ```
|
||||
/// type A<'b> = impl for<'a> Trait<'a, Out = impl Sized + 'a>;
|
||||
/// ```
|
||||
///
|
||||
/// the inner opaque type captures `'a` because it uses it. It doesn't
|
||||
/// Here the inner opaque type captures `'a` because it uses it. It doesn't
|
||||
/// need to capture `'b` because it already inherits the lifetime
|
||||
/// parameter from `A`.
|
||||
// FIXME(impl_trait): but `required_region_bounds` will ICE later
|
||||
|
@ -274,7 +277,7 @@ impl<'a> ImplTraitContext<'_, 'a> {
|
|||
fn reborrow<'this>(&'this mut self) -> ImplTraitContext<'this, 'a> {
|
||||
use self::ImplTraitContext::*;
|
||||
match self {
|
||||
Universal(params) => Universal(params),
|
||||
Universal(params, parent) => Universal(params, *parent),
|
||||
ReturnPositionOpaqueTy { fn_def_id, origin } => {
|
||||
ReturnPositionOpaqueTy { fn_def_id: *fn_def_id, origin: *origin }
|
||||
}
|
||||
|
@ -307,6 +310,7 @@ pub fn lower_crate<'a, 'hir>(
|
|||
bodies: BTreeMap::new(),
|
||||
trait_impls: BTreeMap::new(),
|
||||
modules: BTreeMap::new(),
|
||||
attrs: BTreeMap::default(),
|
||||
exported_macros: Vec::new(),
|
||||
non_exported_macro_attrs: Vec::new(),
|
||||
catch_scopes: Vec::new(),
|
||||
|
@ -397,67 +401,6 @@ enum AnonymousLifetimeMode {
|
|||
PassThrough,
|
||||
}
|
||||
|
||||
struct TokenStreamLowering<'a> {
|
||||
parse_sess: &'a ParseSess,
|
||||
synthesize_tokens: CanSynthesizeMissingTokens,
|
||||
nt_to_tokenstream: NtToTokenstream,
|
||||
}
|
||||
|
||||
impl<'a> TokenStreamLowering<'a> {
|
||||
fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
|
||||
tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
|
||||
}
|
||||
|
||||
fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
|
||||
match tree {
|
||||
TokenTree::Token(token) => self.lower_token(token),
|
||||
TokenTree::Delimited(span, delim, tts) => {
|
||||
TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_token(&mut self, token: Token) -> TokenStream {
|
||||
match token.kind {
|
||||
token::Interpolated(nt) => {
|
||||
let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
|
||||
TokenTree::Delimited(
|
||||
DelimSpan::from_single(token.span),
|
||||
DelimToken::NoDelim,
|
||||
self.lower_token_stream(tts),
|
||||
)
|
||||
.into()
|
||||
}
|
||||
_ => TokenTree::Token(token).into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ImplTraitTypeIdVisitor<'a> {
|
||||
ids: &'a mut SmallVec<[NodeId; 1]>,
|
||||
}
|
||||
|
||||
impl Visitor<'_> for ImplTraitTypeIdVisitor<'_> {
|
||||
fn visit_ty(&mut self, ty: &Ty) {
|
||||
match ty.kind {
|
||||
TyKind::Typeof(_) | TyKind::BareFn(_) => return,
|
||||
|
||||
TyKind::ImplTrait(id, _) => self.ids.push(id),
|
||||
_ => {}
|
||||
}
|
||||
visit::walk_ty(self, ty);
|
||||
}
|
||||
|
||||
fn visit_path_segment(&mut self, path_span: Span, path_segment: &PathSegment) {
|
||||
if let Some(ref p) = path_segment.args {
|
||||
if let GenericArgs::Parenthesized(_) = **p {
|
||||
return;
|
||||
}
|
||||
}
|
||||
visit::walk_path_segment(self, path_span, path_segment)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
fn lower_crate(mut self, c: &Crate) -> hir::Crate<'hir> {
|
||||
/// Full-crate AST visitor that inserts into a fresh
|
||||
|
@ -470,25 +413,18 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
|
||||
impl MiscCollector<'_, '_, '_> {
|
||||
fn allocate_use_tree_hir_id_counters(&mut self, tree: &UseTree, owner: LocalDefId) {
|
||||
fn allocate_use_tree_hir_id_counters(&mut self, tree: &UseTree) {
|
||||
match tree.kind {
|
||||
UseTreeKind::Simple(_, id1, id2) => {
|
||||
for &id in &[id1, id2] {
|
||||
self.lctx.resolver.create_def(
|
||||
owner,
|
||||
id,
|
||||
DefPathData::Misc,
|
||||
ExpnId::root(),
|
||||
tree.prefix.span,
|
||||
);
|
||||
self.lctx.allocate_hir_id_counter(id);
|
||||
}
|
||||
}
|
||||
UseTreeKind::Glob => (),
|
||||
UseTreeKind::Nested(ref trees) => {
|
||||
for &(ref use_tree, id) in trees {
|
||||
let hir_id = self.lctx.allocate_hir_id_counter(id);
|
||||
self.allocate_use_tree_hir_id_counters(use_tree, hir_id.owner);
|
||||
self.lctx.allocate_hir_id_counter(id);
|
||||
self.allocate_use_tree_hir_id_counters(use_tree);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -497,7 +433,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
impl<'tcx> Visitor<'tcx> for MiscCollector<'tcx, '_, '_> {
|
||||
fn visit_item(&mut self, item: &'tcx Item) {
|
||||
let hir_id = self.lctx.allocate_hir_id_counter(item.id);
|
||||
self.lctx.allocate_hir_id_counter(item.id);
|
||||
|
||||
match item.kind {
|
||||
ItemKind::Struct(_, ref generics)
|
||||
|
@ -516,7 +452,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
self.lctx.type_def_lifetime_params.insert(def_id.to_def_id(), count);
|
||||
}
|
||||
ItemKind::Use(ref use_tree) => {
|
||||
self.allocate_use_tree_hir_id_counters(use_tree, hir_id.owner);
|
||||
self.allocate_use_tree_hir_id_counters(use_tree);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -547,10 +483,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
self.visit_fn_ret_ty(&f.decl.output)
|
||||
}
|
||||
TyKind::ImplTrait(def_node_id, _) => {
|
||||
self.lctx.allocate_hir_id_counter(def_node_id);
|
||||
visit::walk_ty(self, t);
|
||||
}
|
||||
_ => visit::walk_ty(self, t),
|
||||
}
|
||||
}
|
||||
|
@ -563,7 +495,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
visit::walk_crate(&mut item::ItemLowerer { lctx: &mut self }, c);
|
||||
|
||||
let module = self.lower_mod(&c.items, c.span);
|
||||
let attrs = self.lower_attrs(&c.attrs);
|
||||
self.lower_attrs(hir::CRATE_HIR_ID, &c.attrs);
|
||||
let body_ids = body_ids(&self.bodies);
|
||||
let proc_macros =
|
||||
c.proc_macros.iter().map(|id| self.node_id_to_hir_id[*id].unwrap()).collect();
|
||||
|
@ -590,8 +522,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
self.resolver.definitions().init_def_id_to_hir_id_mapping(def_id_to_hir_id);
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
for (&id, attrs) in self.attrs.iter() {
|
||||
// Verify that we do not store empty slices in the map.
|
||||
if attrs.is_empty() {
|
||||
panic!("Stored empty attributes for {:?}", id);
|
||||
}
|
||||
}
|
||||
|
||||
hir::Crate {
|
||||
item: hir::CrateItem { module, attrs, span: c.span },
|
||||
item: module,
|
||||
exported_macros: self.arena.alloc_from_iter(self.exported_macros),
|
||||
non_exported_macro_attrs: self.arena.alloc_from_iter(self.non_exported_macro_attrs),
|
||||
items: self.items,
|
||||
|
@ -604,6 +544,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
modules: self.modules,
|
||||
proc_macros,
|
||||
trait_map,
|
||||
attrs: self.attrs,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -832,7 +773,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
hir::GenericParam {
|
||||
hir_id: self.lower_node_id(node_id),
|
||||
name: hir_name,
|
||||
attrs: &[],
|
||||
bounds: &[],
|
||||
span,
|
||||
pure_wrt_drop: false,
|
||||
|
@ -926,8 +866,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// `lifetimes_to_define`. If we swapped the order of these two,
|
||||
// in-band-lifetimes introduced by generics or where-clauses
|
||||
// wouldn't have been added yet.
|
||||
let generics =
|
||||
this.lower_generics_mut(generics, ImplTraitContext::Universal(&mut params));
|
||||
let generics = this.lower_generics_mut(
|
||||
generics,
|
||||
ImplTraitContext::Universal(
|
||||
&mut params,
|
||||
this.current_hir_id_owner.last().unwrap().0,
|
||||
),
|
||||
);
|
||||
let res = f(this, &mut params);
|
||||
(params, (generics, res))
|
||||
})
|
||||
|
@ -965,11 +910,18 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
ret
}

fn lower_attrs(&mut self, attrs: &[Attribute]) -> &'hir [Attribute] {
self.arena.alloc_from_iter(attrs.iter().map(|a| self.lower_attr(a)))
fn lower_attrs(&mut self, id: hir::HirId, attrs: &[Attribute]) -> Option<&'hir [Attribute]> {
if attrs.is_empty() {
None
} else {
let ret = self.arena.alloc_from_iter(attrs.iter().map(|a| self.lower_attr(a)));
debug_assert!(!ret.is_empty());
self.attrs.insert(id, ret);
Some(ret)
}
}

fn lower_attr(&mut self, attr: &Attribute) -> Attribute {
fn lower_attr(&self, attr: &Attribute) -> Attribute {
// Note that we explicitly do not walk the path. Since we don't really
// lower attributes (we use the AST version) there is nowhere to keep
// the `HirId`s. We don't actually need HIR version of attributes anyway.
@ -989,7 +941,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
Attribute { kind, id: attr.id, style: attr.style, span: attr.span }
}

fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs {
fn alias_attrs(&mut self, id: hir::HirId, target_id: hir::HirId) {
if let Some(&a) = self.attrs.get(&target_id) {
debug_assert!(!a.is_empty());
self.attrs.insert(id, a);
}
}

fn lower_mac_args(&self, args: &MacArgs) -> MacArgs {
match *args {
MacArgs::Empty => MacArgs::Empty,
MacArgs::Delimited(dspan, delim, ref tokens) => {
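The two helpers above move attributes out of per-node fields and into a side table keyed by `HirId`, and only non-empty slices are ever stored. A minimal standalone sketch of that pattern, using a plain `BTreeMap`, `u32` ids, and `String` attributes instead of the real compiler types (all names here are illustrative, not rustc API):

use std::collections::BTreeMap;

/// Side table mapping a node id to its attributes; empty lists are never stored.
#[derive(Default)]
struct AttrMap {
    attrs: BTreeMap<u32, Vec<String>>,
}

impl AttrMap {
    /// Analogue of `lower_attrs`: record only non-empty attribute lists,
    /// returning `None` when there is nothing to store.
    fn insert_attrs(&mut self, id: u32, attrs: &[String]) -> Option<&[String]> {
        if attrs.is_empty() {
            None
        } else {
            self.attrs.insert(id, attrs.to_vec());
            self.attrs.get(&id).map(|v| v.as_slice())
        }
    }

    /// Analogue of `alias_attrs`: make `id` share whatever is already
    /// recorded for `target_id` (the real code copies a borrowed slice;
    /// cloning the Vec here is a simplification).
    fn alias_attrs(&mut self, id: u32, target_id: u32) {
        if let Some(a) = self.attrs.get(&target_id).cloned() {
            debug_assert!(!a.is_empty());
            self.attrs.insert(id, a);
        }
    }
}

fn main() {
    let mut map = AttrMap::default();
    assert!(map.insert_attrs(1, &[]).is_none()); // empty slices are not stored
    map.insert_attrs(2, &["inline".to_string()]);
    map.alias_attrs(3, 2); // node 3 now shares node 2's attributes
    assert_eq!(map.attrs.get(&3).map(|v| v.len()), Some(1));
}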
@ -1040,12 +999,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
}
|
||||
|
||||
let tokens = TokenStreamLowering {
|
||||
let tokens = FlattenNonterminals {
|
||||
parse_sess: &self.sess.parse_sess,
|
||||
synthesize_tokens: CanSynthesizeMissingTokens::Yes,
|
||||
nt_to_tokenstream: self.nt_to_tokenstream,
|
||||
}
|
||||
.lower_token(token.clone());
|
||||
.process_token(token.clone());
|
||||
MacArgs::Eq(eq_span, unwrap_single_token(self.sess, tokens, token.span))
|
||||
}
|
||||
}
|
||||
|
@ -1056,12 +1015,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
tokens: TokenStream,
|
||||
synthesize_tokens: CanSynthesizeMissingTokens,
|
||||
) -> TokenStream {
|
||||
TokenStreamLowering {
|
||||
FlattenNonterminals {
|
||||
parse_sess: &self.sess.parse_sess,
|
||||
synthesize_tokens,
|
||||
nt_to_tokenstream: self.nt_to_tokenstream,
|
||||
}
|
||||
.lower_token_stream(tokens)
|
||||
.process_token_stream(tokens)
|
||||
}
|
||||
|
||||
/// Given an associated type constraint like one of these:
|
||||
|
@ -1118,6 +1077,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
AssocTyConstraintKind::Bound { ref bounds } => {
|
||||
let mut capturable_lifetimes;
|
||||
let mut parent_def_id = self.current_hir_id_owner.last().unwrap().0;
|
||||
// Piggy-back on the `impl Trait` context to figure out the correct behavior.
|
||||
let (desugar_to_impl_trait, itctx) = match itctx {
|
||||
// We are in the return position:
|
||||
|
@ -1137,7 +1097,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// so desugar to
|
||||
//
|
||||
// fn foo(x: dyn Iterator<Item = impl Debug>)
|
||||
ImplTraitContext::Universal(..) if self.is_in_dyn_type => (true, itctx),
|
||||
ImplTraitContext::Universal(_, parent) if self.is_in_dyn_type => {
|
||||
parent_def_id = parent;
|
||||
(true, itctx)
|
||||
}
|
||||
|
||||
// In `type Foo = dyn Iterator<Item: Debug>` we desugar to
|
||||
// `type Foo = dyn Iterator<Item = impl Debug>` but we have to override the
|
||||
|
@ -1171,7 +1134,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// constructing the HIR for `impl bounds...` and then lowering that.
|
||||
|
||||
let impl_trait_node_id = self.resolver.next_node_id();
|
||||
let parent_def_id = self.current_hir_id_owner.last().unwrap().0;
|
||||
self.resolver.create_def(
|
||||
parent_def_id,
|
||||
impl_trait_node_id,
|
||||
|
@ -1393,7 +1355,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
if kind != TraitObjectSyntax::Dyn {
|
||||
self.maybe_lint_bare_trait(t.span, t.id, false);
|
||||
}
|
||||
hir::TyKind::TraitObject(bounds, lifetime_bound)
|
||||
hir::TyKind::TraitObject(bounds, lifetime_bound, kind)
|
||||
}
|
||||
TyKind::ImplTrait(def_node_id, ref bounds) => {
|
||||
let span = t.span;
|
||||
|
@ -1424,25 +1386,20 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|this| this.lower_param_bounds(bounds, nested_itctx),
|
||||
)
|
||||
}
|
||||
ImplTraitContext::Universal(in_band_ty_params) => {
|
||||
ImplTraitContext::Universal(in_band_ty_params, parent_def_id) => {
|
||||
// Add a definition for the in-band `Param`.
|
||||
let def_id = self.resolver.local_def_id(def_node_id);
|
||||
|
||||
self.allocate_hir_id_counter(def_node_id);
|
||||
|
||||
let hir_bounds = self.with_hir_id_owner(def_node_id, |this| {
|
||||
this.lower_param_bounds(
|
||||
let hir_bounds = self.lower_param_bounds(
|
||||
bounds,
|
||||
ImplTraitContext::Universal(in_band_ty_params),
|
||||
)
|
||||
});
|
||||
ImplTraitContext::Universal(in_band_ty_params, parent_def_id),
|
||||
);
|
||||
// Set the name to `impl Bound1 + Bound2`.
|
||||
let ident = Ident::from_str_and_span(&pprust::ty_to_string(t), span);
|
||||
in_band_ty_params.push(hir::GenericParam {
|
||||
hir_id: self.lower_node_id(def_node_id),
|
||||
name: ParamName::Plain(ident),
|
||||
pure_wrt_drop: false,
|
||||
attrs: &[],
|
||||
bounds: hir_bounds,
|
||||
span,
|
||||
kind: hir::GenericParamKind::Type {
|
||||
|
@ -1570,7 +1527,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
let opaque_ty_item = hir::Item {
|
||||
def_id: opaque_ty_id,
|
||||
ident: Ident::invalid(),
|
||||
attrs: Default::default(),
|
||||
kind: opaque_ty_item_kind,
|
||||
vis: respan(span.shrink_to_lo(), hir::VisibilityKind::Inherited),
|
||||
span: opaque_ty_span,
|
||||
|
@ -1731,7 +1687,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
name,
|
||||
span: lifetime.span,
|
||||
pure_wrt_drop: false,
|
||||
attrs: &[],
|
||||
bounds: &[],
|
||||
kind: hir::GenericParamKind::Lifetime { kind },
|
||||
});
|
||||
|
@ -1764,14 +1719,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
)
|
||||
}
|
||||
|
||||
fn lower_local(&mut self, l: &Local) -> (hir::Local<'hir>, SmallVec<[NodeId; 1]>) {
|
||||
let mut ids = SmallVec::<[NodeId; 1]>::new();
|
||||
if self.sess.features_untracked().impl_trait_in_bindings {
|
||||
if let Some(ref ty) = l.ty {
|
||||
let mut visitor = ImplTraitTypeIdVisitor { ids: &mut ids };
|
||||
visitor.visit_ty(ty);
|
||||
}
|
||||
}
|
||||
fn lower_local(&mut self, l: &Local) -> hir::Local<'hir> {
|
||||
let ty = l.ty.as_ref().map(|t| {
|
||||
let mut capturable_lifetimes;
|
||||
self.lower_ty(
|
||||
|
@ -1788,18 +1736,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
)
|
||||
});
|
||||
let init = l.init.as_ref().map(|e| self.lower_expr(e));
|
||||
(
|
||||
let hir_id = self.lower_node_id(l.id);
|
||||
self.lower_attrs(hir_id, &l.attrs);
|
||||
hir::Local {
|
||||
hir_id: self.lower_node_id(l.id),
|
||||
hir_id,
|
||||
ty,
|
||||
pat: self.lower_pat(&l.pat),
|
||||
init,
|
||||
span: l.span,
|
||||
attrs: l.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
|
||||
source: hir::LocalSource::Normal,
|
||||
},
|
||||
ids,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> &'hir [Ident] {
|
||||
|
@ -1866,7 +1812,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
this.arena.alloc_from_iter(inputs.iter().map(|param| {
|
||||
if let Some((_, ibty)) = &mut in_band_ty_params {
|
||||
this.lower_ty_direct(¶m.ty, ImplTraitContext::Universal(ibty))
|
||||
this.lower_ty_direct(
|
||||
¶m.ty,
|
||||
ImplTraitContext::Universal(
|
||||
ibty,
|
||||
this.current_hir_id_owner.last().unwrap().0,
|
||||
),
|
||||
)
|
||||
} else {
|
||||
this.lower_ty_direct(¶m.ty, ImplTraitContext::disallowed())
|
||||
}
|
||||
|
@ -2110,7 +2062,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
hir::FnRetTy::Return(self.arena.alloc(opaque_ty))
|
||||
}
|
||||
|
||||
/// Transforms `-> T` into `Future<Output = T>`
|
||||
/// Transforms `-> T` into `Future<Output = T>`.
|
||||
fn lower_async_fn_output_type_to_future_bound(
|
||||
&mut self,
|
||||
output: &FnRetTy,
|
||||
|
@ -2269,13 +2221,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
let kind = hir::GenericParamKind::Type {
|
||||
default: default.as_ref().map(|x| {
|
||||
self.lower_ty(
|
||||
x,
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut FxHashSet::default(),
|
||||
origin: hir::OpaqueTyOrigin::Misc,
|
||||
},
|
||||
)
|
||||
self.lower_ty(x, ImplTraitContext::Disallowed(ImplTraitPosition::Other))
|
||||
}),
|
||||
synthetic: param
|
||||
.attrs
|
||||
|
@ -2293,17 +2239,17 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
this.lower_ty(&ty, ImplTraitContext::disallowed())
|
||||
});
|
||||
let default = default.as_ref().map(|def| self.lower_anon_const(def));
|
||||
|
||||
(hir::ParamName::Plain(param.ident), hir::GenericParamKind::Const { ty, default })
|
||||
}
|
||||
};
|
||||
|
||||
let hir_id = self.lower_node_id(param.id);
|
||||
self.lower_attrs(hir_id, ¶m.attrs);
|
||||
hir::GenericParam {
|
||||
hir_id: self.lower_node_id(param.id),
|
||||
hir_id,
|
||||
name,
|
||||
span: param.ident.span,
|
||||
pure_wrt_drop: self.sess.contains_name(¶m.attrs, sym::may_dangle),
|
||||
attrs: self.lower_attrs(¶m.attrs),
|
||||
bounds: self.arena.alloc_from_iter(bounds),
|
||||
kind,
|
||||
}
|
||||
|
@ -2383,26 +2329,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
|
||||
fn lower_block_noalloc(&mut self, b: &Block, targeted_by_break: bool) -> hir::Block<'hir> {
|
||||
let mut expr: Option<&'hir _> = None;
|
||||
|
||||
let stmts = self.arena.alloc_from_iter(
|
||||
b.stmts
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(index, stmt)| {
|
||||
if index == b.stmts.len() - 1 {
|
||||
if let StmtKind::Expr(ref e) = stmt.kind {
|
||||
expr = Some(self.lower_expr(e));
|
||||
None
|
||||
} else {
|
||||
Some(self.lower_stmt(stmt))
|
||||
}
|
||||
} else {
|
||||
Some(self.lower_stmt(stmt))
|
||||
}
|
||||
})
|
||||
.flatten(),
|
||||
);
|
||||
let (stmts, expr) = match &*b.stmts {
|
||||
[stmts @ .., Stmt { kind: StmtKind::Expr(e), .. }] => (stmts, Some(&*e)),
|
||||
stmts => (stmts, None),
|
||||
};
|
||||
let stmts = self.arena.alloc_from_iter(stmts.iter().flat_map(|stmt| self.lower_stmt(stmt)));
|
||||
let expr = expr.map(|e| self.lower_expr(e));
|
||||
let rules = self.lower_block_check_mode(&b.rules);
|
||||
let hir_id = self.lower_node_id(b.id);
|
||||
|
||||
|
@ -2424,27 +2356,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
|
||||
fn lower_stmt(&mut self, s: &Stmt) -> SmallVec<[hir::Stmt<'hir>; 1]> {
|
||||
let kind = match s.kind {
|
||||
let (hir_id, kind) = match s.kind {
|
||||
StmtKind::Local(ref l) => {
|
||||
let (l, item_ids) = self.lower_local(l);
|
||||
let mut ids: SmallVec<[hir::Stmt<'hir>; 1]> = item_ids
|
||||
.into_iter()
|
||||
.map(|item_id| {
|
||||
let item_id = hir::ItemId {
|
||||
// All the items that `lower_local` finds are `impl Trait` types.
|
||||
def_id: self.lower_node_id(item_id).expect_owner(),
|
||||
};
|
||||
self.stmt(s.span, hir::StmtKind::Item(item_id))
|
||||
})
|
||||
.collect();
|
||||
ids.push({
|
||||
hir::Stmt {
|
||||
hir_id: self.lower_node_id(s.id),
|
||||
let l = self.lower_local(l);
|
||||
let hir_id = self.lower_node_id(s.id);
|
||||
self.alias_attrs(hir_id, l.hir_id);
|
||||
return smallvec![hir::Stmt {
|
||||
hir_id,
|
||||
kind: hir::StmtKind::Local(self.arena.alloc(l)),
|
||||
span: s.span,
|
||||
}
|
||||
});
|
||||
return ids;
|
||||
}];
|
||||
}
|
||||
StmtKind::Item(ref it) => {
|
||||
// Can only use the ID once.
|
||||
|
@ -2462,12 +2383,22 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
})
|
||||
.collect();
|
||||
}
|
||||
StmtKind::Expr(ref e) => hir::StmtKind::Expr(self.lower_expr(e)),
|
||||
StmtKind::Semi(ref e) => hir::StmtKind::Semi(self.lower_expr(e)),
|
||||
StmtKind::Expr(ref e) => {
|
||||
let e = self.lower_expr(e);
|
||||
let hir_id = self.lower_node_id(s.id);
|
||||
self.alias_attrs(hir_id, e.hir_id);
|
||||
(hir_id, hir::StmtKind::Expr(e))
|
||||
}
|
||||
StmtKind::Semi(ref e) => {
|
||||
let e = self.lower_expr(e);
|
||||
let hir_id = self.lower_node_id(s.id);
|
||||
self.alias_attrs(hir_id, e.hir_id);
|
||||
(hir_id, hir::StmtKind::Semi(e))
|
||||
}
|
||||
StmtKind::Empty => return smallvec![],
|
||||
StmtKind::MacCall(..) => panic!("shouldn't exist here"),
|
||||
};
|
||||
smallvec![hir::Stmt { hir_id: self.lower_node_id(s.id), kind, span: s.span }]
|
||||
smallvec![hir::Stmt { hir_id, kind, span: s.span }]
|
||||
}
|
||||
|
||||
fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode {
|
||||
|
@ -2511,13 +2442,18 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
fn stmt_let_pat(
|
||||
&mut self,
|
||||
attrs: AttrVec,
|
||||
attrs: Option<&'hir [Attribute]>,
|
||||
span: Span,
|
||||
init: Option<&'hir hir::Expr<'hir>>,
|
||||
pat: &'hir hir::Pat<'hir>,
|
||||
source: hir::LocalSource,
|
||||
) -> hir::Stmt<'hir> {
|
||||
let local = hir::Local { attrs, hir_id: self.next_id(), init, pat, source, span, ty: None };
|
||||
let hir_id = self.next_id();
|
||||
if let Some(a) = attrs {
|
||||
debug_assert!(!a.is_empty());
|
||||
self.attrs.insert(hir_id, a);
|
||||
}
|
||||
let local = hir::Local { hir_id, init, pat, source, span, ty: None };
|
||||
self.stmt(span, hir::StmtKind::Local(self.arena.alloc(local)))
|
||||
}
|
||||
|
||||
|
@ -2571,8 +2507,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
&mut self,
|
||||
span: Span,
|
||||
pat: &'hir hir::Pat<'hir>,
|
||||
) -> &'hir [hir::FieldPat<'hir>] {
|
||||
let field = hir::FieldPat {
|
||||
) -> &'hir [hir::PatField<'hir>] {
|
||||
let field = hir::PatField {
|
||||
hir_id: self.next_id(),
|
||||
ident: Ident::new(sym::integer(0), span),
|
||||
is_shorthand: false,
|
||||
|
@ -2586,7 +2522,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
&mut self,
|
||||
span: Span,
|
||||
lang_item: hir::LangItem,
|
||||
fields: &'hir [hir::FieldPat<'hir>],
|
||||
fields: &'hir [hir::PatField<'hir>],
|
||||
) -> &'hir hir::Pat<'hir> {
|
||||
let qpath = hir::QPath::LangItem(lang_item, span);
|
||||
self.pat(span, hir::PatKind::Struct(qpath, fields, false))
|
||||
|
@ -2660,6 +2596,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
hir::TyKind::TraitObject(
|
||||
arena_vec![self; principal],
|
||||
self.elided_dyn_bound(span),
|
||||
TraitObjectSyntax::None,
|
||||
)
|
||||
}
|
||||
_ => hir::TyKind::Path(hir::QPath::Resolved(None, path)),
|
||||
|
|
|
@ -56,7 +56,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
ImplTraitContext::disallowed(),
|
||||
);
|
||||
|
||||
let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat {
|
||||
let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::PatField {
|
||||
hir_id: self.next_id(),
|
||||
ident: f.ident,
|
||||
pat: self.lower_pat(&f.pat),
|
||||
|
|
|
@ -30,6 +30,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
let partial_res =
|
||||
self.resolver.get_partial_res(id).unwrap_or_else(|| PartialRes::new(Res::Err));
|
||||
|
||||
let path_span_lo = p.span.shrink_to_lo();
|
||||
let proj_start = p.segments.len() - partial_res.unresolved_segments();
|
||||
let path = self.arena.alloc(hir::Path {
|
||||
res: self.lower_res(partial_res.base_res()),
|
||||
|
@ -108,7 +109,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
)
|
||||
},
|
||||
)),
|
||||
span: p.span,
|
||||
span: p.segments[..proj_start]
|
||||
.last()
|
||||
.map_or(path_span_lo, |segment| path_span_lo.to(segment.span())),
|
||||
});
|
||||
|
||||
// Simple case, either no projections, or only fully-qualified.
|
||||
|
@ -127,7 +130,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// e.g., `Vec` in `Vec::new` or `<I as Iterator>::Item` in
|
||||
// `<I as Iterator>::Item::default`.
|
||||
let new_id = self.next_id();
|
||||
self.arena.alloc(self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path)))
|
||||
self.arena.alloc(self.ty_path(new_id, path.span, hir::QPath::Resolved(qself, path)))
|
||||
};
|
||||
|
||||
// Anything after the base path are associated "extensions",
|
||||
|
@ -141,7 +144,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// 3. `<<std::vec::Vec<T>>::IntoIter>::Item`
|
||||
// * final path is `<<<std::vec::Vec<T>>::IntoIter>::Item>::clone`
|
||||
for (i, segment) in p.segments.iter().enumerate().skip(proj_start) {
|
||||
let segment = self.arena.alloc(self.lower_path_segment(
|
||||
let hir_segment = self.arena.alloc(self.lower_path_segment(
|
||||
p.span,
|
||||
segment,
|
||||
param_mode,
|
||||
|
@ -150,7 +153,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
itctx.reborrow(),
|
||||
None,
|
||||
));
|
||||
let qpath = hir::QPath::TypeRelative(ty, segment);
|
||||
let qpath = hir::QPath::TypeRelative(ty, hir_segment);
|
||||
|
||||
// It's finished, return the extension of the right node type.
|
||||
if i == p.segments.len() - 1 {
|
||||
|
@ -159,7 +162,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
// Wrap the associated extension in another type node.
|
||||
let new_id = self.next_id();
|
||||
ty = self.arena.alloc(self.ty_path(new_id, p.span, qpath));
|
||||
ty = self.arena.alloc(self.ty_path(new_id, path_span_lo.to(segment.span()), qpath));
|
||||
}
|
||||
|
||||
// We should've returned in the for loop above.
|
||||
|
|
|
@ -532,6 +532,25 @@ impl<'a> AstValidator<'a> {
}
}

/// An item in `extern { ... }` cannot use non-ascii identifier.
fn check_foreign_item_ascii_only(&self, ident: Ident) {
let symbol_str = ident.as_str();
if !symbol_str.is_ascii() {
let n = 83942;
self.err_handler()
.struct_span_err(
ident.span,
"items in `extern` blocks cannot use non-ascii identifiers",
)
.span_label(self.current_extern_span(), "in this `extern` block")
.note(&format!(
"This limitation may be lifted in the future; see issue #{} <https://github.com/rust-lang/rust/issues/{}> for more information",
n, n,
))
.emit();
}
}
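The new `check_foreign_item_ascii_only` above simply rejects any `extern`-block item whose name is not pure ASCII and points at tracking issue #83942. A rough standalone equivalent of that check, with a plain `String` error standing in for the real diagnostic builder (illustrative only):

// Minimal sketch: reject non-ASCII names for items in `extern { ... }` blocks.
fn check_foreign_item_ascii_only(ident: &str) -> Result<(), String> {
    if ident.is_ascii() {
        Ok(())
    } else {
        // Mirrors the diagnostic above; #83942 is the issue it references.
        Err(format!(
            "items in `extern` blocks cannot use non-ascii identifiers: `{}` \
             (see issue #83942 for more information)",
            ident
        ))
    }
}

fn main() {
    assert!(check_foreign_item_ascii_only("memcpy").is_ok());
    assert!(check_foreign_item_ascii_only("функция").is_err());
}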
/// Reject C-varadic type unless the function is foreign,
|
||||
/// or free and `unsafe extern "C"` semantically.
|
||||
fn check_c_varadic_type(&self, fk: FnKind<'a>) {
|
||||
|
@ -592,7 +611,7 @@ impl<'a> AstValidator<'a> {
|
|||
self.session,
|
||||
ident.span,
|
||||
E0754,
|
||||
"trying to load file for module `{}` with non ascii identifer name",
|
||||
"trying to load file for module `{}` with non-ascii identifier name",
|
||||
ident.name
|
||||
)
|
||||
.help("consider using `#[path]` attribute to specify filesystem path")
|
||||
|
@ -1103,15 +1122,18 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||
self.check_defaultness(fi.span, *def);
|
||||
self.check_foreign_fn_bodyless(fi.ident, body.as_deref());
|
||||
self.check_foreign_fn_headerless(fi.ident, fi.span, sig.header);
|
||||
self.check_foreign_item_ascii_only(fi.ident);
|
||||
}
|
||||
ForeignItemKind::TyAlias(box TyAliasKind(def, generics, bounds, body)) => {
|
||||
self.check_defaultness(fi.span, *def);
|
||||
self.check_foreign_kind_bodyless(fi.ident, "type", body.as_ref().map(|b| b.span));
|
||||
self.check_type_no_bounds(bounds, "`extern` blocks");
|
||||
self.check_foreign_ty_genericless(generics);
|
||||
self.check_foreign_item_ascii_only(fi.ident);
|
||||
}
|
||||
ForeignItemKind::Static(_, _, body) => {
|
||||
self.check_foreign_kind_bodyless(fi.ident, "static", body.as_ref().map(|b| b.span));
|
||||
self.check_foreign_item_ascii_only(fi.ident);
|
||||
}
|
||||
ForeignItemKind::MacCall(..) => {}
|
||||
}
|
||||
|
@ -1150,20 +1172,23 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||
}
|
||||
|
||||
fn visit_generics(&mut self, generics: &'a Generics) {
|
||||
let mut prev_ty_default = None;
|
||||
let cg_defaults = self.session.features_untracked().const_generics_defaults;
|
||||
|
||||
let mut prev_param_default = None;
|
||||
for param in &generics.params {
|
||||
match param.kind {
|
||||
GenericParamKind::Lifetime => (),
|
||||
GenericParamKind::Type { default: Some(_), .. } => {
|
||||
prev_ty_default = Some(param.ident.span);
|
||||
GenericParamKind::Type { default: Some(_), .. }
|
||||
| GenericParamKind::Const { default: Some(_), .. } => {
|
||||
prev_param_default = Some(param.ident.span);
|
||||
}
|
||||
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {
|
||||
if let Some(span) = prev_ty_default {
|
||||
if let Some(span) = prev_param_default {
|
||||
let mut err = self.err_handler().struct_span_err(
|
||||
span,
|
||||
"type parameters with a default must be trailing",
|
||||
"generic parameters with a default must be trailing",
|
||||
);
|
||||
if matches!(param.kind, GenericParamKind::Const { .. }) {
|
||||
if matches!(param.kind, GenericParamKind::Const { .. }) && !cg_defaults {
|
||||
err.note(
|
||||
"using type defaults and const parameters \
|
||||
in the same parameter list is currently not permitted",
|
||||
|
@ -1188,8 +1213,41 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||
deny_equality_constraints(self, predicate, generics);
|
||||
}
|
||||
}
|
||||
walk_list!(self, visit_generic_param, &generics.params);
|
||||
for predicate in &generics.where_clause.predicates {
|
||||
match predicate {
|
||||
WherePredicate::BoundPredicate(bound_pred) => {
|
||||
// A type binding, eg `for<'c> Foo: Send+Clone+'c`
|
||||
self.check_late_bound_lifetime_defs(&bound_pred.bound_generic_params);
|
||||
|
||||
visit::walk_generics(self, generics)
|
||||
// This is slightly complicated. Our representation for poly-trait-refs contains a single
|
||||
// binder and thus we only allow a single level of quantification. However,
|
||||
// the syntax of Rust permits quantification in two places in where clauses,
|
||||
// e.g., `T: for <'a> Foo<'a>` and `for <'a, 'b> &'b T: Foo<'a>`. If both are
|
||||
// defined, then error.
|
||||
if !bound_pred.bound_generic_params.is_empty() {
|
||||
for bound in &bound_pred.bounds {
|
||||
match bound {
|
||||
GenericBound::Trait(t, _) => {
|
||||
if !t.bound_generic_params.is_empty() {
|
||||
struct_span_err!(
|
||||
self.err_handler(),
|
||||
t.span,
|
||||
E0316,
|
||||
"nested quantification of lifetimes"
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
GenericBound::Outlives(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
self.visit_where_predicate(predicate);
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_generic_param(&mut self, param: &'a GenericParam) {
|
||||
|
@ -1238,14 +1296,6 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
|||
visit::walk_pat(self, pat)
|
||||
}
|
||||
|
||||
fn visit_where_predicate(&mut self, p: &'a WherePredicate) {
|
||||
if let &WherePredicate::BoundPredicate(ref bound_predicate) = p {
|
||||
// A type binding, eg `for<'c> Foo: Send+Clone+'c`
|
||||
self.check_late_bound_lifetime_defs(&bound_predicate.bound_generic_params);
|
||||
}
|
||||
visit::walk_where_predicate(self, p);
|
||||
}
|
||||
|
||||
fn visit_poly_trait_ref(&mut self, t: &'a PolyTraitRef, m: &'a TraitBoundModifier) {
|
||||
self.check_late_bound_lifetime_defs(&t.bound_generic_params);
|
||||
visit::walk_poly_trait_ref(self, t, m);
|
||||
|
|
|
@ -8,7 +8,7 @@ use rustc_feature::{Features, GateIssue};
|
|||
use rustc_session::parse::{feature_err, feature_err_issue};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_span::symbol::{sym, Symbol};
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::Span;
|
||||
|
||||
use tracing::debug;
|
||||
|
@ -164,6 +164,46 @@ impl<'a> PostExpansionVisitor<'a> {
|
|||
"C-cmse-nonsecure-call ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"C-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"C-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"stdcall-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"stdcall-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"system-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"system-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"thiscall-unwind" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
c_unwind,
|
||||
span,
|
||||
"thiscall-unwind ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"wasm" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
wasm_abi,
|
||||
span,
|
||||
"wasm ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
abi => self
|
||||
.sess
|
||||
.parse_sess
|
||||
|
@ -247,7 +287,7 @@ impl<'a> PostExpansionVisitor<'a> {
|
|||
if let ast::TyKind::ImplTrait(..) = ty.kind {
|
||||
gate_feature_post!(
|
||||
&self.vis,
|
||||
type_alias_impl_trait,
|
||||
min_type_alias_impl_trait,
|
||||
ty.span,
|
||||
"`impl Trait` in type aliases is unstable"
|
||||
);
|
||||
|
@ -281,24 +321,13 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
|
|||
include => external_doc
|
||||
cfg => doc_cfg
|
||||
masked => doc_masked
|
||||
spotlight => doc_spotlight
|
||||
notable_trait => doc_notable_trait
|
||||
keyword => doc_keyword
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_name(&mut self, sp: Span, name: Symbol) {
|
||||
if !name.as_str().is_ascii() {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
non_ascii_idents,
|
||||
self.sess.parse_sess.source_map().guess_head_span(sp),
|
||||
"non-ascii idents are not fully supported"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_item(&mut self, i: &'a ast::Item) {
|
||||
match i.kind {
|
||||
ast::ItemKind::ForeignMod(ref foreign_module) => {
|
||||
|
@ -326,16 +355,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
|
|||
over time"
|
||||
);
|
||||
}
|
||||
if self.sess.contains_name(&i.attrs[..], sym::main) {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
main,
|
||||
i.span,
|
||||
"declaration of a non-standard `#[main]` \
|
||||
function may change over time, for now \
|
||||
a top-level `fn main()` is required"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
ast::ItemKind::Struct(..) => {
|
||||
|
@ -638,15 +657,22 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
|
|||
}
|
||||
};
|
||||
}
|
||||
gate_all!(if_let_guard, "`if let` guards are experimental");
|
||||
gate_all!(let_chains, "`let` expressions in this position are experimental");
|
||||
gate_all!(
|
||||
if_let_guard,
|
||||
"`if let` guards are experimental",
|
||||
"you can write `if matches!(<expr>, <pattern>)` instead of `if let <pattern> = <expr>`"
|
||||
);
|
||||
gate_all!(
|
||||
let_chains,
|
||||
"`let` expressions in this position are experimental",
|
||||
"you can write `matches!(<expr>, <pattern>)` instead of `let <pattern> = <expr>`"
|
||||
);
|
||||
gate_all!(
|
||||
async_closure,
|
||||
"async closures are unstable",
|
||||
"to use an async block, remove the `||`: `async {`"
|
||||
);
|
||||
gate_all!(generators, "yield syntax is experimental");
|
||||
gate_all!(or_patterns, "or-patterns syntax is experimental");
|
||||
gate_all!(raw_ref_op, "raw address of syntax is experimental");
|
||||
gate_all!(const_trait_bound_opt_out, "`?const` on trait bounds is experimental");
|
||||
gate_all!(const_trait_impl, "const trait impls are experimental");
|
||||
|
@ -701,16 +727,46 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
|
|||
}
|
||||
|
||||
fn maybe_stage_features(sess: &Session, krate: &ast::Crate) {
|
||||
use rustc_errors::Applicability;
|
||||
|
||||
if !sess.opts.unstable_features.is_nightly_build() {
|
||||
let lang_features = &sess.features_untracked().declared_lang_features;
|
||||
for attr in krate.attrs.iter().filter(|attr| sess.check_name(attr, sym::feature)) {
|
||||
struct_span_err!(
|
||||
let mut err = struct_span_err!(
|
||||
sess.parse_sess.span_diagnostic,
|
||||
attr.span,
|
||||
E0554,
|
||||
"`#![feature]` may not be used on the {} release channel",
|
||||
option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)")
|
||||
)
|
||||
.emit();
|
||||
);
|
||||
let mut all_stable = true;
|
||||
for ident in
|
||||
attr.meta_item_list().into_iter().flatten().map(|nested| nested.ident()).flatten()
|
||||
{
|
||||
let name = ident.name;
|
||||
let stable_since = lang_features
|
||||
.iter()
|
||||
.flat_map(|&(feature, _, since)| if feature == name { since } else { None })
|
||||
.next();
|
||||
if let Some(since) = stable_since {
|
||||
err.help(&format!(
|
||||
"the feature `{}` has been stable since {} and no longer requires \
|
||||
an attribute to enable",
|
||||
name, since
|
||||
));
|
||||
} else {
|
||||
all_stable = false;
|
||||
}
|
||||
}
|
||||
if all_stable {
|
||||
err.span_suggestion(
|
||||
attr.span,
|
||||
"remove the attribute",
|
||||
String::new(),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
err.emit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -88,9 +88,9 @@ impl<'ast> Visitor<'ast> for NodeCounter {
self.count += 1;
walk_struct_def(self, s)
}
fn visit_struct_field(&mut self, s: &StructField) {
fn visit_field_def(&mut self, s: &FieldDef) {
self.count += 1;
walk_struct_field(self, s)
walk_field_def(self, s)
}
fn visit_enum_def(
&mut self,
|
|
|
@ -1,6 +1,6 @@
|
|||
#![feature(bool_to_option)]
|
||||
#![feature(crate_visibility_modifier)]
|
||||
#![feature(or_patterns)]
|
||||
#![cfg_attr(bootstrap, feature(or_patterns))]
|
||||
#![feature(box_patterns)]
|
||||
#![recursion_limit = "256"]
|
||||
|
||||
|
|
|
@ -22,10 +22,6 @@ pub fn token_to_string(token: &Token) -> String {
|
|||
State::new().token_to_string(token)
|
||||
}
|
||||
|
||||
pub fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
|
||||
State::new().token_to_string_ext(token, convert_dollar_crate)
|
||||
}
|
||||
|
||||
pub fn ty_to_string(ty: &ast::Ty) -> String {
|
||||
State::new().ty_to_string(ty)
|
||||
}
|
||||
|
@ -50,18 +46,10 @@ pub fn tts_to_string(tokens: &TokenStream) -> String {
|
|||
State::new().tts_to_string(tokens)
|
||||
}
|
||||
|
||||
pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
|
||||
State::new().stmt_to_string(stmt)
|
||||
}
|
||||
|
||||
pub fn item_to_string(i: &ast::Item) -> String {
|
||||
State::new().item_to_string(i)
|
||||
}
|
||||
|
||||
pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String {
|
||||
State::new().generic_params_to_string(generic_params)
|
||||
}
|
||||
|
||||
pub fn path_to_string(p: &ast::Path) -> String {
|
||||
State::new().path_to_string(p)
|
||||
}
|
||||
|
@ -74,26 +62,14 @@ pub fn vis_to_string(v: &ast::Visibility) -> String {
|
|||
State::new().vis_to_string(v)
|
||||
}
|
||||
|
||||
pub fn block_to_string(blk: &ast::Block) -> String {
|
||||
State::new().block_to_string(blk)
|
||||
}
|
||||
|
||||
pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String {
|
||||
State::new().meta_list_item_to_string(li)
|
||||
}
|
||||
|
||||
pub fn attr_item_to_string(ai: &ast::AttrItem) -> String {
|
||||
State::new().attr_item_to_string(ai)
|
||||
}
|
||||
|
||||
pub fn attribute_to_string(attr: &ast::Attribute) -> String {
|
||||
State::new().attribute_to_string(attr)
|
||||
}
|
||||
|
||||
pub fn param_to_string(arg: &ast::Param) -> String {
|
||||
State::new().param_to_string(arg)
|
||||
}
|
||||
|
||||
pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
|
||||
State::new().to_string(f)
|
||||
}
|
||||
|
|
|
@ -1711,7 +1711,7 @@ impl<'a> State<'a> {
|
|||
fn print_expr_struct(
|
||||
&mut self,
|
||||
path: &ast::Path,
|
||||
fields: &[ast::Field],
|
||||
fields: &[ast::ExprField],
|
||||
rest: &ast::StructRest,
|
||||
attrs: &[ast::Attribute],
|
||||
) {
|
||||
|
@ -1873,8 +1873,8 @@ impl<'a> State<'a> {
|
|||
ast::ExprKind::Repeat(ref element, ref count) => {
|
||||
self.print_expr_repeat(element, count, attrs);
|
||||
}
|
||||
ast::ExprKind::Struct(ref path, ref fields, ref rest) => {
|
||||
self.print_expr_struct(path, &fields[..], rest, attrs);
|
||||
ast::ExprKind::Struct(ref se) => {
|
||||
self.print_expr_struct(&se.path, &se.fields, &se.rest, attrs);
|
||||
}
|
||||
ast::ExprKind::Tup(ref exprs) => {
|
||||
self.print_expr_tup(&exprs[..], attrs);
|
||||
|
@ -2149,10 +2149,10 @@ impl<'a> State<'a> {
|
|||
None => s.word("_"),
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::Const { expr } => {
|
||||
InlineAsmOperand::Const { anon_const } => {
|
||||
s.word("const");
|
||||
s.space();
|
||||
s.print_expr(expr);
|
||||
s.print_expr(&anon_const.value);
|
||||
}
|
||||
InlineAsmOperand::Sym { expr } => {
|
||||
s.word("sym");
|
||||
|
@ -2292,10 +2292,6 @@ impl<'a> State<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_usize(&mut self, i: usize) {
|
||||
self.s.word(i.to_string())
|
||||
}
|
||||
|
||||
crate fn print_name(&mut self, name: Symbol) {
|
||||
self.s.word(name.to_string());
|
||||
self.ann.post(self, AnnNode::Name(&name))
|
||||
|
@ -2659,8 +2655,10 @@ impl<'a> State<'a> {
|
|||
s.word_space(":");
|
||||
s.print_type(ty);
|
||||
s.print_type_bounds(":", ¶m.bounds);
|
||||
if let Some(ref _default) = default {
|
||||
// FIXME(const_generics_defaults): print the `default` value here
|
||||
if let Some(ref default) = default {
|
||||
s.s.space();
|
||||
s.word_space("=");
|
||||
s.print_expr(&default.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -862,18 +862,6 @@ pub fn find_repr_attrs(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
    if let Some(items) = attr.meta_item_list() {
        sess.mark_attr_used(attr);
        for item in items {
            if !item.is_meta_item() {
                handle_errors(
                    &sess.parse_sess,
                    item.span(),
                    AttrError::UnsupportedLiteral(
                        "meta item in `repr` must be an identifier",
                        false,
                    ),
                );
                continue;
            }

            let mut recognised = false;
            if item.is_word() {
                let hint = match item.name_or_empty() {
@ -890,23 +878,6 @@ pub fn find_repr_attrs(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
                    acc.push(h);
                }
            } else if let Some((name, value)) = item.name_value_literal() {
                let parse_alignment = |node: &ast::LitKind| -> Result<u32, &'static str> {
                    if let ast::LitKind::Int(literal, ast::LitIntType::Unsuffixed) = node {
                        if literal.is_power_of_two() {
                            // rustc_middle::ty::layout::Align restricts align to <= 2^29
                            if *literal <= 1 << 29 {
                                Ok(*literal as u32)
                            } else {
                                Err("larger than 2^29")
                            }
                        } else {
                            Err("not a power of two")
                        }
                    } else {
                        Err("not an unsuffixed integer")
                    }
                };

                let mut literal_error = None;
                if name == sym::align {
                    recognised = true;
@ -966,13 +937,7 @@ pub fn find_repr_attrs(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
            }
            if !recognised {
                // Not a word we recognize
                struct_span_err!(
                    diagnostic,
                    item.span(),
                    E0552,
                    "unrecognized representation hint"
                )
                .emit();
                diagnostic.delay_span_bug(item.span(), "unrecognized representation hint");
            }
        }
    }
@ -1080,3 +1045,16 @@ fn allow_unstable<'a>(
        name
    })
}

pub fn parse_alignment(node: &ast::LitKind) -> Result<u32, &'static str> {
    if let ast::LitKind::Int(literal, ast::LitIntType::Unsuffixed) = node {
        if literal.is_power_of_two() {
            // rustc_middle::ty::layout::Align restricts align to <= 2^29
            if *literal <= 1 << 29 { Ok(*literal as u32) } else { Err("larger than 2^29") }
        } else {
            Err("not a power of two")
        }
    } else {
        Err("not an unsuffixed integer")
    }
}
@ -4,7 +4,7 @@
//! The goal is to move the definition of `MetaItem` and things that don't need to be in `syntax`
//! to this crate.

#![feature(or_patterns)]
#![cfg_attr(bootstrap, feature(or_patterns))]

#[macro_use]
extern crate rustc_macros;
@ -7,8 +7,10 @@ use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc_expand::base::{self, *};
use rustc_parse::parser::Parser;
use rustc_parse_format as parse;
use rustc_session::lint;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::{InnerSpan, Span};
use rustc_target::asm::InlineAsmArch;

struct AsmArgs {
    templates: Vec<P<ast::Expr>>,
@ -134,8 +136,8 @@ fn parse_args<'a>(
            ast::InlineAsmOperand::InOut { reg, expr, late: true }
        }
    } else if p.eat_keyword(kw::Const) {
        let expr = p.parse_expr()?;
        ast::InlineAsmOperand::Const { expr }
        let anon_const = p.parse_anon_const_expr()?;
        ast::InlineAsmOperand::Const { anon_const }
    } else if p.eat_keyword(sym::sym) {
        let expr = p.parse_expr()?;
        match expr.kind {
@ -424,6 +426,40 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast

    let template_str = &template_str.as_str();
    let template_snippet = ecx.source_map().span_to_snippet(template_sp).ok();

    if let Some(InlineAsmArch::X86 | InlineAsmArch::X86_64) = ecx.sess.asm_arch {
        let find_span = |needle: &str| -> Span {
            if let Some(snippet) = &template_snippet {
                if let Some(pos) = snippet.find(needle) {
                    let end = pos
                        + &snippet[pos..]
                            .find(|c| matches!(c, '\n' | ';' | '\\' | '"'))
                            .unwrap_or(snippet[pos..].len() - 1);
                    let inner = InnerSpan::new(pos, end);
                    return template_sp.from_inner(inner);
                }
            }
            template_sp
        };

        if template_str.contains(".intel_syntax") {
            ecx.parse_sess().buffer_lint(
                lint::builtin::BAD_ASM_STYLE,
                find_span(".intel_syntax"),
                ecx.resolver.lint_node_id(ecx.current_expansion.id),
                "avoid using `.intel_syntax`, Intel syntax is the default",
            );
        }
        if template_str.contains(".att_syntax") {
            ecx.parse_sess().buffer_lint(
                lint::builtin::BAD_ASM_STYLE,
                find_span(".att_syntax"),
                ecx.resolver.lint_node_id(ecx.current_expansion.id),
                "avoid using `.att_syntax`, prefer using `options(att_syntax)` instead",
            );
        }
    }

    let mut parser = parse::Parser::new(
        template_str,
        str_style,
269
compiler/rustc_builtin_macros/src/cfg_eval.rs
Normal file
@ -0,0 +1,269 @@
use crate::util::check_builtin_macro_attribute;

use rustc_ast as ast;
use rustc_ast::mut_visit::MutVisitor;
use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::visit::Visitor;
use rustc_ast::{mut_visit, visit};
use rustc_ast::{AstLike, Attribute};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_expand::config::StripUnconfigured;
use rustc_expand::configure;
use rustc_parse::parser::ForceCollect;
use rustc_session::utils::FlattenNonterminals;

use rustc_ast::ptr::P;
use rustc_span::symbol::sym;
use rustc_span::Span;
use smallvec::SmallVec;

crate fn expand(
    ecx: &mut ExtCtxt<'_>,
    _span: Span,
    meta_item: &ast::MetaItem,
    annotatable: Annotatable,
) -> Vec<Annotatable> {
    check_builtin_macro_attribute(ecx, meta_item, sym::cfg_eval);
    cfg_eval(ecx, annotatable)
}

crate fn cfg_eval(ecx: &ExtCtxt<'_>, annotatable: Annotatable) -> Vec<Annotatable> {
    let mut visitor = CfgEval {
        cfg: &mut StripUnconfigured {
            sess: ecx.sess,
            features: ecx.ecfg.features,
            config_tokens: true,
        },
    };
    let annotatable = visitor.configure_annotatable(annotatable);
    vec![annotatable]
}

struct CfgEval<'a, 'b> {
    cfg: &'a mut StripUnconfigured<'b>,
}

fn flat_map_annotatable(vis: &mut impl MutVisitor, annotatable: Annotatable) -> Annotatable {
    // Since the item itself has already been configured by the InvocationCollector,
    // we know that fold result vector will contain exactly one element
    match annotatable {
        Annotatable::Item(item) => Annotatable::Item(vis.flat_map_item(item).pop().unwrap()),
        Annotatable::TraitItem(item) => {
            Annotatable::TraitItem(vis.flat_map_trait_item(item).pop().unwrap())
        }
        Annotatable::ImplItem(item) => {
            Annotatable::ImplItem(vis.flat_map_impl_item(item).pop().unwrap())
        }
        Annotatable::ForeignItem(item) => {
            Annotatable::ForeignItem(vis.flat_map_foreign_item(item).pop().unwrap())
        }
        Annotatable::Stmt(stmt) => {
            Annotatable::Stmt(stmt.map(|stmt| vis.flat_map_stmt(stmt).pop().unwrap()))
        }
        Annotatable::Expr(mut expr) => Annotatable::Expr({
            vis.visit_expr(&mut expr);
            expr
        }),
        Annotatable::Arm(arm) => Annotatable::Arm(vis.flat_map_arm(arm).pop().unwrap()),
        Annotatable::ExprField(field) => {
            Annotatable::ExprField(vis.flat_map_expr_field(field).pop().unwrap())
        }
        Annotatable::PatField(fp) => {
            Annotatable::PatField(vis.flat_map_pat_field(fp).pop().unwrap())
        }
        Annotatable::GenericParam(param) => {
            Annotatable::GenericParam(vis.flat_map_generic_param(param).pop().unwrap())
        }
        Annotatable::Param(param) => Annotatable::Param(vis.flat_map_param(param).pop().unwrap()),
        Annotatable::FieldDef(sf) => {
            Annotatable::FieldDef(vis.flat_map_field_def(sf).pop().unwrap())
        }
        Annotatable::Variant(v) => Annotatable::Variant(vis.flat_map_variant(v).pop().unwrap()),
    }
}

struct CfgFinder {
    has_cfg_or_cfg_attr: bool,
}

impl CfgFinder {
    fn has_cfg_or_cfg_attr(annotatable: &Annotatable) -> bool {
        let mut finder = CfgFinder { has_cfg_or_cfg_attr: false };
        match annotatable {
            Annotatable::Item(item) => finder.visit_item(&item),
            Annotatable::TraitItem(item) => finder.visit_assoc_item(&item, visit::AssocCtxt::Trait),
            Annotatable::ImplItem(item) => finder.visit_assoc_item(&item, visit::AssocCtxt::Impl),
            Annotatable::ForeignItem(item) => finder.visit_foreign_item(&item),
            Annotatable::Stmt(stmt) => finder.visit_stmt(&stmt),
            Annotatable::Expr(expr) => finder.visit_expr(&expr),
            Annotatable::Arm(arm) => finder.visit_arm(&arm),
            Annotatable::ExprField(field) => finder.visit_expr_field(&field),
            Annotatable::PatField(field) => finder.visit_pat_field(&field),
            Annotatable::GenericParam(param) => finder.visit_generic_param(&param),
            Annotatable::Param(param) => finder.visit_param(&param),
            Annotatable::FieldDef(field) => finder.visit_field_def(&field),
            Annotatable::Variant(variant) => finder.visit_variant(&variant),
        };
        finder.has_cfg_or_cfg_attr
    }
}

impl<'ast> visit::Visitor<'ast> for CfgFinder {
    fn visit_attribute(&mut self, attr: &'ast Attribute) {
        // We want short-circuiting behavior, so don't use the '|=' operator.
        self.has_cfg_or_cfg_attr = self.has_cfg_or_cfg_attr
            || attr
                .ident()
                .map_or(false, |ident| ident.name == sym::cfg || ident.name == sym::cfg_attr);
    }
}

impl CfgEval<'_, '_> {
    fn configure<T: AstLike>(&mut self, node: T) -> Option<T> {
        self.cfg.configure(node)
    }

    pub fn configure_annotatable(&mut self, mut annotatable: Annotatable) -> Annotatable {
        // Tokenizing and re-parsing the `Annotatable` can have a significant
        // performance impact, so try to avoid it if possible
        if !CfgFinder::has_cfg_or_cfg_attr(&annotatable) {
            return annotatable;
        }
        // The majority of parsed attribute targets will never need to have early cfg-expansion
        // run (e.g. they are not part of a `#[derive]` or `#[cfg_eval]` macro input).
        // Therefore, we normally do not capture the necessary information about `#[cfg]`
        // and `#[cfg_attr]` attributes during parsing.
        //
        // Therefore, when we actually *do* run early cfg-expansion, we need to tokenize
        // and re-parse the attribute target, this time capturing information about
        // the location of `#[cfg]` and `#[cfg_attr]` in the token stream. The tokenization
        // process is lossless, so this process is invisible to proc-macros.

        // FIXME - get rid of this clone
        let nt = annotatable.clone().into_nonterminal();

        let mut orig_tokens = rustc_parse::nt_to_tokenstream(
            &nt,
            &self.cfg.sess.parse_sess,
            CanSynthesizeMissingTokens::No,
        );

        // 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`)
        // to `None`-delimited groups containing the corresponding tokens. This
        // is normally delayed until the proc-macro server actually needs to
        // provide a `TokenKind::Interpolated` to a proc-macro. We do this earlier,
        // so that we can handle cases like:
        //
        // ```rust
        // #[cfg_eval] #[cfg] $item
        // ```
        //
        // where `$item` is `#[cfg_attr] struct Foo {}`. We want to make
        // sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest
        // way to do this is to do a single parse of a stream without any nonterminals.
        let mut flatten = FlattenNonterminals {
            nt_to_tokenstream: rustc_parse::nt_to_tokenstream,
            parse_sess: &self.cfg.sess.parse_sess,
            synthesize_tokens: CanSynthesizeMissingTokens::No,
        };
        orig_tokens = flatten.process_token_stream(orig_tokens);

        // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
        // to the captured `AttrAnnotatedTokenStream` (specifically, we capture
        // `AttrAnnotatedTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
        let mut parser =
            rustc_parse::stream_to_parser(&self.cfg.sess.parse_sess, orig_tokens, None);
        parser.capture_cfg = true;
        annotatable = match annotatable {
            Annotatable::Item(_) => {
                Annotatable::Item(parser.parse_item(ForceCollect::Yes).unwrap().unwrap())
            }
            Annotatable::TraitItem(_) => Annotatable::TraitItem(
                parser.parse_trait_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
            ),
            Annotatable::ImplItem(_) => Annotatable::ImplItem(
                parser.parse_impl_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
            ),
            Annotatable::ForeignItem(_) => Annotatable::ForeignItem(
                parser.parse_foreign_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
            ),
            Annotatable::Stmt(_) => {
                Annotatable::Stmt(P(parser.parse_stmt(ForceCollect::Yes).unwrap().unwrap()))
            }
            Annotatable::Expr(_) => Annotatable::Expr(parser.parse_expr_force_collect().unwrap()),
            _ => unreachable!(),
        };

        // Now that we have our re-parsed `AttrAnnotatedTokenStream`, recursively configuring
        // our attribute target will correctly configure the tokens as well.
        flat_map_annotatable(self, annotatable)
    }
}
impl MutVisitor for CfgEval<'_, '_> {
    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
        self.cfg.configure_expr(expr);
        mut_visit::noop_visit_expr(expr, self);
    }

    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
        let mut expr = configure!(self, expr);
        mut_visit::noop_visit_expr(&mut expr, self);
        Some(expr)
    }

    fn flat_map_generic_param(
        &mut self,
        param: ast::GenericParam,
    ) -> SmallVec<[ast::GenericParam; 1]> {
        mut_visit::noop_flat_map_generic_param(configure!(self, param), self)
    }

    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
        mut_visit::noop_flat_map_stmt(configure!(self, stmt), self)
    }

    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
        mut_visit::noop_flat_map_item(configure!(self, item), self)
    }

    fn flat_map_impl_item(&mut self, item: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
        mut_visit::noop_flat_map_assoc_item(configure!(self, item), self)
    }

    fn flat_map_trait_item(&mut self, item: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
        mut_visit::noop_flat_map_assoc_item(configure!(self, item), self)
    }

    fn flat_map_foreign_item(
        &mut self,
        foreign_item: P<ast::ForeignItem>,
    ) -> SmallVec<[P<ast::ForeignItem>; 1]> {
        mut_visit::noop_flat_map_foreign_item(configure!(self, foreign_item), self)
    }

    fn flat_map_arm(&mut self, arm: ast::Arm) -> SmallVec<[ast::Arm; 1]> {
        mut_visit::noop_flat_map_arm(configure!(self, arm), self)
    }

    fn flat_map_expr_field(&mut self, field: ast::ExprField) -> SmallVec<[ast::ExprField; 1]> {
        mut_visit::noop_flat_map_expr_field(configure!(self, field), self)
    }

    fn flat_map_pat_field(&mut self, fp: ast::PatField) -> SmallVec<[ast::PatField; 1]> {
        mut_visit::noop_flat_map_pat_field(configure!(self, fp), self)
    }

    fn flat_map_param(&mut self, p: ast::Param) -> SmallVec<[ast::Param; 1]> {
        mut_visit::noop_flat_map_param(configure!(self, p), self)
    }

    fn flat_map_field_def(&mut self, sf: ast::FieldDef) -> SmallVec<[ast::FieldDef; 1]> {
        mut_visit::noop_flat_map_field_def(configure!(self, sf), self)
    }

    fn flat_map_variant(&mut self, variant: ast::Variant) -> SmallVec<[ast::Variant; 1]> {
        mut_visit::noop_flat_map_variant(configure!(self, variant), self)
    }
}
@ -1,7 +1,8 @@
use rustc_ast::{self as ast, token, ItemKind, MetaItemKind, NestedMetaItem, StmtKind};
use crate::cfg_eval::cfg_eval;

use rustc_ast::{self as ast, attr, token, ItemKind, MetaItemKind, NestedMetaItem, StmtKind};
use rustc_errors::{struct_span_err, Applicability};
use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, Indeterminate, MultiItemModifier};
use rustc_expand::config::StripUnconfigured;
use rustc_feature::AttributeTemplate;
use rustc_parse::validate_attr;
use rustc_session::Session;
@ -25,13 +26,19 @@ impl MultiItemModifier for Expander {
            return ExpandResult::Ready(vec![item]);
        }

        let result =
            ecx.resolver.resolve_derives(ecx.current_expansion.id, ecx.force_mode, &|| {
                let template =
                    AttributeTemplate { list: Some("Trait1, Trait2, ..."), ..Default::default() };
                let attr = ecx.attribute(meta_item.clone());
                validate_attr::check_builtin_attribute(&sess.parse_sess, &attr, sym::derive, template);
                let attr = attr::mk_attr_outer(meta_item.clone());
                validate_attr::check_builtin_attribute(
                    &sess.parse_sess,
                    &attr,
                    sym::derive,
                    template,
                );

                let derives: Vec<_> = attr
                    .meta_item_list()
                attr.meta_item_list()
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|nested_meta| match nested_meta {
@ -47,30 +54,12 @@ impl MultiItemModifier for Expander {
                        report_path_args(sess, &meta);
                        meta.path
                    })
                    .collect();
                    .map(|path| (path, None))
                    .collect()
            });

        // FIXME: Try to cache intermediate results to avoid collecting same paths multiple times.
        match ecx.resolver.resolve_derives(ecx.current_expansion.id, derives, ecx.force_mode) {
            Ok(()) => {
                let mut visitor =
                    StripUnconfigured { sess, features: ecx.ecfg.features, modified: false };
                let mut item = visitor.fully_configure(item);
                if visitor.modified {
                    // Erase the tokens if cfg-stripping modified the item
                    // This will cause us to synthesize fake tokens
                    // when `nt_to_tokenstream` is called on this item.
                    match &mut item {
                        Annotatable::Item(item) => item,
                        Annotatable::Stmt(stmt) => match &mut stmt.kind {
                            StmtKind::Item(item) => item,
                            _ => unreachable!(),
                        },
                        _ => unreachable!(),
                    }
                    .tokens = None;
                }
                ExpandResult::Ready(vec![item])
            }
        match result {
            Ok(()) => ExpandResult::Ready(cfg_eval(ecx, item)),
            Err(Indeterminate) => ExpandResult::Retry(item),
        }
    }
@ -541,7 +541,7 @@ impl<'a> TraitDef<'a> {
        self.generics.to_generics(cx, self.span, type_ident, generics);

        // Create the generic parameters
        params.extend(generics.params.iter().map(|param| match param.kind {
        params.extend(generics.params.iter().map(|param| match &param.kind {
            GenericParamKind::Lifetime { .. } => param.clone(),
            GenericParamKind::Type { .. } => {
                // I don't think this can be moved out of the loop, since
@ -561,7 +561,18 @@ impl<'a> TraitDef<'a> {

                cx.typaram(self.span, param.ident, vec![], bounds, None)
            }
            GenericParamKind::Const { .. } => param.clone(),
            GenericParamKind::Const { ty, kw_span, .. } => {
                let const_nodefault_kind = GenericParamKind::Const {
                    ty: ty.clone(),
                    kw_span: kw_span.clone(),

                    // We can't have default values inside impl block
                    default: None,
                };
                let mut param_clone = param.clone();
                param_clone.kind = const_nodefault_kind;
                param_clone
            }
        }));

        // and similarly for where clauses
@ -1034,7 +1045,7 @@ impl<'a> MethodDef<'a> {
        // make a series of nested matches, to destructure the
        // structs. This is actually right-to-left, but it shouldn't
        // matter.
        for (arg_expr, pat) in self_args.iter().zip(patterns) {
        for (arg_expr, pat) in iter::zip(self_args, patterns) {
            body = cx.expr_match(
                trait_.span,
                arg_expr.clone(),
@ -1351,7 +1362,7 @@ impl<'a> MethodDef<'a> {
        let mut discriminant_test = cx.expr_bool(sp, true);

        let mut first_ident = None;
        for (&ident, self_arg) in vi_idents.iter().zip(&self_args) {
        for (&ident, self_arg) in iter::zip(&vi_idents, &self_args) {
            let self_addr = cx.expr_addr_of(sp, self_arg.clone());
            let variant_value =
                deriving::call_intrinsic(cx, sp, sym::discriminant_value, vec![self_addr]);
@ -1571,14 +1582,12 @@ impl<'a> TraitDef<'a> {
        let subpats = self.create_subpatterns(cx, paths, mutbl, use_temporaries);
        let pattern = match *struct_def {
            VariantData::Struct(..) => {
                let field_pats = subpats
                    .into_iter()
                    .zip(&ident_exprs)
                let field_pats = iter::zip(subpats, &ident_exprs)
                    .map(|(pat, &(sp, ident, ..))| {
                        if ident.is_none() {
                            cx.span_bug(sp, "a braced struct with unnamed fields in `derive`");
                        }
                        ast::FieldPat {
                        ast::PatField {
                            ident: ident.unwrap(),
                            is_shorthand: false,
                            attrs: ast::AttrVec::new(),
@ -14,31 +14,31 @@ pub fn expand(
    ecx: &mut ExtCtxt<'_>,
    _span: Span,
    meta_item: &ast::MetaItem,
    mut item: Annotatable,
    item: Annotatable,
) -> Vec<Annotatable> {
    check_builtin_macro_attribute(ecx, meta_item, sym::global_allocator);

    let not_static = |item: Annotatable| {
        ecx.sess.parse_sess.span_diagnostic.span_err(item.span(), "allocators must be statics");
        vec![item]
    };
    let orig_item = item.clone();
    let mut is_stmt = false;
    let not_static = || {
        ecx.sess.parse_sess.span_diagnostic.span_err(item.span(), "allocators must be statics");
        vec![orig_item.clone()]
    };

    // Allow using `#[global_allocator]` on an item statement
    if let Annotatable::Stmt(stmt) = &item {
        if let StmtKind::Item(item_) = &stmt.kind {
            item = Annotatable::Item(item_.clone());
            is_stmt = true;
        }
    }

    let item = match item {
    // FIXME - if we get deref patterns, use them to reduce duplication here
    let (item, is_stmt) = match &item {
        Annotatable::Item(item) => match item.kind {
            ItemKind::Static(..) => item,
            _ => return not_static(Annotatable::Item(item)),
            ItemKind::Static(..) => (item, false),
            _ => return not_static(),
        },
        _ => return not_static(item),
        Annotatable::Stmt(stmt) => match &stmt.kind {
            StmtKind::Item(item_) => match item_.kind {
                ItemKind::Static(..) => (item_, true),
                _ => return not_static(),
            },
            _ => return not_static(),
        },
        _ => return not_static(),
    };

    // Generate a bunch of new items using the AllocFnFactory
@ -7,10 +7,12 @@
#![feature(bool_to_option)]
#![feature(crate_visibility_modifier)]
#![feature(decl_macro)]
#![feature(iter_zip)]
#![feature(nll)]
#![feature(or_patterns)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(proc_macro_internals)]
#![feature(proc_macro_quote)]
#![recursion_limit = "256"]

extern crate proc_macro;

@ -24,6 +26,7 @@ mod asm;
mod assert;
mod cfg;
mod cfg_accessible;
mod cfg_eval;
mod compile_error;
mod concat;
mod concat_idents;
@ -89,6 +92,7 @@ pub fn register_builtin_macros(resolver: &mut dyn ResolverExpand) {
    register_attr! {
        bench: test::expand_bench,
        cfg_accessible: cfg_accessible::Expander,
        cfg_eval: cfg_eval::expand,
        derive: derive::Expander,
        global_allocator: global_allocator::expand,
        test: test::expand_test,
@ -4,7 +4,7 @@ use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast_pretty::pprust;
use rustc_expand::base::{self, *};
use rustc_expand::module::DirectoryOwnership;
use rustc_expand::module::DirOwnership;
use rustc_parse::parser::{ForceCollect, Parser};
use rustc_parse::{self, new_parser_from_file};
use rustc_session::lint::builtin::INCOMPLETE_INCLUDE;
@ -101,7 +101,7 @@ pub fn expand_include<'cx>(
        None => return DummyResult::any(sp),
    };
    // The file will be added to the code map by the parser
    let mut file = match cx.resolve_path(file, sp) {
    let file = match cx.resolve_path(file, sp) {
        Ok(f) => f,
        Err(mut err) => {
            err.emit();
@ -114,10 +114,9 @@ pub fn expand_include<'cx>(
    // then the path of `bar.rs` should be relative to the directory of `file`.
    // See https://github.com/rust-lang/rust/pull/69838/files#r395217057 for a discussion.
    // `MacroExpander::fully_expand_fragment` later restores, so "stack discipline" is maintained.
    file.pop();
    cx.current_expansion.directory_ownership = DirectoryOwnership::Owned { relative: None };
    let mod_path = cx.current_expansion.module.mod_path.clone();
    cx.current_expansion.module = Rc::new(ModuleData { mod_path, directory: file });
    let dir_path = file.parent().unwrap_or(&file).to_owned();
    cx.current_expansion.module = Rc::new(cx.current_expansion.module.with_dir_path(dir_path));
    cx.current_expansion.dir_ownership = DirOwnership::Owned { relative: None };

    struct ExpandResult<'a> {
        p: Parser<'a>,
@ -2,7 +2,7 @@ use rustc_ast as ast;
use rustc_expand::base::{ExtCtxt, ResolverExpand};
use rustc_expand::expand::ExpansionConfig;
use rustc_session::Session;
use rustc_span::edition::Edition;
use rustc_span::edition::Edition::*;
use rustc_span::hygiene::AstPass;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::DUMMY_SP;
@ -13,7 +13,7 @@ pub fn inject(
    sess: &Session,
    alt_std_name: Option<Symbol>,
) -> ast::Crate {
    let rust_2018 = sess.parse_sess.edition >= Edition::Edition2018;
    let edition = sess.parse_sess.edition;

    // the first name in this list is the crate name of the crate with the prelude
    let names: &[Symbol] = if sess.contains_name(&krate.attrs, sym::no_core) {
@ -42,7 +42,11 @@ pub fn inject(

    // .rev() to preserve ordering above in combination with insert(0, ...)
    for &name in names.iter().rev() {
        let ident = if rust_2018 { Ident::new(name, span) } else { Ident::new(name, call_site) };
        let ident = if edition >= Edition2018 {
            Ident::new(name, span)
        } else {
            Ident::new(name, call_site)
        };
        krate.items.insert(
            0,
            cx.item(
@ -58,14 +62,18 @@ pub fn inject(
    // the one with the prelude.
    let name = names[0];

    let import_path = if rust_2018 {
        [name, sym::prelude, sym::v1].iter().map(|symbol| Ident::new(*symbol, span)).collect()
    } else {
        [kw::PathRoot, name, sym::prelude, sym::v1]
    let root = (edition == Edition2015).then(|| kw::PathRoot);

    let import_path = root
        .iter()
            .map(|symbol| Ident::new(*symbol, span))
            .collect()
    };
        .chain(&[name, sym::prelude])
        .chain(&[match edition {
            Edition2015 => sym::rust_2015,
            Edition2018 => sym::rust_2018,
            Edition2021 => sym::rust_2021,
        }])
        .map(|&symbol| Ident::new(symbol, span))
        .collect();

    let use_item = cx.item(
        span,
@ -142,7 +142,7 @@ fn entry_point_type(sess: &Session, item: &ast::Item, depth: usize) -> EntryPoin
        ast::ItemKind::Fn(..) => {
            if sess.contains_name(&item.attrs, sym::start) {
                EntryPointType::Start
            } else if sess.contains_name(&item.attrs, sym::main) {
            } else if sess.contains_name(&item.attrs, sym::rustc_main) {
                EntryPointType::MainAttr
            } else if item.ident.name == sym::main {
                if depth == 1 {
@ -187,7 +187,7 @@ impl<'a> MutVisitor for EntryPointCleaner<'a> {
                let attrs = attrs
                    .into_iter()
                    .filter(|attr| {
                        !self.sess.check_name(attr, sym::main)
                        !self.sess.check_name(attr, sym::rustc_main)
                            && !self.sess.check_name(attr, sym::start)
                    })
                    .chain(iter::once(allow_dead_code))
@ -220,7 +220,7 @@ fn generate_test_harness(
    let expn_id = ext_cx.resolver.expansion_for_ast_pass(
        DUMMY_SP,
        AstPass::TestHarness,
        &[sym::main, sym::test, sym::rustc_attrs],
        &[sym::test, sym::rustc_attrs],
        None,
    );
    let def_site = DUMMY_SP.with_def_site_ctxt(expn_id);
@ -247,7 +247,7 @@ fn generate_test_harness(
/// By default this expands to
///
/// ```
/// #[main]
/// #[rustc_main]
/// pub fn main() {
///     extern crate test;
///     test::test_main_static(&[
@ -297,8 +297,8 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
    let test_extern_stmt =
        ecx.stmt_item(sp, ecx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(None)));

    // #[main]
    let main_meta = ecx.meta_word(sp, sym::main);
    // #[rustc_main]
    let main_meta = ecx.meta_word(sp, sym::rustc_main);
    let main_attr = ecx.attribute(main_meta);

    // pub fn main() { ... }
25
compiler/rustc_codegen_cranelift/.cirrus.yml
Normal file
@ -0,0 +1,25 @@
task:
  name: freebsd
  freebsd_instance:
    image: freebsd-12-1-release-amd64
  setup_rust_script:
    - pkg install -y curl git bash
    - curl https://sh.rustup.rs -sSf --output rustup.sh
    - sh rustup.sh --default-toolchain none -y --profile=minimal
  cargo_bin_cache:
    folder: ~/.cargo/bin
  target_cache:
    folder: target
  prepare_script:
    - . $HOME/.cargo/env
    - git config --global user.email "user@example.com"
    - git config --global user.name "User"
    - ./prepare.sh
  test_script:
    - . $HOME/.cargo/env
    - # Enable backtraces for easier debugging
    - export RUST_BACKTRACE=1
    - # Reduce amount of benchmark runs as they are slow
    - export COMPILE_RUNS=2
    - export RUN_RUNS=2
    - ./test.sh
@ -1,44 +0,0 @@
|
|||
name: Bootstrap rustc using cg_clif
|
||||
|
||||
on:
|
||||
- push
|
||||
|
||||
jobs:
|
||||
bootstrap_rustc:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Cache cargo installed crates
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ~/.cargo/bin
|
||||
key: ${{ runner.os }}-cargo-installed-crates
|
||||
|
||||
- name: Cache cargo registry and index
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
key: ${{ runner.os }}-cargo-registry-and-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }}
|
||||
|
||||
- name: Prepare dependencies
|
||||
run: |
|
||||
git config --global user.email "user@example.com"
|
||||
git config --global user.name "User"
|
||||
./prepare.sh
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
# Enable backtraces for easier debugging
|
||||
export RUST_BACKTRACE=1
|
||||
|
||||
./scripts/test_bootstrap.sh
|
|
@ -7,14 +7,18 @@ on:
|
|||
jobs:
|
||||
build:
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 60
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
- os: macos-latest
|
||||
# cross-compile from Linux to Windows using mingw
|
||||
- os: ubuntu-latest
|
||||
env:
|
||||
- BACKEND: ""
|
||||
- BACKEND: --oldbe
|
||||
TARGET_TRIPLE: x86_64-pc-windows-gnu
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
@ -39,6 +43,12 @@ jobs:
|
|||
path: target
|
||||
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }}
|
||||
|
||||
- name: Install MinGW toolchain and wine
|
||||
if: matrix.os == 'ubuntu-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu'
|
||||
run: |
|
||||
sudo apt-get install -y gcc-mingw-w64-x86-64 wine-stable
|
||||
rustup target add x86_64-pc-windows-gnu
|
||||
|
||||
- name: Prepare dependencies
|
||||
run: |
|
||||
git config --global user.email "user@example.com"
|
||||
|
@ -46,6 +56,8 @@ jobs:
|
|||
./prepare.sh
|
||||
|
||||
- name: Test
|
||||
env:
|
||||
TARGET_TRIPLE: ${{ matrix.env.TARGET_TRIPLE }}
|
||||
run: |
|
||||
# Enable backtraces for easier debugging
|
||||
export RUST_BACKTRACE=1
|
||||
|
@ -54,12 +66,16 @@ jobs:
|
|||
export COMPILE_RUNS=2
|
||||
export RUN_RUNS=2
|
||||
|
||||
./test.sh $BACKEND
|
||||
# Enable extra checks
|
||||
export CG_CLIF_ENABLE_VERIFIER=1
|
||||
|
||||
./test.sh
|
||||
|
||||
- name: Package prebuilt cg_clif
|
||||
run: tar cvfJ cg_clif.tar.xz build
|
||||
|
||||
- name: Upload prebuilt cg_clif
|
||||
if: matrix.env.TARGET_TRIPLE != 'x86_64-pc-windows-gnu'
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: cg_clif-${{ runner.os }}
|
||||
|
|
82
compiler/rustc_codegen_cranelift/.github/workflows/rustc.yml
vendored
Normal file
82
compiler/rustc_codegen_cranelift/.github/workflows/rustc.yml
vendored
Normal file
|
@ -0,0 +1,82 @@
|
|||
name: Various rustc tests
|
||||
|
||||
on:
|
||||
- push
|
||||
|
||||
jobs:
|
||||
bootstrap_rustc:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Cache cargo installed crates
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ~/.cargo/bin
|
||||
key: ${{ runner.os }}-cargo-installed-crates
|
||||
|
||||
- name: Cache cargo registry and index
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
key: ${{ runner.os }}-cargo-registry-and-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }}
|
||||
|
||||
- name: Prepare dependencies
|
||||
run: |
|
||||
git config --global user.email "user@example.com"
|
||||
git config --global user.name "User"
|
||||
./prepare.sh
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
# Enable backtraces for easier debugging
|
||||
export RUST_BACKTRACE=1
|
||||
|
||||
./scripts/test_bootstrap.sh
|
||||
rustc_test_suite:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Cache cargo installed crates
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ~/.cargo/bin
|
||||
key: ${{ runner.os }}-cargo-installed-crates
|
||||
|
||||
- name: Cache cargo registry and index
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
key: ${{ runner.os }}-cargo-registry-and-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }}
|
||||
|
||||
- name: Prepare dependencies
|
||||
run: |
|
||||
git config --global user.email "user@example.com"
|
||||
git config --global user.name "User"
|
||||
./prepare.sh
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
# Enable backtraces for easier debugging
|
||||
export RUST_BACKTRACE=1
|
||||
|
||||
./scripts/test_rustc_tests.sh
|
|
@ -1,8 +1,8 @@
{
    // source for rustc_* is not included in the rust-src component; disable the errors about this
    "rust-analyzer.diagnostics.disabled": ["unresolved-extern-crate"],
    "rust-analyzer.diagnostics.disabled": ["unresolved-extern-crate", "macro-error"],
    "rust-analyzer.assist.importMergeBehavior": "last",
    "rust-analyzer.cargo.loadOutDirsFromCheck": true,
    "rust-analyzer.cargo.runBuildScripts": true,
    "rust-analyzer.linkedProjects": [
        "./Cargo.toml",
        //"./build_sysroot/sysroot_src/src/libstd/Cargo.toml",
@ -1,5 +1,7 @@
|
|||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.38"
|
||||
|
@ -29,18 +31,6 @@ version = "1.4.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae44d1a3d5a19df61dd0c8beb138458ac2a53a7ac09eba97d55592540004306b"
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.0.66"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
|
@ -49,16 +39,16 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
|||
|
||||
[[package]]
|
||||
name = "cranelift-bforest"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
dependencies = [
|
||||
"cranelift-entity",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-codegen"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"cranelift-bforest",
|
||||
|
@ -75,8 +65,8 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cranelift-codegen-meta"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
dependencies = [
|
||||
"cranelift-codegen-shared",
|
||||
"cranelift-entity",
|
||||
|
@ -84,18 +74,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cranelift-codegen-shared"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-entity"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-frontend"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
dependencies = [
|
||||
"cranelift-codegen",
|
||||
"log",
|
||||
|
@ -105,8 +95,8 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cranelift-jit"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cranelift-codegen",
|
||||
|
@ -123,8 +113,8 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cranelift-module"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cranelift-codegen",
|
||||
|
@ -135,18 +125,17 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cranelift-native"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
dependencies = [
|
||||
"cranelift-codegen",
|
||||
"raw-cpuid",
|
||||
"target-lexicon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-object"
|
||||
version = "0.69.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
|
||||
version = "0.72.0"
|
||||
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cranelift-codegen",
|
||||
|
@ -162,7 +151,7 @@ version = "1.2.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
|
||||
dependencies = [
|
||||
"cfg-if 1.0.0",
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -219,9 +208,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.82"
|
||||
version = "0.2.86"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89203f3fba0a3795506acaad8ebce3c80c0af93f994d5a1d7a0b1eeb23271929"
|
||||
checksum = "b7282d924be3275cec7f6756ff4121987bc6481325397dde6ba3e7802b1a8b1c"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
|
@ -229,17 +218,17 @@ version = "0.6.7"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "351a32417a12d5f7e82c368a66781e307834dae04c6ce0cd4456d52989229883"
|
||||
dependencies = [
|
||||
"cfg-if 1.0.0",
|
||||
"cfg-if",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.13"
|
||||
version = "0.4.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fcf3805d4480bb5b86070dcfeb9e2cb2ebc148adb753c5cca5f884d1d65a42b2"
|
||||
checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.10",
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -253,9 +242,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397"
|
||||
checksum = "a9a7ab5d64814df0fe4a4b5ead45ed6c5f181ee3ff04ba344313a6c80446c5d4"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"indexmap",
|
||||
|
@ -272,24 +261,13 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.8"
|
||||
version = "1.0.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
|
||||
checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "raw-cpuid"
|
||||
version = "8.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fdf7d9dbd43f3d81d94a49c1c3df73cc2b3827995147e6cf7f89d4ec5483e73"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"cc",
|
||||
"rustc_version",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regalloc"
|
||||
version = "0.0.31"
|
||||
|
@ -337,30 +315,6 @@ dependencies = [
|
|||
"target-lexicon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_version"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
|
||||
dependencies = [
|
||||
"semver",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
|
||||
dependencies = [
|
||||
"semver-parser",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver-parser"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.6.1"
|
||||
|
@ -369,9 +323,9 @@ checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e"
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.58"
|
||||
version = "1.0.60"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cc60a3d73ea6594cd712d830cc1f0390fd71542d8c8cd24e70cc54cdfd5e05d5"
|
||||
checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -380,24 +334,24 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "target-lexicon"
|
||||
version = "0.11.1"
|
||||
version = "0.11.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4ee5a98e506fb7231a304c3a1bd7c132a55016cf65001e0282480665870dfcb9"
|
||||
checksum = "422045212ea98508ae3d28025bc5aaa2bd4a9cdaecd442a08da2ee620ee9ea95"
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.23"
|
||||
version = "1.0.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76cc616c6abf8c8928e2fdcc0dbfab37175edd8fb49a4641066ad1364fdab146"
|
||||
checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.23"
|
||||
version = "1.0.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
|
||||
checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
|
@ -9,14 +9,14 @@ crate-type = ["dylib"]
|
|||
|
||||
[dependencies]
|
||||
# These have to be in sync with each other
|
||||
cranelift-codegen = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", features = ["unwind", "x86", "x64"] }
|
||||
cranelift-codegen = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", features = ["unwind", "x64"] }
|
||||
cranelift-frontend = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
|
||||
cranelift-module = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
|
||||
cranelift-jit = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", optional = true }
|
||||
cranelift-object = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
|
||||
target-lexicon = "0.11.0"
|
||||
gimli = { version = "0.23.0", default-features = false, features = ["write"]}
|
||||
object = { version = "0.22.0", default-features = false, features = ["std", "read_core", "write", "coff", "elf", "macho", "pe"] }
|
||||
object = { version = "0.23.0", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] }
|
||||
|
||||
ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "do_not_remove_cg_clif_ranlib" }
|
||||
indexmap = "1.0.2"
|
||||
|
@ -38,7 +38,6 @@ smallvec = "1.6.1"
|
|||
default = ["jit", "inline_asm"]
|
||||
jit = ["cranelift-jit", "libloading"]
|
||||
inline_asm = []
|
||||
oldbe = []
|
||||
|
||||
[profile.dev]
|
||||
# By compiling dependencies with optimizations, performing tests gets much faster.
|
||||
|
@ -76,3 +75,6 @@ debug = false
|
|||
[profile.release.package.syn]
|
||||
opt-level = 0
|
||||
debug = false
|
||||
|
||||
[package.metadata.rust-analyzer]
|
||||
rustc_private = true
|
||||
|
|
|
@ -34,70 +34,19 @@ rustc_codegen_cranelift can be used as a near-drop-in replacement for `cargo bui

Assuming `$cg_clif_dir` is the directory you cloned this repo into and you followed the instructions (`prepare.sh` and `build.sh` or `test.sh`).

### Cargo

In the directory with your project (where you can do the usual `cargo build`), run:

```bash
$ $cg_clif_dir/build/cargo.sh run
$ $cg_clif_dir/build/cargo.sh build
```

This should build and run your project with rustc_codegen_cranelift instead of the usual LLVM backend.
This will build your project with rustc_codegen_cranelift instead of the usual LLVM backend.

### Rustc

> You should prefer using the Cargo method.

```bash
$ $cg_clif_dir/build/bin/cg_clif my_crate.rs
```

### Jit mode

In jit mode cg_clif will immediately execute your code without creating an executable file.

> This requires all dependencies to be available as dynamic library.
> The jit mode will probably need cargo integration to make this possible.

```bash
$ $cg_clif_dir/build/cargo.sh jit
```

or

```bash
$ $cg_clif_dir/build/bin/cg_clif -Cllvm-args=mode=jit -Cprefer-dynamic my_crate.rs
```

There is also an experimental lazy jit mode. In this mode functions are only compiled once they are
first called. It currently does not work with multi-threaded programs. When a not yet compiled
function is called from another thread than the main thread, you will get an ICE.

```bash
$ $cg_clif_dir/build/cargo.sh lazy-jit
```

### Shell

These are a few functions that allow you to easily run rust code from the shell using cg_clif as jit.

```bash
function jit_naked() {
    echo "$@" | $cg_clif_dir/build/bin/cg_clif - -Cllvm-args=mode=jit -Cprefer-dynamic
}

function jit() {
    jit_naked "fn main() { $@ }"
}

function jit_calc() {
    jit 'println!("0x{:x}", ' $@ ');';
}
```
For additional ways to use rustc_codegen_cranelift like the JIT mode see [usage.md](docs/usage.md).

## Env vars

[see env_vars.md](docs/env_vars.md)
See [env_vars.md](docs/env_vars.md) for all env vars used by rustc_codegen_cranelift.

## Not yet supported

@ -106,3 +55,20 @@ function jit_calc() {
  `llvm_asm!` will remain unimplemented forever. `asm!` doesn't yet support reg classes. You
  have to specify specific registers instead.
* SIMD ([tracked here](https://github.com/bjorn3/rustc_codegen_cranelift/issues/171), some basic things work)

## License

Licensed under either of

* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or
  http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
  http://opensource.org/licenses/MIT)

at your option.

### Contribution

Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you shall be dual licensed as above, without any
additional terms or conditions.
@ -1,11 +1,10 @@
#!/bin/bash
#!/usr/bin/env bash
set -e

# Settings
export CHANNEL="release"
build_sysroot="clif"
target_dir='build'
oldbe=''
while [[ $# != 0 ]]; do
    case $1 in
        "--debug")
@ -19,12 +18,9 @@ while [[ $# != 0 ]]; do
            target_dir=$2
            shift
            ;;
        "--oldbe")
            oldbe='--features oldbe'
            ;;
        *)
            echo "Unknown flag '$1'"
            echo "Usage: ./build.sh [--debug] [--sysroot none|clif|llvm] [--target-dir DIR] [--oldbe]"
            echo "Usage: ./build.sh [--debug] [--sysroot none|clif|llvm] [--target-dir DIR]"
            exit 1
            ;;
    esac
@ -34,19 +30,19 @@ done
# Build cg_clif
unset CARGO_TARGET_DIR
unamestr=$(uname)
if [[ "$unamestr" == 'Linux' ]]; then
if [[ "$unamestr" == 'Linux' || "$unamestr" == "FreeBSD" ]]; then
    export RUSTFLAGS='-Clink-arg=-Wl,-rpath=$ORIGIN/../lib '$RUSTFLAGS
elif [[ "$unamestr" == 'Darwin' ]]; then
    export RUSTFLAGS='-Csplit-debuginfo=unpacked -Clink-arg=-Wl,-rpath,@loader_path/../lib -Zosx-rpath-install-name '$RUSTFLAGS
    dylib_ext='dylib'
else
    echo "Unsupported os"
    echo "Unsupported os $unamestr"
    exit 1
fi
if [[ "$CHANNEL" == "release" ]]; then
    cargo build $oldbe --release
    cargo build --release
else
    cargo build $oldbe
    cargo build
fi

source scripts/ext_config.sh
@ -59,6 +55,7 @@ ln target/$CHANNEL/*rustc_codegen_cranelift* "$target_dir"/lib
ln rust-toolchain scripts/config.sh scripts/cargo.sh "$target_dir"

mkdir -p "$target_dir/lib/rustlib/$TARGET_TRIPLE/lib/"
mkdir -p "$target_dir/lib/rustlib/$HOST_TRIPLE/lib/"
if [[ "$TARGET_TRIPLE" == "x86_64-pc-windows-gnu" ]]; then
    cp $(rustc --print sysroot)/lib/rustlib/$TARGET_TRIPLE/lib/*.o "$target_dir/lib/rustlib/$TARGET_TRIPLE/lib/"
fi
@ -68,12 +65,18 @@ case "$build_sysroot" in
        ;;
    "llvm")
        cp -r $(rustc --print sysroot)/lib/rustlib/$TARGET_TRIPLE/lib "$target_dir/lib/rustlib/$TARGET_TRIPLE/"
        if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
            cp -r $(rustc --print sysroot)/lib/rustlib/$HOST_TRIPLE/lib "$target_dir/lib/rustlib/$HOST_TRIPLE/"
        fi
        ;;
    "clif")
        echo "[BUILD] sysroot"
        dir=$(pwd)
        cd "$target_dir"
        time "$dir/build_sysroot/build_sysroot.sh"
        if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
            time TARGET_TRIPLE="$HOST_TRIPLE" "$dir/build_sysroot/build_sysroot.sh"
        fi
        cp lib/rustlib/*/lib/libstd-* lib/
        ;;
    *)
@ -1,5 +1,7 @@
|
|||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "addr2line"
|
||||
version = "0.14.1"
|
||||
|
@ -14,9 +16,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "adler"
|
||||
version = "0.2.3"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
|
||||
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"rustc-std-workspace-core",
|
||||
|
@ -30,15 +32,6 @@ dependencies = [
|
|||
"core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "alloc_system"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"core",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.0.1"
|
||||
|
@ -47,9 +40,9 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
|
|||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.0.66"
|
||||
version = "1.0.67"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
|
||||
checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
|
@ -117,9 +110,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.9.1"
|
||||
version = "0.11.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
|
||||
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"rustc-std-workspace-alloc",
|
||||
|
@ -139,18 +132,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.84"
|
||||
version = "0.2.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1cca32fa0182e8c0989459524dc356b8f2b5c10f1b9eb521b7d182c03cf8c5ff"
|
||||
checksum = "8916b1f6ca17130ec6568feccee27c156ad12037880833a3b842a823236502e7"
|
||||
dependencies = [
|
||||
"rustc-std-workspace-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.4.3"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
|
||||
checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
|
||||
dependencies = [
|
||||
"adler",
|
||||
"autocfg",
|
||||
|
@ -258,7 +251,6 @@ name = "sysroot"
|
|||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"alloc",
|
||||
"alloc_system",
|
||||
"compiler_builtins",
|
||||
"core",
|
||||
"std",
|
||||
|
|
|
@ -9,8 +9,6 @@ alloc = { path = "./sysroot_src/library/alloc" }
|
|||
std = { path = "./sysroot_src/library/std", features = ["panic_unwind", "backtrace"] }
|
||||
test = { path = "./sysroot_src/library/test" }
|
||||
|
||||
alloc_system = { path = "./alloc_system" }
|
||||
|
||||
compiler_builtins = { version = "0.1.39", default-features = false, features = ["no-asm"] }
|
||||
|
||||
[patch.crates-io]
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
[package]
|
||||
authors = ["The Rust Project Developers", "bjorn3 (edited to be usable outside the rust source)"]
|
||||
name = "alloc_system"
|
||||
version = "0.0.0"
|
||||
[lib]
|
||||
name = "alloc_system"
|
||||
path = "lib.rs"
|
||||
test = false
|
||||
doc = false
|
||||
[dependencies]
|
||||
core = { path = "../sysroot_src/library/core" }
|
||||
libc = { version = "0.2.43", features = ['rustc-dep-of-std'], default-features = false }
|
||||
compiler_builtins = "0.1"
|
|
@ -1,4 +1,4 @@
|
|||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Requires the CHANNEL env var to be set to `debug` or `release`.
|
||||
|
||||
|
@ -28,7 +28,7 @@ export __CARGO_DEFAULT_LIB_METADATA="cg_clif"
|
|||
if [[ "$1" != "--debug" ]]; then
|
||||
sysroot_channel='release'
|
||||
# FIXME Enable incremental again once rust-lang/rust#74946 is fixed
|
||||
CARGO_INCREMENTAL=0 RUSTFLAGS="$RUSTFLAGS -Zmir-opt-level=2" cargo build --target "$TARGET_TRIPLE" --release
|
||||
CARGO_INCREMENTAL=0 RUSTFLAGS="$RUSTFLAGS -Zmir-opt-level=3" cargo build --target "$TARGET_TRIPLE" --release
|
||||
else
|
||||
sysroot_channel='debug'
|
||||
cargo build --target "$TARGET_TRIPLE"
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
|
@ -33,7 +33,7 @@ git clone https://github.com/rust-lang/compiler-builtins.git || echo "rust-lang/
|
|||
pushd compiler-builtins
|
||||
git checkout -- .
|
||||
git checkout 0.1.39
|
||||
git apply ../../crate_patches/0001-compiler-builtins-Remove-rotate_left-from-Int.patch
|
||||
git apply ../../crate_patches/000*-compiler-builtins-*.patch
|
||||
popd
|
||||
|
||||
echo "Successfully prepared sysroot source for building"
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
#!/bin/bash --verbose
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
rm -rf target/ build/ build_sysroot/{sysroot_src/,target/,compiler-builtins/} perf.data{,.old}
|
||||
|
|
|
@ -0,0 +1,48 @@
|
|||
From 1d574bf5e32d51641dcacaf8ef777e95b44f6f2a Mon Sep 17 00:00:00 2001
|
||||
From: bjorn3 <bjorn3@users.noreply.github.com>
|
||||
Date: Thu, 18 Feb 2021 18:30:55 +0100
|
||||
Subject: [PATCH] Disable 128bit atomic operations
|
||||
|
||||
Cranelift doesn't support them yet
|
||||
---
|
||||
src/mem/mod.rs | 12 ------------
|
||||
1 file changed, 12 deletions(-)
|
||||
|
||||
diff --git a/src/mem/mod.rs b/src/mem/mod.rs
|
||||
index 107762c..2d1ae10 100644
|
||||
--- a/src/mem/mod.rs
|
||||
+++ b/src/mem/mod.rs
|
||||
@@ -137,10 +137,6 @@ intrinsics! {
|
||||
pub extern "C" fn __llvm_memcpy_element_unordered_atomic_8(dest: *mut u64, src: *const u64, bytes: usize) -> () {
|
||||
memcpy_element_unordered_atomic(dest, src, bytes);
|
||||
}
|
||||
- #[cfg(target_has_atomic_load_store = "128")]
|
||||
- pub extern "C" fn __llvm_memcpy_element_unordered_atomic_16(dest: *mut u128, src: *const u128, bytes: usize) -> () {
|
||||
- memcpy_element_unordered_atomic(dest, src, bytes);
|
||||
- }
|
||||
|
||||
#[cfg(target_has_atomic_load_store = "8")]
|
||||
pub extern "C" fn __llvm_memmove_element_unordered_atomic_1(dest: *mut u8, src: *const u8, bytes: usize) -> () {
|
||||
@@ -158,10 +154,6 @@ intrinsics! {
|
||||
pub extern "C" fn __llvm_memmove_element_unordered_atomic_8(dest: *mut u64, src: *const u64, bytes: usize) -> () {
|
||||
memmove_element_unordered_atomic(dest, src, bytes);
|
||||
}
|
||||
- #[cfg(target_has_atomic_load_store = "128")]
|
||||
- pub extern "C" fn __llvm_memmove_element_unordered_atomic_16(dest: *mut u128, src: *const u128, bytes: usize) -> () {
|
||||
- memmove_element_unordered_atomic(dest, src, bytes);
|
||||
- }
|
||||
|
||||
#[cfg(target_has_atomic_load_store = "8")]
|
||||
pub extern "C" fn __llvm_memset_element_unordered_atomic_1(s: *mut u8, c: u8, bytes: usize) -> () {
|
||||
@@ -179,8 +171,4 @@ intrinsics! {
|
||||
pub extern "C" fn __llvm_memset_element_unordered_atomic_8(s: *mut u64, c: u8, bytes: usize) -> () {
|
||||
memset_element_unordered_atomic(s, c, bytes);
|
||||
}
|
||||
- #[cfg(target_has_atomic_load_store = "128")]
|
||||
- pub extern "C" fn __llvm_memset_element_unordered_atomic_16(s: *mut u128, c: u8, bytes: usize) -> () {
|
||||
- memset_element_unordered_atomic(s, c, bytes);
|
||||
- }
|
||||
}
|
||||
--
|
||||
2.26.2.7.g19db9cfb68
|
||||
|
|
@ -8,5 +8,8 @@
|
|||
to make it possible to use incremental mode for all analyses performed by rustc without caching
|
||||
object files when their content should have been changed by a change to cg_clif.</dd>
|
||||
<dt>CG_CLIF_DISPLAY_CG_TIME</dt>
|
||||
<dd>If "1", display the time it took to perform codegen for a crate</dd>
|
||||
<dd>If "1", display the time it took to perform codegen for a crate.</dd>
|
||||
<dt>CG_CLIF_ENABLE_VERIFIER</dt>
|
||||
<dd>Enable the Cranelift IR verifier for all compilation passes. If not set, it will only run once
|
||||
before passing the clif IR to Cranelift for compilation.</dd>
|
||||
</dl>
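As a rough illustration (not part of the file above), these are ordinary environment variables, so they can be set inline for a single invocation of the build or test scripts; the paths below follow the layout used elsewhere in this diff:

```bash
# Print per-crate codegen timing while building a project with cg_clif.
CG_CLIF_DISPLAY_CG_TIME=1 $cg_clif_dir/build/cargo.sh build

# Run the Cranelift IR verifier on every compilation pass while running the test suite.
CG_CLIF_ENABLE_VERIFIER=1 ./test.sh
```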
|
||||
|
|
66
compiler/rustc_codegen_cranelift/docs/usage.md
Normal file
|
@ -0,0 +1,66 @@
|
|||
# Usage
|
||||
|
||||
rustc_codegen_cranelift can be used as a near-drop-in replacement for `cargo build` or `cargo run` for existing projects.
|
||||
|
||||
The commands below assume `$cg_clif_dir` is the directory you cloned this repo into and that you have followed the setup instructions (`prepare.sh` and `build.sh` or `test.sh`).
|
||||
|
||||
## Cargo
|
||||
|
||||
In the directory with your project (where you can do the usual `cargo build`), run:
|
||||
|
||||
```bash
|
||||
$ $cg_clif_dir/build/cargo.sh build
|
||||
```
|
||||
|
||||
This will build your project with rustc_codegen_cranelift instead of the usual LLVM backend.
|
||||
|
||||
## Rustc
|
||||
|
||||
> You should prefer using the Cargo method.
|
||||
|
||||
```bash
|
||||
$ $cg_clif_dir/build/bin/cg_clif my_crate.rs
|
||||
```
|
||||
|
||||
## Jit mode
|
||||
|
||||
In jit mode, cg_clif will immediately execute your code without creating an executable file.
|
||||
|
||||
> This requires all dependencies to be available as dynamic libraries.
|
||||
> The jit mode will probably need cargo integration to make this possible.
|
||||
|
||||
```bash
|
||||
$ $cg_clif_dir/build/cargo.sh jit
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```bash
|
||||
$ $cg_clif_dir/build/bin/cg_clif -Cllvm-args=mode=jit -Cprefer-dynamic my_crate.rs
|
||||
```
|
||||
|
||||
There is also an experimental lazy jit mode. In this mode functions are only compiled once they are
|
||||
first called. It currently does not work with multi-threaded programs. When a not-yet-compiled
|
||||
function is called from a thread other than the main thread, you will get an ICE.
|
||||
|
||||
```bash
|
||||
$ $cg_clif_dir/build/cargo.sh lazy-jit
|
||||
```
|
||||
|
||||
## Shell
|
||||
|
||||
These are a few functions that let you easily run Rust code from the shell, using cg_clif as a JIT.
|
||||
|
||||
```bash
|
||||
function jit_naked() {
|
||||
echo "$@" | $cg_clif_dir/build/bin/cg_clif - -Cllvm-args=mode=jit -Cprefer-dynamic
|
||||
}
|
||||
|
||||
function jit() {
|
||||
jit_naked "fn main() { $@ }"
|
||||
}
|
||||
|
||||
function jit_calc() {
|
||||
jit 'println!("0x{:x}", ' $@ ');';
|
||||
}
|
||||
```
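For example, with the functions above sourced into your shell (a sketch; it assumes the JIT mode works on your platform), `jit_calc` prints the result of a Rust expression in hex:

```bash
$ jit_calc '2 + 40'
0x2a
```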
|
|
@ -1,4 +1,4 @@
|
|||
#![feature(start, box_syntax, alloc_system, core_intrinsics, alloc_prelude, alloc_error_handler)]
|
||||
#![feature(start, box_syntax, core_intrinsics, alloc_prelude, alloc_error_handler)]
|
||||
#![no_std]
|
||||
|
||||
extern crate alloc;
|
||||
|
|
|
@ -8,66 +8,24 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
#![no_std]
|
||||
#![allow(unused_attributes)]
|
||||
#![unstable(feature = "alloc_system",
|
||||
reason = "this library is unlikely to be stabilized in its current \
|
||||
form or name",
|
||||
issue = "32838")]
|
||||
#![feature(allocator_api)]
|
||||
#![feature(core_intrinsics)]
|
||||
#![feature(nll)]
|
||||
#![feature(staged_api)]
|
||||
#![feature(rustc_attrs)]
|
||||
#![feature(alloc_layout_extra)]
|
||||
#![cfg_attr(
|
||||
all(target_arch = "wasm32", not(target_os = "emscripten")),
|
||||
feature(integer_atomics, stdsimd)
|
||||
)]
|
||||
#![feature(allocator_api, rustc_private)]
|
||||
#![cfg_attr(any(unix, target_os = "redox"), feature(libc))]
|
||||
|
||||
// The minimum alignment guaranteed by the architecture. This value is used to
|
||||
// add fast paths for low alignment values.
|
||||
#[cfg(all(any(target_arch = "x86",
|
||||
target_arch = "arm",
|
||||
target_arch = "mips",
|
||||
target_arch = "powerpc",
|
||||
target_arch = "powerpc64",
|
||||
target_arch = "asmjs",
|
||||
target_arch = "wasm32")))]
|
||||
#[allow(dead_code)]
|
||||
target_arch = "powerpc64")))]
|
||||
const MIN_ALIGN: usize = 8;
|
||||
#[cfg(all(any(target_arch = "x86_64",
|
||||
target_arch = "aarch64",
|
||||
target_arch = "mips64",
|
||||
target_arch = "s390x",
|
||||
target_arch = "sparc64")))]
|
||||
#[allow(dead_code)]
|
||||
const MIN_ALIGN: usize = 16;
|
||||
|
||||
/// The default memory allocator provided by the operating system.
|
||||
///
|
||||
/// This is based on `malloc` on Unix platforms and `HeapAlloc` on Windows,
|
||||
/// plus related functions.
|
||||
///
|
||||
/// This type can be used in a `static` item
|
||||
/// with the `#[global_allocator]` attribute
|
||||
/// to force the global allocator to be the system’s one.
|
||||
/// (The default is jemalloc for executables, on some platforms.)
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::alloc::System;
|
||||
///
|
||||
/// #[global_allocator]
|
||||
/// static A: System = System;
|
||||
///
|
||||
/// fn main() {
|
||||
/// let a = Box::new(4); // Allocates from the system allocator.
|
||||
/// println!("{}", a);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// It can also be used directly to allocate memory
|
||||
/// independently of the standard library’s global allocator.
|
||||
#[stable(feature = "alloc_system_type", since = "1.28.0")]
|
||||
pub struct System;
|
||||
#[cfg(any(windows, unix, target_os = "redox"))]
|
||||
mod realloc_fallback {
|
||||
|
@ -96,7 +54,6 @@ mod platform {
|
|||
use MIN_ALIGN;
|
||||
use System;
|
||||
use core::alloc::{GlobalAlloc, Layout};
|
||||
#[stable(feature = "alloc_system_type", since = "1.28.0")]
|
||||
unsafe impl GlobalAlloc for System {
|
||||
#[inline]
|
||||
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
|
||||
|
@ -221,7 +178,6 @@ mod platform {
|
|||
};
|
||||
ptr as *mut u8
|
||||
}
|
||||
#[stable(feature = "alloc_system_type", since = "1.28.0")]
|
||||
unsafe impl GlobalAlloc for System {
|
||||
#[inline]
|
||||
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
|
||||
|
@ -254,89 +210,3 @@ mod platform {
|
|||
}
|
||||
}
|
||||
}
|
||||
// This is an implementation of a global allocator on the wasm32 platform when
|
||||
// emscripten is not in use. In that situation there's no actual runtime for us
|
||||
// to lean on for allocation, so instead we provide our own!
|
||||
//
|
||||
// The wasm32 instruction set has two instructions for getting the current
|
||||
// amount of memory and growing the amount of memory. These instructions are the
|
||||
// foundation on which we're able to build an allocator, so we do so! Note that
|
||||
// the instructions are also pretty "global" and this is the "global" allocator
|
||||
// after all!
|
||||
//
|
||||
// The current allocator here is the `dlmalloc` crate which we've got included
|
||||
// in the rust-lang/rust repository as a submodule. The crate is a port of
|
||||
// dlmalloc.c from C to Rust and is basically just so we can have "pure Rust"
|
||||
// for now which is currently technically required (can't link with C yet).
|
||||
//
|
||||
// The crate itself provides a global allocator which on wasm has no
|
||||
// synchronization as there are no threads!
|
||||
#[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))]
|
||||
mod platform {
|
||||
extern crate dlmalloc;
|
||||
use core::alloc::{GlobalAlloc, Layout};
|
||||
use System;
|
||||
static mut DLMALLOC: dlmalloc::Dlmalloc = dlmalloc::DLMALLOC_INIT;
|
||||
#[stable(feature = "alloc_system_type", since = "1.28.0")]
|
||||
unsafe impl GlobalAlloc for System {
|
||||
#[inline]
|
||||
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
|
||||
let _lock = lock::lock();
|
||||
DLMALLOC.malloc(layout.size(), layout.align())
|
||||
}
|
||||
#[inline]
|
||||
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
|
||||
let _lock = lock::lock();
|
||||
DLMALLOC.calloc(layout.size(), layout.align())
|
||||
}
|
||||
#[inline]
|
||||
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
|
||||
let _lock = lock::lock();
|
||||
DLMALLOC.free(ptr, layout.size(), layout.align())
|
||||
}
|
||||
#[inline]
|
||||
unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
|
||||
let _lock = lock::lock();
|
||||
DLMALLOC.realloc(ptr, layout.size(), layout.align(), new_size)
|
||||
}
|
||||
}
|
||||
#[cfg(target_feature = "atomics")]
|
||||
mod lock {
|
||||
use core::arch::wasm32;
|
||||
use core::sync::atomic::{AtomicI32, Ordering::SeqCst};
|
||||
static LOCKED: AtomicI32 = AtomicI32::new(0);
|
||||
pub struct DropLock;
|
||||
pub fn lock() -> DropLock {
|
||||
loop {
|
||||
if LOCKED.swap(1, SeqCst) == 0 {
|
||||
return DropLock
|
||||
}
|
||||
unsafe {
|
||||
let r = wasm32::atomic::wait_i32(
|
||||
&LOCKED as *const AtomicI32 as *mut i32,
|
||||
1, // expected value
|
||||
-1, // timeout
|
||||
);
|
||||
debug_assert!(r == 0 || r == 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Drop for DropLock {
|
||||
fn drop(&mut self) {
|
||||
let r = LOCKED.swap(0, SeqCst);
|
||||
debug_assert_eq!(r, 1);
|
||||
unsafe {
|
||||
wasm32::atomic::wake(
|
||||
&LOCKED as *const AtomicI32 as *mut i32,
|
||||
1, // only one thread
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(not(target_feature = "atomics"))]
|
||||
mod lock {
|
||||
#[inline]
|
||||
pub fn lock() {} // no atomics, no threads, that's easy!
|
||||
}
|
||||
}
|
|
@ -1,22 +1,12 @@
|
|||
// Adapted from rustc run-pass test suite
|
||||
|
||||
#![feature(no_core, arbitrary_self_types, box_syntax)]
|
||||
#![feature(arbitrary_self_types, unsize, coerce_unsized, dispatch_from_dyn)]
|
||||
#![feature(rustc_attrs)]
|
||||
|
||||
#![feature(start, lang_items)]
|
||||
#![no_core]
|
||||
|
||||
extern crate mini_core;
|
||||
|
||||
use mini_core::*;
|
||||
|
||||
macro_rules! assert_eq {
|
||||
($l:expr, $r: expr) => {
|
||||
if $l != $r {
|
||||
panic(stringify!($l != $r));
|
||||
}
|
||||
}
|
||||
}
|
||||
use std::{
|
||||
ops::{Deref, CoerceUnsized, DispatchFromDyn},
|
||||
marker::Unsize,
|
||||
};
|
||||
|
||||
struct Ptr<T: ?Sized>(Box<T>);
|
||||
|
||||
|
@ -67,16 +57,13 @@ impl Trait for i32 {
|
|||
}
|
||||
}
|
||||
|
||||
#[start]
|
||||
fn main(_: isize, _: *const *const u8) -> isize {
|
||||
let pw = Ptr(box Wrapper(5)) as Ptr<Wrapper<dyn Trait>>;
|
||||
fn main() {
|
||||
let pw = Ptr(Box::new(Wrapper(5))) as Ptr<Wrapper<dyn Trait>>;
|
||||
assert_eq!(pw.ptr_wrapper(), 5);
|
||||
|
||||
let wp = Wrapper(Ptr(box 6)) as Wrapper<Ptr<dyn Trait>>;
|
||||
let wp = Wrapper(Ptr(Box::new(6))) as Wrapper<Ptr<dyn Trait>>;
|
||||
assert_eq!(wp.wrapper_ptr(), 6);
|
||||
|
||||
let wpw = Wrapper(Ptr(box Wrapper(7))) as Wrapper<Ptr<Wrapper<dyn Trait>>>;
|
||||
let wpw = Wrapper(Ptr(Box::new(Wrapper(7)))) as Wrapper<Ptr<Wrapper<dyn Trait>>>;
|
||||
assert_eq!(wpw.wrapper_ptr_wrapper(), 7);
|
||||
|
||||
0
|
||||
}
|
||||
|
|
|
@ -365,6 +365,22 @@ impl <T: PartialEq> PartialEq for Option<T> {
|
|||
}
|
||||
}
|
||||
|
||||
#[lang = "shl"]
|
||||
pub trait Shl<RHS = Self> {
|
||||
type Output;
|
||||
|
||||
#[must_use]
|
||||
fn shl(self, rhs: RHS) -> Self::Output;
|
||||
}
|
||||
|
||||
impl Shl for u128 {
|
||||
type Output = u128;
|
||||
|
||||
fn shl(self, rhs: u128) -> u128 {
|
||||
self << rhs
|
||||
}
|
||||
}
|
||||
|
||||
#[lang = "neg"]
|
||||
pub trait Neg {
|
||||
type Output;
|
||||
|
@ -605,6 +621,7 @@ struct PanicLocation {
|
|||
}
|
||||
|
||||
#[no_mangle]
|
||||
#[cfg(not(windows))]
|
||||
pub fn get_tls() -> u8 {
|
||||
#[thread_local]
|
||||
static A: u8 = 42;
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
#![feature(
|
||||
no_core, start, lang_items, box_syntax, never_type, linkage,
|
||||
extern_types, thread_local
|
||||
)]
|
||||
#![feature(no_core, lang_items, box_syntax, never_type, linkage, extern_types, thread_local)]
|
||||
#![no_core]
|
||||
#![allow(dead_code, non_camel_case_types)]
|
||||
|
||||
|
@ -239,7 +236,7 @@ fn main() {
|
|||
|
||||
assert_eq!(((|()| 42u8) as fn(()) -> u8)(()), 42);
|
||||
|
||||
#[cfg(not(jit))]
|
||||
#[cfg(not(any(jit, windows)))]
|
||||
{
|
||||
extern {
|
||||
#[linkage = "extern_weak"]
|
||||
|
@ -264,6 +261,9 @@ fn main() {
|
|||
assert_eq!(f2 as i8, -128);
|
||||
assert_eq!(f2 as u8, 0);
|
||||
|
||||
let amount = 0;
|
||||
assert_eq!(1u128 << amount, 1);
|
||||
|
||||
static ANOTHER_STATIC: &u8 = &A_STATIC;
|
||||
assert_eq!(*ANOTHER_STATIC, 42);
|
||||
|
||||
|
@ -289,7 +289,7 @@ fn main() {
|
|||
|
||||
from_decimal_string();
|
||||
|
||||
#[cfg(not(jit))]
|
||||
#[cfg(not(any(jit, windows)))]
|
||||
test_tls();
|
||||
|
||||
#[cfg(all(not(jit), target_os = "linux"))]
|
||||
|
|
|
@ -119,21 +119,5 @@ index 6609bc3..241b497 100644
|
|||
|
||||
#[test]
|
||||
#[should_panic(expected = "index 0 greater than length of slice")]
|
||||
diff --git a/library/core/tests/num/ops.rs b/library/core/tests/num/ops.rs
|
||||
index 9979cc8..d5d1d83 100644
|
||||
--- a/library/core/tests/num/ops.rs
|
||||
+++ b/library/core/tests/num/ops.rs
|
||||
@@ -238,7 +238,7 @@ macro_rules! test_shift_assign {
|
||||
}
|
||||
};
|
||||
}
|
||||
-test_shift!(test_shl_defined, Shl::shl);
|
||||
-test_shift_assign!(test_shl_assign_defined, ShlAssign::shl_assign);
|
||||
-test_shift!(test_shr_defined, Shr::shr);
|
||||
-test_shift_assign!(test_shr_assign_defined, ShrAssign::shr_assign);
|
||||
+//test_shift!(test_shl_defined, Shl::shl);
|
||||
+//test_shift_assign!(test_shl_assign_defined, ShlAssign::shl_assign);
|
||||
+//test_shift!(test_shr_defined, Shr::shr);
|
||||
+//test_shift_assign!(test_shr_assign_defined, ShrAssign::shr_assign);
|
||||
--
|
||||
2.21.0 (Apple Git-122)
|
||||
|
|
|
@ -0,0 +1,103 @@
|
|||
From 894e07dfec2624ba539129b1c1d63e1d7d812bda Mon Sep 17 00:00:00 2001
|
||||
From: bjorn3 <bjorn3@users.noreply.github.com>
|
||||
Date: Thu, 18 Feb 2021 18:45:28 +0100
|
||||
Subject: [PATCH] Disable 128bit atomic operations
|
||||
|
||||
Cranelift doesn't support them yet
|
||||
---
|
||||
library/core/src/sync/atomic.rs | 38 ---------------------------------
|
||||
library/core/tests/atomic.rs | 4 ----
|
||||
library/std/src/panic.rs | 6 ------
|
||||
3 files changed, 48 deletions(-)
|
||||
|
||||
diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs
|
||||
index 81c9e1d..65c9503 100644
|
||||
--- a/library/core/src/sync/atomic.rs
|
||||
+++ b/library/core/src/sync/atomic.rs
|
||||
@@ -2228,44 +2228,6 @@ atomic_int! {
|
||||
"AtomicU64::new(0)",
|
||||
u64 AtomicU64 ATOMIC_U64_INIT
|
||||
}
|
||||
-#[cfg(target_has_atomic_load_store = "128")]
|
||||
-atomic_int! {
|
||||
- cfg(target_has_atomic = "128"),
|
||||
- cfg(target_has_atomic_equal_alignment = "128"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- "i128",
|
||||
- "#![feature(integer_atomics)]\n\n",
|
||||
- atomic_min, atomic_max,
|
||||
- 16,
|
||||
- "AtomicI128::new(0)",
|
||||
- i128 AtomicI128 ATOMIC_I128_INIT
|
||||
-}
|
||||
-#[cfg(target_has_atomic_load_store = "128")]
|
||||
-atomic_int! {
|
||||
- cfg(target_has_atomic = "128"),
|
||||
- cfg(target_has_atomic_equal_alignment = "128"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- rustc_const_stable(feature = "const_integer_atomics", since = "1.34.0"),
|
||||
- unstable(feature = "integer_atomics", issue = "32976"),
|
||||
- "u128",
|
||||
- "#![feature(integer_atomics)]\n\n",
|
||||
- atomic_umin, atomic_umax,
|
||||
- 16,
|
||||
- "AtomicU128::new(0)",
|
||||
- u128 AtomicU128 ATOMIC_U128_INIT
|
||||
-}
|
||||
|
||||
macro_rules! atomic_int_ptr_sized {
|
||||
( $($target_pointer_width:literal $align:literal)* ) => { $(
|
||||
diff --git a/library/core/tests/atomic.rs b/library/core/tests/atomic.rs
|
||||
index 2d1e449..cb6da5d 100644
|
||||
--- a/library/core/tests/atomic.rs
|
||||
+++ b/library/core/tests/atomic.rs
|
||||
@@ -145,10 +145,6 @@ fn atomic_alignment() {
|
||||
assert_eq!(align_of::<AtomicU64>(), size_of::<AtomicU64>());
|
||||
#[cfg(target_has_atomic = "64")]
|
||||
assert_eq!(align_of::<AtomicI64>(), size_of::<AtomicI64>());
|
||||
- #[cfg(target_has_atomic = "128")]
|
||||
- assert_eq!(align_of::<AtomicU128>(), size_of::<AtomicU128>());
|
||||
- #[cfg(target_has_atomic = "128")]
|
||||
- assert_eq!(align_of::<AtomicI128>(), size_of::<AtomicI128>());
|
||||
#[cfg(target_has_atomic = "ptr")]
|
||||
assert_eq!(align_of::<AtomicUsize>(), size_of::<AtomicUsize>());
|
||||
#[cfg(target_has_atomic = "ptr")]
|
||||
diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs
|
||||
index 89a822a..779fd88 100644
|
||||
--- a/library/std/src/panic.rs
|
||||
+++ b/library/std/src/panic.rs
|
||||
@@ -279,9 +279,6 @@ impl RefUnwindSafe for atomic::AtomicI32 {}
|
||||
#[cfg(target_has_atomic_load_store = "64")]
|
||||
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
|
||||
impl RefUnwindSafe for atomic::AtomicI64 {}
|
||||
-#[cfg(target_has_atomic_load_store = "128")]
|
||||
-#[unstable(feature = "integer_atomics", issue = "32976")]
|
||||
-impl RefUnwindSafe for atomic::AtomicI128 {}
|
||||
|
||||
#[cfg(target_has_atomic_load_store = "ptr")]
|
||||
#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
|
||||
@@ -298,9 +295,6 @@ impl RefUnwindSafe for atomic::AtomicU32 {}
|
||||
#[cfg(target_has_atomic_load_store = "64")]
|
||||
#[stable(feature = "integer_atomics_stable", since = "1.34.0")]
|
||||
impl RefUnwindSafe for atomic::AtomicU64 {}
|
||||
-#[cfg(target_has_atomic_load_store = "128")]
|
||||
-#[unstable(feature = "integer_atomics", issue = "32976")]
|
||||
-impl RefUnwindSafe for atomic::AtomicU128 {}
|
||||
|
||||
#[cfg(target_has_atomic_load_store = "8")]
|
||||
#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
|
||||
--
|
||||
2.26.2.7.g19db9cfb68
|
||||
|
|
@ -1,7 +1,6 @@
|
|||
#!/bin/bash --verbose
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
rustup component add rust-src rustc-dev llvm-tools-preview
|
||||
./build_sysroot/prepare_sysroot_src.sh
|
||||
cargo install hyperfine || echo "Skipping hyperfine install"
|
||||
|
||||
|
|
|
@ -1 +1,3 @@
|
|||
nightly-2021-01-30
|
||||
[toolchain]
|
||||
channel = "nightly-2021-03-29"
|
||||
components = ["rust-src", "rustc-dev", "llvm-tools-preview"]
|
||||
|
|
4
compiler/rustc_codegen_cranelift/rustfmt.toml
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Matches rustfmt.toml of rustc
|
||||
version = "Two"
|
||||
use_small_heuristics = "Max"
|
||||
merge_derives = false
|
|
@ -1,10 +1,10 @@
|
|||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
dir=$(dirname "$0")
|
||||
source "$dir/config.sh"
|
||||
|
||||
# read nightly compiler from rust-toolchain file
|
||||
TOOLCHAIN=$(cat "$dir/rust-toolchain")
|
||||
TOOLCHAIN=$(cat "$dir/rust-toolchain" | grep channel | sed "s/channel = \"\(.*\)\"/\1/")
|
||||
|
||||
cmd=$1
|
||||
shift || true
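As a standalone illustration (not part of cargo.sh itself), the `grep`/`sed` pipeline above reduces the new TOML-style `rust-toolchain` file to just the channel name:

```bash
$ printf '[toolchain]\nchannel = "nightly-2021-03-29"\ncomponents = ["rust-src"]\n' \
    | grep channel | sed 's/channel = "\(.*\)"/\1/'
nightly-2021-03-29
```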
|
||||
|
|
|
@ -2,15 +2,7 @@
|
|||
|
||||
set -e
|
||||
|
||||
unamestr=$(uname)
|
||||
if [[ "$unamestr" == 'Linux' ]]; then
|
||||
dylib_ext='so'
|
||||
elif [[ "$unamestr" == 'Darwin' ]]; then
|
||||
dylib_ext='dylib'
|
||||
else
|
||||
echo "Unsupported os"
|
||||
exit 1
|
||||
fi
|
||||
dylib=$(echo "" | rustc --print file-names --crate-type dylib --crate-name rustc_codegen_cranelift -)
|
||||
|
||||
if echo "$RUSTC_WRAPPER" | grep sccache; then
|
||||
echo
|
||||
|
@ -24,10 +16,10 @@ dir=$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd)
|
|||
export RUSTC=$dir"/bin/cg_clif"
|
||||
|
||||
export RUSTDOCFLAGS=$linker' -Cpanic=abort -Zpanic-abort-tests '\
|
||||
'-Zcodegen-backend='$dir'/lib/librustc_codegen_cranelift.'$dylib_ext' --sysroot '$dir
|
||||
'-Zcodegen-backend='$dir'/lib/'$dylib' --sysroot '$dir
|
||||
|
||||
# FIXME remove once the atomic shim is gone
|
||||
if [[ "$unamestr" == 'Darwin' ]]; then
|
||||
# FIXME fix `#[linkage = "extern_weak"]` without this
|
||||
if [[ "$(uname)" == 'Darwin' ]]; then
|
||||
export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup"
|
||||
fi
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
|
@ -8,7 +8,7 @@ case $1 in
|
|||
|
||||
echo "=> Installing new nightly"
|
||||
rustup toolchain install --profile minimal "nightly-${TOOLCHAIN}" # Sanity check to see if the nightly exists
|
||||
echo "nightly-${TOOLCHAIN}" > rust-toolchain
|
||||
sed -i "s/\"nightly-.*\"/\"nightly-${TOOLCHAIN}\"/" rust-toolchain
|
||||
rustup component add rustfmt || true
|
||||
|
||||
echo "=> Uninstalling all old nighlies"
|
||||
|
|
68
compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh
Normal file
|
@ -0,0 +1,68 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
./build.sh
|
||||
source build/config.sh
|
||||
|
||||
echo "[SETUP] Rust fork"
|
||||
git clone https://github.com/rust-lang/rust.git || true
|
||||
pushd rust
|
||||
git fetch
|
||||
git checkout -- .
|
||||
git checkout "$(rustc -V | cut -d' ' -f3 | tr -d '(')"
|
||||
|
||||
git apply - <<EOF
|
||||
diff --git a/Cargo.toml b/Cargo.toml
|
||||
index 5bd1147cad5..10d68a2ff14 100644
|
||||
--- a/Cargo.toml
|
||||
+++ b/Cargo.toml
|
||||
@@ -111,5 +111,7 @@ rustc-std-workspace-std = { path = 'library/rustc-std-workspace-std' }
|
||||
rustc-std-workspace-alloc = { path = 'library/rustc-std-workspace-alloc' }
|
||||
rustc-std-workspace-std = { path = 'library/rustc-std-workspace-std' }
|
||||
|
||||
+compiler_builtins = { path = "../build_sysroot/compiler-builtins" }
|
||||
+
|
||||
[patch."https://github.com/rust-lang/rust-clippy"]
|
||||
clippy_lints = { path = "src/tools/clippy/clippy_lints" }
|
||||
diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml
|
||||
index 23e689fcae7..5f077b765b6 100644
|
||||
--- a/compiler/rustc_data_structures/Cargo.toml
|
||||
+++ b/compiler/rustc_data_structures/Cargo.toml
|
||||
@@ -32,7 +32,6 @@ tempfile = "3.0.5"
|
||||
|
||||
[dependencies.parking_lot]
|
||||
version = "0.11"
|
||||
-features = ["nightly"]
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
winapi = { version = "0.3", features = ["fileapi", "psapi"] }
|
||||
diff --git a/library/alloc/Cargo.toml b/library/alloc/Cargo.toml
|
||||
index d95b5b7f17f..00b6f0e3635 100644
|
||||
--- a/library/alloc/Cargo.toml
|
||||
+++ b/library/alloc/Cargo.toml
|
||||
@@ -8,7 +8,7 @@ edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
core = { path = "../core" }
|
||||
-compiler_builtins = { version = "0.1.39", features = ['rustc-dep-of-std'] }
|
||||
+compiler_builtins = { version = "0.1.39", features = ['rustc-dep-of-std', 'no-asm'] }
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.7"
|
||||
EOF
|
||||
|
||||
cat > config.toml <<EOF
|
||||
[llvm]
|
||||
ninja = false
|
||||
|
||||
[build]
|
||||
rustc = "$(pwd)/../build/bin/cg_clif"
|
||||
cargo = "$(rustup which cargo)"
|
||||
full-bootstrap = true
|
||||
local-rebuild = true
|
||||
|
||||
[rust]
|
||||
codegen-backends = ["cranelift"]
|
||||
deny-warnings = false
|
||||
EOF
|
||||
popd
|
|
@ -1,62 +1,12 @@
|
|||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")/../"
|
||||
|
||||
./build.sh
|
||||
source build/config.sh
|
||||
source ./scripts/setup_rust_fork.sh
|
||||
|
||||
echo "[TEST] Bootstrap of rustc"
|
||||
git clone https://github.com/rust-lang/rust.git || true
|
||||
pushd rust
|
||||
git fetch
|
||||
git checkout -- .
|
||||
git checkout "$(rustc -V | cut -d' ' -f3 | tr -d '(')"
|
||||
|
||||
git apply - <<EOF
|
||||
diff --git a/.gitmodules b/.gitmodules
|
||||
index 984113151de..c1e9d960d56 100644
|
||||
--- a/.gitmodules
|
||||
+++ b/.gitmodules
|
||||
@@ -34,10 +34,6 @@
|
||||
[submodule "src/doc/edition-guide"]
|
||||
path = src/doc/edition-guide
|
||||
url = https://github.com/rust-lang/edition-guide.git
|
||||
-[submodule "src/llvm-project"]
|
||||
- path = src/llvm-project
|
||||
- url = https://github.com/rust-lang/llvm-project.git
|
||||
- branch = rustc/11.0-2020-10-12
|
||||
[submodule "src/doc/embedded-book"]
|
||||
path = src/doc/embedded-book
|
||||
url = https://github.com/rust-embedded/book.git
|
||||
diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml
|
||||
index 23e689fcae7..5f077b765b6 100644
|
||||
--- a/compiler/rustc_data_structures/Cargo.toml
|
||||
+++ b/compiler/rustc_data_structures/Cargo.toml
|
||||
@@ -32,7 +32,6 @@ tempfile = "3.0.5"
|
||||
|
||||
[dependencies.parking_lot]
|
||||
version = "0.11"
|
||||
-features = ["nightly"]
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
winapi = { version = "0.3", features = ["fileapi", "psapi"] }
|
||||
EOF
|
||||
|
||||
cat > config.toml <<EOF
|
||||
[llvm]
|
||||
ninja = false
|
||||
|
||||
[build]
|
||||
rustc = "$(pwd)/../build/bin/cg_clif"
|
||||
cargo = "$(rustup which cargo)"
|
||||
full-bootstrap = true
|
||||
local-rebuild = true
|
||||
|
||||
[rust]
|
||||
codegen-backends = ["cranelift"]
|
||||
EOF
|
||||
|
||||
rm -r compiler/rustc_codegen_cranelift/{Cargo.*,src}
|
||||
cp ../Cargo.* compiler/rustc_codegen_cranelift/
|
||||
cp -r ../src compiler/rustc_codegen_cranelift/src
|
||||
|
|
87
compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh
Executable file
|
@ -0,0 +1,87 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
cd $(dirname "$0")/../
|
||||
|
||||
source ./scripts/setup_rust_fork.sh
|
||||
|
||||
echo "[TEST] Test suite of rustc"
|
||||
pushd rust
|
||||
|
||||
cargo install ripgrep
|
||||
|
||||
rm -r src/test/ui/{extern/,panics/,unsized-locals/,thinlto/,simd*,*lto*.rs,linkage*,unwind-*.rs} || true
|
||||
for test in $(rg --files-with-matches "asm!|catch_unwind|should_panic|lto" src/test/ui); do
|
||||
rm $test
|
||||
done
|
||||
|
||||
for test in $(rg -i --files-with-matches "//(\[\w+\])?~|// error-pattern:|// build-fail|// run-fail|-Cllvm-args" src/test/ui); do
|
||||
rm $test
|
||||
done
|
||||
|
||||
git checkout -- src/test/ui/issues/auxiliary/issue-3136-a.rs # contains //~ERROR, but shouldn't be removed
|
||||
|
||||
# these all depend on unwinding support
|
||||
rm src/test/ui/backtrace.rs
|
||||
rm src/test/ui/array-slice-vec/box-of-array-of-drop-*.rs
|
||||
rm src/test/ui/array-slice-vec/slice-panic-*.rs
|
||||
rm src/test/ui/array-slice-vec/nested-vec-3.rs
|
||||
rm src/test/ui/cleanup-rvalue-temp-during-incomplete-alloc.rs
|
||||
rm src/test/ui/issues/issue-26655.rs
|
||||
rm src/test/ui/issues/issue-29485.rs
|
||||
rm src/test/ui/issues/issue-30018-panic.rs
|
||||
rm src/test/ui/multi-panic.rs
|
||||
rm src/test/ui/sepcomp/sepcomp-unwind.rs
|
||||
rm src/test/ui/structs-enums/unit-like-struct-drop-run.rs
|
||||
rm src/test/ui/terminate-in-initializer.rs
|
||||
rm src/test/ui/threads-sendsync/task-stderr.rs
|
||||
rm src/test/ui/numbers-arithmetic/int-abs-overflow.rs
|
||||
rm src/test/ui/drop/drop-trait-enum.rs
|
||||
rm src/test/ui/numbers-arithmetic/issue-8460.rs
|
||||
|
||||
rm src/test/ui/issues/issue-28950.rs # depends on stack size optimizations
|
||||
rm src/test/ui/init-large-type.rs # same
|
||||
rm src/test/ui/sse2.rs # cpuid not supported, so sse2 not detected
|
||||
rm src/test/ui/issues/issue-33992.rs # unsupported linkages
|
||||
rm src/test/ui/issues/issue-51947.rs # same
|
||||
rm src/test/ui/numbers-arithmetic/saturating-float-casts.rs # intrinsic gives different but valid result
|
||||
rm src/test/ui/mir/mir_misc_casts.rs # depends on deduplication of constants
|
||||
rm src/test/ui/mir/mir_raw_fat_ptr.rs # same
|
||||
rm src/test/ui/async-await/async-fn-size-moved-locals.rs # -Cpanic=abort shrinks some generator by one byte
|
||||
rm src/test/ui/async-await/async-fn-size-uninit-locals.rs # same
|
||||
rm src/test/ui/generator/size-moved-locals.rs # same
|
||||
rm src/test/ui/fn/dyn-fn-alignment.rs # wants a 256 byte alignment
|
||||
rm src/test/ui/test-attrs/test-fn-signature-verification-for-explicit-return-type.rs # "Cannot run dynamic test fn out-of-process"
|
||||
rm src/test/ui/intrinsics/intrinsic-nearby.rs # unimplemented nearbyintf32 and nearbyintf64 intrinsics
|
||||
|
||||
rm src/test/incremental/hashes/inline_asm.rs # inline asm
|
||||
rm src/test/incremental/issue-72386.rs # same
|
||||
rm src/test/incremental/change_crate_dep_kind.rs # requires -Cpanic=unwind
|
||||
rm src/test/incremental/issue-49482.rs # same
|
||||
rm src/test/incremental/issue-54059.rs # same
|
||||
rm src/test/incremental/lto.rs # requires lto
|
||||
|
||||
rm src/test/pretty/asm.rs # inline asm
|
||||
rm src/test/pretty/raw-str-nonexpr.rs # same
|
||||
|
||||
rm -r src/test/run-pass-valgrind/unsized-locals
|
||||
|
||||
rm src/test/ui/json-bom-plus-crlf-multifile.rs # differing warning
|
||||
rm src/test/ui/json-bom-plus-crlf.rs # same
|
||||
rm src/test/ui/type-alias-impl-trait/cross_crate_ice*.rs # requires removed aux dep
|
||||
|
||||
rm src/test/ui/allocator/no_std-alloc-error-handler-default.rs # missing rust_oom definition
|
||||
rm src/test/ui/cfg/cfg-panic.rs
|
||||
rm src/test/ui/default-alloc-error-hook.rs
|
||||
rm -r src/test/ui/hygiene/
|
||||
|
||||
rm -r src/test/ui/polymorphization/ # polymorphization not yet supported
|
||||
rm src/test/codegen-units/polymorphization/unused_type_parameters.rs # same
|
||||
|
||||
rm -r src/test/run-make/fmt-write-bloat/ # tests an optimization
|
||||
rm src/test/ui/abi/mir/mir_codegen_calls_variadic.rs # requires float varargs
|
||||
rm src/test/ui/abi/variadic-ffi.rs # requires callee side vararg support
|
||||
|
||||
echo "[TEST] rustc test suite"
|
||||
RUST_TEST_NOCAPTURE=1 COMPILETEST_FORCE_STAGE0=1 ./x.py test --stage 0 src/test/{codegen-units,run-make,run-pass-valgrind,ui}
|
||||
popd
|
|
@ -1,4 +1,4 @@
|
|||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
|
@ -27,13 +27,16 @@ function no_sysroot_tests() {
|
|||
$MY_RUSTC example/mini_core_hello_world.rs --crate-name mini_core_hello_world --crate-type bin -g --target "$TARGET_TRIPLE"
|
||||
$RUN_WRAPPER ./target/out/mini_core_hello_world abc bcd
|
||||
# (echo "break set -n main"; echo "run"; sleep 1; echo "si -c 10"; sleep 1; echo "frame variable") | lldb -- ./target/out/mini_core_hello_world abc bcd
|
||||
|
||||
echo "[AOT] arbitrary_self_types_pointers_and_wrappers"
|
||||
$MY_RUSTC example/arbitrary_self_types_pointers_and_wrappers.rs --crate-name arbitrary_self_types_pointers_and_wrappers --crate-type bin --target "$TARGET_TRIPLE"
|
||||
$RUN_WRAPPER ./target/out/arbitrary_self_types_pointers_and_wrappers
|
||||
}
|
||||
|
||||
function base_sysroot_tests() {
|
||||
echo "[AOT] arbitrary_self_types_pointers_and_wrappers"
|
||||
$MY_RUSTC example/arbitrary_self_types_pointers_and_wrappers.rs --crate-name arbitrary_self_types_pointers_and_wrappers --crate-type bin --target "$TARGET_TRIPLE"
|
||||
$RUN_WRAPPER ./target/out/arbitrary_self_types_pointers_and_wrappers
|
||||
|
||||
echo "[AOT] alloc_system"
|
||||
$MY_RUSTC example/alloc_system.rs --crate-type lib --target "$TARGET_TRIPLE"
|
||||
|
||||
echo "[AOT] alloc_example"
|
||||
$MY_RUSTC example/alloc_example.rs --crate-type bin --target "$TARGET_TRIPLE"
|
||||
$RUN_WRAPPER ./target/out/alloc_example
|
||||
|
@ -68,14 +71,20 @@ function base_sysroot_tests() {
|
|||
echo "[AOT] mod_bench"
|
||||
$MY_RUSTC example/mod_bench.rs --crate-type bin --target "$TARGET_TRIPLE"
|
||||
$RUN_WRAPPER ./target/out/mod_bench
|
||||
|
||||
pushd rand
|
||||
rm -r ./target || true
|
||||
../build/cargo.sh test --workspace
|
||||
popd
|
||||
}
|
||||
|
||||
function extended_sysroot_tests() {
|
||||
pushd rand
|
||||
cargo clean
|
||||
if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
|
||||
echo "[TEST] rust-random/rand"
|
||||
../build/cargo.sh test --workspace
|
||||
else
|
||||
echo "[AOT] rust-random/rand"
|
||||
../build/cargo.sh build --workspace --target $TARGET_TRIPLE --tests
|
||||
fi
|
||||
popd
|
||||
|
||||
pushd simple-raytracer
|
||||
if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
|
||||
echo "[BENCH COMPILE] ebobby/simple-raytracer"
|
||||
|
@ -89,27 +98,40 @@ function extended_sysroot_tests() {
|
|||
else
|
||||
echo "[BENCH COMPILE] ebobby/simple-raytracer (skipped)"
|
||||
echo "[COMPILE] ebobby/simple-raytracer"
|
||||
../cargo.sh build
|
||||
../build/cargo.sh build --target $TARGET_TRIPLE
|
||||
echo "[BENCH RUN] ebobby/simple-raytracer (skipped)"
|
||||
fi
|
||||
popd
|
||||
|
||||
pushd build_sysroot/sysroot_src/library/core/tests
|
||||
echo "[TEST] libcore"
|
||||
rm -r ./target || true
|
||||
cargo clean
|
||||
if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
|
||||
../../../../../build/cargo.sh test
|
||||
else
|
||||
../../../../../build/cargo.sh build --target $TARGET_TRIPLE --tests
|
||||
fi
|
||||
popd
|
||||
|
||||
pushd regex
|
||||
echo "[TEST] rust-lang/regex example shootout-regex-dna"
|
||||
../build/cargo.sh clean
|
||||
cargo clean
|
||||
# Make sure `[codegen mono items] start` doesn't poison the diff
|
||||
../build/cargo.sh build --example shootout-regex-dna
|
||||
cat examples/regexdna-input.txt | ../build/cargo.sh run --example shootout-regex-dna | grep -v "Spawned thread" > res.txt
|
||||
../build/cargo.sh build --example shootout-regex-dna --target $TARGET_TRIPLE
|
||||
if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
|
||||
cat examples/regexdna-input.txt \
|
||||
| ../build/cargo.sh run --example shootout-regex-dna --target $TARGET_TRIPLE \
|
||||
| grep -v "Spawned thread" > res.txt
|
||||
diff -u res.txt examples/regexdna-output.txt
|
||||
fi
|
||||
|
||||
if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
|
||||
echo "[TEST] rust-lang/regex tests"
|
||||
../build/cargo.sh test --tests -- --exclude-should-panic --test-threads 1 -Zunstable-options -q
|
||||
else
|
||||
echo "[AOT] rust-lang/regex tests"
|
||||
../build/cargo.sh build --tests --target $TARGET_TRIPLE
|
||||
fi
|
||||
popd
|
||||
}
|
||||
|
||||
|
|
|
@ -10,14 +10,16 @@ use cranelift_codegen::entity::EntityRef;
|
|||
|
||||
use crate::prelude::*;
|
||||
|
||||
pub(super) fn add_args_header_comment(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
||||
pub(super) fn add_args_header_comment(fx: &mut FunctionCx<'_, '_, '_>) {
|
||||
if fx.clif_comments.enabled() {
|
||||
fx.add_global_comment(
|
||||
"kind loc.idx param pass mode ty".to_string(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn add_arg_comment<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
kind: &str,
|
||||
local: Option<mir::Local>,
|
||||
local_field: Option<usize>,
|
||||
|
@ -25,6 +27,10 @@ pub(super) fn add_arg_comment<'tcx>(
|
|||
arg_abi_mode: PassMode,
|
||||
arg_layout: TyAndLayout<'tcx>,
|
||||
) {
|
||||
if !fx.clif_comments.enabled() {
|
||||
return;
|
||||
}
|
||||
|
||||
let local = if let Some(local) = local {
|
||||
Cow::Owned(format!("{:?}", local))
|
||||
} else {
|
||||
|
@ -42,11 +48,7 @@ pub(super) fn add_arg_comment<'tcx>(
|
|||
[param_a, param_b] => Cow::Owned(format!("= {:?},{:?}", param_a, param_b)),
|
||||
params => Cow::Owned(format!(
|
||||
"= {}",
|
||||
params
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>()
|
||||
.join(",")
|
||||
params.iter().map(ToString::to_string).collect::<Vec<_>>().join(",")
|
||||
)),
|
||||
};
|
||||
|
||||
|
@ -62,27 +64,26 @@ pub(super) fn add_arg_comment<'tcx>(
|
|||
));
|
||||
}
|
||||
|
||||
pub(super) fn add_locals_header_comment(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
||||
pub(super) fn add_locals_header_comment(fx: &mut FunctionCx<'_, '_, '_>) {
|
||||
if fx.clif_comments.enabled() {
|
||||
fx.add_global_comment(String::new());
|
||||
fx.add_global_comment(
|
||||
"kind local ty size align (abi,pref)".to_string(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn add_local_place_comments<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
place: CPlace<'tcx>,
|
||||
local: Local,
|
||||
) {
|
||||
if !fx.clif_comments.enabled() {
|
||||
return;
|
||||
}
|
||||
let TyAndLayout { ty, layout } = place.layout();
|
||||
let rustc_target::abi::Layout {
|
||||
size,
|
||||
align,
|
||||
abi: _,
|
||||
variants: _,
|
||||
fields: _,
|
||||
largest_niche: _,
|
||||
} = layout;
|
||||
let rustc_target::abi::Layout { size, align, abi: _, variants: _, fields: _, largest_niche: _ } =
|
||||
layout;
|
||||
|
||||
let (kind, extra) = match *place.inner() {
|
||||
CPlaceInner::Var(place_local, var) => {
|
||||
|
@ -91,10 +92,7 @@ pub(super) fn add_local_place_comments<'tcx>(
|
|||
}
|
||||
CPlaceInner::VarPair(place_local, var1, var2) => {
|
||||
assert_eq!(local, place_local);
|
||||
(
|
||||
"ssa",
|
||||
Cow::Owned(format!(",var=({}, {})", var1.index(), var2.index())),
|
||||
)
|
||||
("ssa", Cow::Owned(format!(",var=({}, {})", var1.index(), var2.index())))
|
||||
}
|
||||
CPlaceInner::VarLane(_local, _var, _lane) => unreachable!(),
|
||||
CPlaceInner::Addr(ptr, meta) => {
|
||||
|
@ -103,19 +101,16 @@ pub(super) fn add_local_place_comments<'tcx>(
|
|||
} else {
|
||||
Cow::Borrowed("")
|
||||
};
|
||||
match ptr.base_and_offset() {
|
||||
(crate::pointer::PointerBase::Addr(addr), offset) => (
|
||||
"reuse",
|
||||
format!("storage={}{}{}", addr, offset, meta).into(),
|
||||
),
|
||||
(crate::pointer::PointerBase::Stack(stack_slot), offset) => (
|
||||
"stack",
|
||||
format!("storage={}{}{}", stack_slot, offset, meta).into(),
|
||||
),
|
||||
(crate::pointer::PointerBase::Dangling(align), offset) => (
|
||||
"zst",
|
||||
format!("align={},offset={}", align.bytes(), offset).into(),
|
||||
),
|
||||
match ptr.debug_base_and_offset() {
|
||||
(crate::pointer::PointerBase::Addr(addr), offset) => {
|
||||
("reuse", format!("storage={}{}{}", addr, offset, meta).into())
|
||||
}
|
||||
(crate::pointer::PointerBase::Stack(stack_slot), offset) => {
|
||||
("stack", format!("storage={}{}{}", stack_slot, offset, meta).into())
|
||||
}
|
||||
(crate::pointer::PointerBase::Dangling(align), offset) => {
|
||||
("zst", format!("align={},offset={}", align.bytes(), offset).into())
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -128,11 +123,7 @@ pub(super) fn add_local_place_comments<'tcx>(
|
|||
size.bytes(),
|
||||
align.abi.bytes(),
|
||||
align.pref.bytes(),
|
||||
if extra.is_empty() {
|
||||
""
|
||||
} else {
|
||||
" "
|
||||
},
|
||||
if extra.is_empty() { "" } else { " " },
|
||||
extra,
|
||||
));
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
//! Handling of everything related to the calling convention. Also fills `fx.local_map`.
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
mod comments;
|
||||
mod pass_mode;
|
||||
mod returning;
|
||||
|
@ -38,25 +37,15 @@ fn clif_sig_from_fn_abi<'tcx>(
|
|||
| Conv::X86VectorCall
|
||||
| Conv::AmdGpuKernel
|
||||
| Conv::AvrInterrupt
|
||||
| Conv::AvrNonBlockingInterrupt => {
|
||||
todo!("{:?}", fn_abi.conv)
|
||||
}
|
||||
| Conv::AvrNonBlockingInterrupt => todo!("{:?}", fn_abi.conv),
|
||||
};
|
||||
let inputs = fn_abi
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter())
|
||||
.flatten();
|
||||
let inputs = fn_abi.args.iter().map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter()).flatten();
|
||||
|
||||
let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
|
||||
// Sometimes the first param is a pointer to the place where the return value needs to be stored.
|
||||
let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();
|
||||
|
||||
Signature {
|
||||
params,
|
||||
returns,
|
||||
call_conv,
|
||||
}
|
||||
Signature { params, returns, call_conv }
|
||||
}
|
||||
|
||||
pub(crate) fn get_function_sig<'tcx>(
|
||||
|
@ -65,37 +54,29 @@ pub(crate) fn get_function_sig<'tcx>(
|
|||
inst: Instance<'tcx>,
|
||||
) -> Signature {
|
||||
assert!(!inst.substs.needs_infer());
|
||||
clif_sig_from_fn_abi(
|
||||
tcx,
|
||||
triple,
|
||||
&FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]),
|
||||
)
|
||||
clif_sig_from_fn_abi(tcx, triple, &FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]))
|
||||
}
|
||||
|
||||
/// Instance must be monomorphized
|
||||
pub(crate) fn import_function<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
module: &mut impl Module,
|
||||
module: &mut dyn Module,
|
||||
inst: Instance<'tcx>,
|
||||
) -> FuncId {
|
||||
let name = tcx.symbol_name(inst).name.to_string();
|
||||
let sig = get_function_sig(tcx, module.isa().triple(), inst);
|
||||
module
|
||||
.declare_function(&name, Linkage::Import, &sig)
|
||||
.unwrap()
|
||||
module.declare_function(&name, Linkage::Import, &sig).unwrap()
|
||||
}
|
||||
|
||||
impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
|
||||
impl<'tcx> FunctionCx<'_, '_, 'tcx> {
|
||||
/// Instance must be monomorphized
|
||||
pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
|
||||
let func_id = import_function(self.tcx, &mut self.cx.module, inst);
|
||||
let func_ref = self
|
||||
.cx
|
||||
.module
|
||||
.declare_func_in_func(func_id, &mut self.bcx.func);
|
||||
let func_id = import_function(self.tcx, self.cx.module, inst);
|
||||
let func_ref = self.cx.module.declare_func_in_func(func_id, &mut self.bcx.func);
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
if self.clif_comments.enabled() {
|
||||
self.add_comment(func_ref, format!("{:?}", inst));
|
||||
}
|
||||
|
||||
func_ref
|
||||
}
|
||||
|
@ -107,23 +88,11 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
|
|||
returns: Vec<AbiParam>,
|
||||
args: &[Value],
|
||||
) -> &[Value] {
|
||||
let sig = Signature {
|
||||
params,
|
||||
returns,
|
||||
call_conv: CallConv::triple_default(self.triple()),
|
||||
};
|
||||
let func_id = self
|
||||
.cx
|
||||
.module
|
||||
.declare_function(&name, Linkage::Import, &sig)
|
||||
.unwrap();
|
||||
let func_ref = self
|
||||
.cx
|
||||
.module
|
||||
.declare_func_in_func(func_id, &mut self.bcx.func);
|
||||
let sig = Signature { params, returns, call_conv: CallConv::triple_default(self.triple()) };
|
||||
let func_id = self.cx.module.declare_function(&name, Linkage::Import, &sig).unwrap();
|
||||
let func_ref = self.cx.module.declare_func_in_func(func_id, &mut self.bcx.func);
|
||||
let call_inst = self.bcx.ins().call(func_ref, args);
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
if self.clif_comments.enabled() {
|
||||
self.add_comment(call_inst, format!("easy_call {}", name));
|
||||
}
|
||||
let results = self.bcx.inst_results(call_inst);
|
||||
|
@ -140,17 +109,12 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
|
|||
let (input_tys, args): (Vec<_>, Vec<_>) = args
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
(
|
||||
AbiParam::new(self.clif_type(arg.layout().ty).unwrap()),
|
||||
arg.load_scalar(self),
|
||||
)
|
||||
(AbiParam::new(self.clif_type(arg.layout().ty).unwrap()), arg.load_scalar(self))
|
||||
})
|
||||
.unzip();
|
||||
let return_layout = self.layout_of(return_ty);
|
||||
let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
|
||||
tup.types()
|
||||
.map(|ty| AbiParam::new(self.clif_type(ty).unwrap()))
|
||||
.collect()
|
||||
tup.types().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
|
||||
} else {
|
||||
vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
|
||||
};
|
||||
|
@ -169,7 +133,7 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
|
|||
|
||||
/// Make a [`CPlace`] capable of holding value of the specified type.
|
||||
fn make_local_place<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
local: Local,
|
||||
layout: TyAndLayout<'tcx>,
|
||||
is_ssa: bool,
|
||||
|
@ -184,16 +148,12 @@ fn make_local_place<'tcx>(
|
|||
CPlace::new_stack_slot(fx, layout)
|
||||
};
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
self::comments::add_local_place_comments(fx, place, local);
|
||||
|
||||
place
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_fn_prelude<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
start_block: Block,
|
||||
) {
|
||||
pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_block: Block) {
|
||||
fx.bcx.append_block_params_for_function_params(start_block);
|
||||
|
||||
fx.bcx.switch_to_block(start_block);
|
||||
|
@ -201,16 +161,9 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
|
|||
|
||||
let ssa_analyzed = crate::analyze::analyze(fx);
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
self::comments::add_args_header_comment(fx);
|
||||
|
||||
let mut block_params_iter = fx
|
||||
.bcx
|
||||
.func
|
||||
.dfg
|
||||
.block_params(start_block)
|
||||
.to_vec()
|
||||
.into_iter();
|
||||
let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
|
||||
let ret_place =
|
||||
self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
|
||||
assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);
|
||||
|
@ -272,7 +225,6 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
|
|||
fx.fn_abi = Some(fn_abi);
|
||||
assert!(block_params_iter.next().is_none(), "arg_value left behind");
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
self::comments::add_locals_header_comment(fx);
|
||||
|
||||
for (local, arg_kind, ty) in func_params {
|
||||
|
@ -286,10 +238,10 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
|
|||
if let Some((addr, meta)) = val.try_to_ptr() {
|
||||
let local_decl = &fx.mir.local_decls[local];
|
||||
// v this ! is important
|
||||
let internally_mutable = !val.layout().ty.is_freeze(
|
||||
fx.tcx.at(local_decl.source_info.span),
|
||||
ParamEnv::reveal_all(),
|
||||
);
|
||||
let internally_mutable = !val
|
||||
.layout()
|
||||
.ty
|
||||
.is_freeze(fx.tcx.at(local_decl.source_info.span), ParamEnv::reveal_all());
|
||||
if local_decl.mutability == mir::Mutability::Not && !internally_mutable {
|
||||
// We won't mutate this argument, so it is fine to borrow the backing storage
|
||||
// of this argument, to prevent a copy.
|
||||
|
@ -300,7 +252,6 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
|
|||
CPlace::for_ptr(addr, val.layout())
|
||||
};
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
self::comments::add_local_place_comments(fx, place, local);
|
||||
|
||||
assert_eq!(fx.local_map.push(place), local);
|
||||
|
@ -321,9 +272,7 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
|
|||
ArgKind::Spread(params) => {
|
||||
for (i, param) in params.into_iter().enumerate() {
|
||||
if let Some(param) = param {
|
||||
place
|
||||
.place_field(fx, mir::Field::new(i))
|
||||
.write_cvalue(fx, param);
|
||||
place.place_field(fx, mir::Field::new(i)).write_cvalue(fx, param);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -340,13 +289,11 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
|
|||
assert_eq!(fx.local_map.push(place), local);
|
||||
}
|
||||
|
||||
fx.bcx
|
||||
.ins()
|
||||
.jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
|
||||
fx.bcx.ins().jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_terminator_call<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
span: Span,
|
||||
current_block: Block,
|
||||
func: &Operand<'tcx>,
|
||||
|
@ -354,9 +301,8 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
destination: Option<(Place<'tcx>, BasicBlock)>,
|
||||
) {
|
||||
let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
|
||||
let fn_sig = fx
|
||||
.tcx
|
||||
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));
|
||||
let fn_sig =
|
||||
fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));
|
||||
|
||||
let destination = destination.map(|(place, bb)| (codegen_place(fx, place), bb));
|
||||
|
||||
|
@ -404,20 +350,11 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
let fn_abi = if let Some(instance) = instance {
|
||||
FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
|
||||
} else {
|
||||
FnAbi::of_fn_ptr(
|
||||
&RevealAllLayoutCx(fx.tcx),
|
||||
fn_ty.fn_sig(fx.tcx),
|
||||
&extra_args,
|
||||
)
|
||||
FnAbi::of_fn_ptr(&RevealAllLayoutCx(fx.tcx), fn_ty.fn_sig(fx.tcx), &extra_args)
|
||||
};
|
||||
|
||||
let is_cold = instance
|
||||
.map(|inst| {
|
||||
fx.tcx
|
||||
.codegen_fn_attrs(inst.def_id())
|
||||
.flags
|
||||
.contains(CodegenFnAttrFlags::COLD)
|
||||
})
|
||||
.map(|inst| fx.tcx.codegen_fn_attrs(inst.def_id()).flags.contains(CodegenFnAttrFlags::COLD))
|
||||
.unwrap_or(false);
|
||||
if is_cold {
|
||||
fx.cold_blocks.insert(current_block);
|
||||
|
@ -441,9 +378,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
}
|
||||
args
|
||||
} else {
|
||||
args.iter()
|
||||
.map(|arg| codegen_operand(fx, arg))
|
||||
.collect::<Vec<_>>()
|
||||
args.iter().map(|arg| codegen_operand(fx, arg)).collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
// | indirect call target
|
||||
|
@ -451,12 +386,8 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
// v v
|
||||
let (func_ref, first_arg) = match instance {
|
||||
// Trait object call
|
||||
Some(Instance {
|
||||
def: InstanceDef::Virtual(_, idx),
|
||||
..
|
||||
}) => {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
|
||||
if fx.clif_comments.enabled() {
|
||||
let nop_inst = fx.bcx.ins().nop();
|
||||
fx.add_comment(
|
||||
nop_inst,
|
||||
|
@ -477,8 +408,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
|
||||
// Indirect call
|
||||
None => {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
if fx.clif_comments.enabled() {
|
||||
let nop_inst = fx.bcx.ins().nop();
|
||||
fx.add_comment(nop_inst, "indirect call");
|
||||
}
|
||||
|
@ -511,10 +441,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if instance
|
||||
.map(|inst| inst.def.requires_caller_location(fx.tcx))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
if instance.map(|inst| inst.def.requires_caller_location(fx.tcx)).unwrap_or(false) {
|
||||
// Pass the caller location for `#[track_caller]`.
|
||||
let caller_location = fx.get_caller_location(span);
|
||||
call_args.extend(
|
||||
|
@ -542,11 +469,8 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
|
||||
// FIXME find a cleaner way to support varargs
|
||||
if fn_sig.c_variadic {
|
||||
if fn_sig.abi != Abi::C {
|
||||
fx.tcx.sess.span_fatal(
|
||||
span,
|
||||
&format!("Variadic call for non-C abi {:?}", fn_sig.abi),
|
||||
);
|
||||
if !matches!(fn_sig.abi, Abi::C { .. }) {
|
||||
fx.tcx.sess.span_fatal(span, &format!("Variadic call for non-C abi {:?}", fn_sig.abi));
|
||||
}
|
||||
let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
|
||||
let abi_params = call_args
|
||||
|
@ -555,9 +479,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
let ty = fx.bcx.func.dfg.value_type(arg);
|
||||
if !ty.is_int() {
|
||||
// FIXME set %al to upperbound on float args once floats are supported
|
||||
fx.tcx
|
||||
.sess
|
||||
.span_fatal(span, &format!("Non int ty {:?} for variadic call", ty));
|
||||
fx.tcx.sess.span_fatal(span, &format!("Non int ty {:?} for variadic call", ty));
|
||||
}
|
||||
AbiParam::new(ty)
|
||||
})
|
||||
|
@ -574,7 +496,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
}
|
||||
|
||||
pub(crate) fn codegen_drop<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
span: Span,
|
||||
drop_place: CPlace<'tcx>,
|
||||
) {
|
||||
|
@ -611,10 +533,7 @@ pub(crate) fn codegen_drop<'tcx>(
|
|||
fx,
|
||||
fx.layout_of(fx.tcx.mk_ref(
|
||||
&ty::RegionKind::ReErased,
|
||||
TypeAndMut {
|
||||
ty,
|
||||
mutbl: crate::rustc_hir::Mutability::Mut,
|
||||
},
|
||||
TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut },
|
||||
)),
|
||||
);
|
||||
let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0]);
|
||||
|
|
|
@ -71,12 +71,7 @@ fn cast_target_to_abi_params(cast: CastTarget) -> SmallVec<[AbiParam; 2]> {
|
|||
.prefix
|
||||
.iter()
|
||||
.flatten()
|
||||
.map(|&kind| {
|
||||
reg_to_abi_param(Reg {
|
||||
kind,
|
||||
size: cast.prefix_chunk_size,
|
||||
})
|
||||
})
|
||||
.map(|&kind| reg_to_abi_param(Reg { kind, size: cast.prefix_chunk_size }))
|
||||
.chain((0..rest_count).map(|_| reg_to_abi_param(cast.rest.unit)))
|
||||
.collect::<SmallVec<_>>();
|
||||
|
||||
|
@ -98,12 +93,10 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
|||
match self.mode {
|
||||
PassMode::Ignore => smallvec![],
|
||||
PassMode::Direct(attrs) => match &self.layout.abi {
|
||||
Abi::Scalar(scalar) => {
|
||||
smallvec![apply_arg_attrs_to_abi_param(
|
||||
Abi::Scalar(scalar) => smallvec![apply_arg_attrs_to_abi_param(
|
||||
AbiParam::new(scalar_to_clif_type(tcx, scalar.clone())),
|
||||
attrs
|
||||
)]
|
||||
}
|
||||
)],
|
||||
Abi::Vector { .. } => {
|
||||
let vector_ty = crate::intrinsics::clif_vector_type(tcx, self.layout).unwrap();
|
||||
smallvec![AbiParam::new(vector_ty)]
|
||||
|
@ -122,11 +115,7 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
|||
_ => unreachable!("{:?}", self.layout.abi),
|
||||
},
|
||||
PassMode::Cast(cast) => cast_target_to_abi_params(cast),
|
||||
PassMode::Indirect {
|
||||
attrs,
|
||||
extra_attrs: None,
|
||||
on_stack,
|
||||
} => {
|
||||
PassMode::Indirect { attrs, extra_attrs: None, on_stack } => {
|
||||
if on_stack {
|
||||
let size = u32::try_from(self.layout.size.bytes()).unwrap();
|
||||
smallvec![apply_arg_attrs_to_abi_param(
|
||||
|
@ -134,17 +123,10 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
|||
attrs
|
||||
)]
|
||||
} else {
|
||||
smallvec![apply_arg_attrs_to_abi_param(
|
||||
AbiParam::new(pointer_ty(tcx)),
|
||||
attrs
|
||||
)]
|
||||
smallvec![apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), attrs)]
|
||||
}
|
||||
}
|
||||
PassMode::Indirect {
|
||||
attrs,
|
||||
extra_attrs: Some(extra_attrs),
|
||||
on_stack,
|
||||
} => {
|
||||
PassMode::Indirect { attrs, extra_attrs: Some(extra_attrs), on_stack } => {
|
||||
assert!(!on_stack);
|
||||
smallvec![
|
||||
apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), attrs),
|
||||
|
@ -158,10 +140,9 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
|||
match self.mode {
|
||||
PassMode::Ignore => (None, vec![]),
|
||||
PassMode::Direct(_) => match &self.layout.abi {
|
||||
Abi::Scalar(scalar) => (
|
||||
None,
|
||||
vec![AbiParam::new(scalar_to_clif_type(tcx, scalar.clone()))],
|
||||
),
|
||||
Abi::Scalar(scalar) => {
|
||||
(None, vec![AbiParam::new(scalar_to_clif_type(tcx, scalar.clone()))])
|
||||
}
|
||||
Abi::Vector { .. } => {
|
||||
let vector_ty = crate::intrinsics::clif_vector_type(tcx, self.layout).unwrap();
|
||||
(None, vec![AbiParam::new(vector_ty)])
|
||||
|
@ -177,31 +158,19 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
|||
_ => unreachable!("{:?}", self.layout.abi),
|
||||
},
|
||||
PassMode::Cast(cast) => (None, cast_target_to_abi_params(cast).into_iter().collect()),
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: None,
|
||||
on_stack,
|
||||
} => {
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack } => {
|
||||
assert!(!on_stack);
|
||||
(
|
||||
Some(AbiParam::special(
|
||||
pointer_ty(tcx),
|
||||
ArgumentPurpose::StructReturn,
|
||||
)),
|
||||
vec![],
|
||||
)
|
||||
(Some(AbiParam::special(pointer_ty(tcx), ArgumentPurpose::StructReturn)), vec![])
|
||||
}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: Some(_),
|
||||
on_stack: _,
|
||||
} => unreachable!("unsized return value"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn to_casted_value<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
arg: CValue<'tcx>,
|
||||
cast: CastTarget,
|
||||
) -> SmallVec<[Value; 2]> {
|
||||
|
@ -211,9 +180,7 @@ pub(super) fn to_casted_value<'tcx>(
|
|||
cast_target_to_abi_params(cast)
|
||||
.into_iter()
|
||||
.map(|param| {
|
||||
let val = ptr
|
||||
.offset_i64(fx, offset)
|
||||
.load(fx, param.value_type, MemFlags::new());
|
||||
let val = ptr.offset_i64(fx, offset).load(fx, param.value_type, MemFlags::new());
|
||||
offset += i64::from(param.value_type.bytes());
|
||||
val
|
||||
})
|
||||
|
@ -221,16 +188,13 @@ pub(super) fn to_casted_value<'tcx>(
|
|||
}
|
||||
|
||||
pub(super) fn from_casted_value<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
block_params: &[Value],
|
||||
layout: TyAndLayout<'tcx>,
|
||||
cast: CastTarget,
|
||||
) -> CValue<'tcx> {
|
||||
let abi_params = cast_target_to_abi_params(cast);
|
||||
let abi_param_size: u32 = abi_params
|
||||
.iter()
|
||||
.map(|param| param.value_type.bytes())
|
||||
.sum();
|
||||
let abi_param_size: u32 = abi_params.iter().map(|param| param.value_type.bytes()).sum();
|
||||
let layout_size = u32::try_from(layout.size.bytes()).unwrap();
|
||||
let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
|
||||
kind: StackSlotKind::ExplicitSlot,
|
||||
|
@ -244,7 +208,7 @@ pub(super) fn from_casted_value<'tcx>(
|
|||
});
|
||||
let ptr = Pointer::new(fx.bcx.ins().stack_addr(pointer_ty(fx.tcx), stack_slot, 0));
|
||||
let mut offset = 0;
|
||||
let mut block_params_iter = block_params.into_iter().copied();
|
||||
let mut block_params_iter = block_params.iter().copied();
|
||||
for param in abi_params {
|
||||
let val = ptr.offset_i64(fx, offset).store(
|
||||
fx,
|
||||
|
@ -260,7 +224,7 @@ pub(super) fn from_casted_value<'tcx>(
|
|||
|
||||
/// Get a set of values to be passed as function arguments.
|
||||
pub(super) fn adjust_arg_for_abi<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
arg: CValue<'tcx>,
|
||||
arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
|
||||
) -> SmallVec<[Value; 2]> {
|
||||
|
@ -283,9 +247,9 @@ pub(super) fn adjust_arg_for_abi<'tcx>(
|
|||
/// Create a [`CValue`] containing the value of a function parameter adding clif function parameters
|
||||
/// as necessary.
|
||||
pub(super) fn cvalue_for_param<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
#[cfg_attr(not(debug_assertions), allow(unused_variables))] local: Option<mir::Local>,
|
||||
#[cfg_attr(not(debug_assertions), allow(unused_variables))] local_field: Option<usize>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
local: Option<mir::Local>,
|
||||
local_field: Option<usize>,
|
||||
arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
|
||||
block_params_iter: &mut impl Iterator<Item = Value>,
|
||||
) -> Option<CValue<'tcx>> {
|
||||
|
@ -294,15 +258,11 @@ pub(super) fn cvalue_for_param<'tcx>(
|
|||
.into_iter()
|
||||
.map(|abi_param| {
|
||||
let block_param = block_params_iter.next().unwrap();
|
||||
assert_eq!(
|
||||
fx.bcx.func.dfg.value_type(block_param),
|
||||
abi_param.value_type
|
||||
);
|
||||
assert_eq!(fx.bcx.func.dfg.value_type(block_param), abi_param.value_type);
|
||||
block_param
|
||||
})
|
||||
.collect::<SmallVec<[_; 2]>>();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
crate::abi::comments::add_arg_comment(
|
||||
fx,
|
||||
"arg",
|
||||
|
@ -321,29 +281,14 @@ pub(super) fn cvalue_for_param<'tcx>(
|
|||
}
|
||||
PassMode::Pair(_, _) => {
|
||||
assert_eq!(block_params.len(), 2, "{:?}", block_params);
|
||||
Some(CValue::by_val_pair(
|
||||
block_params[0],
|
||||
block_params[1],
|
||||
arg_abi.layout,
|
||||
))
|
||||
Some(CValue::by_val_pair(block_params[0], block_params[1], arg_abi.layout))
|
||||
}
|
||||
PassMode::Cast(cast) => Some(from_casted_value(fx, &block_params, arg_abi.layout, cast)),
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: None,
|
||||
on_stack: _,
|
||||
} => {
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
|
||||
assert_eq!(block_params.len(), 1, "{:?}", block_params);
|
||||
Some(CValue::by_ref(
|
||||
Pointer::new(block_params[0]),
|
||||
arg_abi.layout,
|
||||
))
|
||||
Some(CValue::by_ref(Pointer::new(block_params[0]), arg_abi.layout))
|
||||
}
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: Some(_),
|
||||
on_stack: _,
|
||||
} => {
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
assert_eq!(block_params.len(), 2, "{:?}", block_params);
|
||||
Some(CValue::by_ref_unsized(
|
||||
Pointer::new(block_params[0]),
|
||||
|
|
|
@ -8,14 +8,13 @@ use smallvec::{smallvec, SmallVec};

/// Can the given type be returned into an ssa var or does it need to be returned on the stack.
pub(crate) fn can_return_to_ssa_var<'tcx>(
fx: &FunctionCx<'_, 'tcx, impl Module>,
fx: &FunctionCx<'_, '_, 'tcx>,
func: &mir::Operand<'tcx>,
args: &[mir::Operand<'tcx>],
) -> bool {
let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
let fn_sig = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));
let fn_sig =
fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

// Handle special calls like instrinsics and empty drop glue.
let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {

@ -42,11 +41,7 @@ pub(crate) fn can_return_to_ssa_var<'tcx>(
|
|||
let fn_abi = if let Some(instance) = instance {
|
||||
FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
|
||||
} else {
|
||||
FnAbi::of_fn_ptr(
|
||||
&RevealAllLayoutCx(fx.tcx),
|
||||
fn_ty.fn_sig(fx.tcx),
|
||||
&extra_args,
|
||||
)
|
||||
FnAbi::of_fn_ptr(&RevealAllLayoutCx(fx.tcx), fn_ty.fn_sig(fx.tcx), &extra_args)
|
||||
};
|
||||
match fn_abi.ret.mode {
|
||||
PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) => true,
|
||||
|
@ -58,15 +53,12 @@ pub(crate) fn can_return_to_ssa_var<'tcx>(
|
|||
/// Return a place where the return value of the current function can be written to. If necessary
|
||||
/// this adds an extra parameter pointing to where the return value needs to be stored.
|
||||
pub(super) fn codegen_return_param<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
ssa_analyzed: &rustc_index::vec::IndexVec<Local, crate::analyze::SsaKind>,
|
||||
block_params_iter: &mut impl Iterator<Item = Value>,
|
||||
) -> CPlace<'tcx> {
|
||||
let (ret_place, ret_param): (_, SmallVec<[_; 2]>) = match fx.fn_abi.as_ref().unwrap().ret.mode {
|
||||
PassMode::Ignore => (
|
||||
CPlace::no_place(fx.fn_abi.as_ref().unwrap().ret.layout),
|
||||
smallvec![],
|
||||
),
|
||||
PassMode::Ignore => (CPlace::no_place(fx.fn_abi.as_ref().unwrap().ret.layout), smallvec![]),
|
||||
PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(_) => {
|
||||
let is_ssa = ssa_analyzed[RETURN_PLACE] == crate::analyze::SsaKind::Ssa;
|
||||
(
|
||||
|
@ -79,32 +71,19 @@ pub(super) fn codegen_return_param<'tcx>(
|
|||
smallvec![],
|
||||
)
|
||||
}
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: None,
|
||||
on_stack: _,
|
||||
} => {
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
|
||||
let ret_param = block_params_iter.next().unwrap();
|
||||
assert_eq!(fx.bcx.func.dfg.value_type(ret_param), pointer_ty(fx.tcx));
|
||||
(
|
||||
CPlace::for_ptr(
|
||||
Pointer::new(ret_param),
|
||||
fx.fn_abi.as_ref().unwrap().ret.layout,
|
||||
),
|
||||
CPlace::for_ptr(Pointer::new(ret_param), fx.fn_abi.as_ref().unwrap().ret.layout),
|
||||
smallvec![ret_param],
|
||||
)
|
||||
}
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: Some(_),
|
||||
on_stack: _,
|
||||
} => unreachable!("unsized return value"),
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
};
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
let _ = ret_param;
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
crate::abi::comments::add_arg_comment(
|
||||
fx,
|
||||
"ret",
|
||||
|
@ -120,27 +99,21 @@ pub(super) fn codegen_return_param<'tcx>(
|
|||
|
||||
/// Invokes the closure with if necessary a value representing the return pointer. When the closure
|
||||
/// returns the call return value(s) if any are written to the correct place.
|
||||
pub(super) fn codegen_with_call_return_arg<'tcx, M: Module, T>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, M>,
|
||||
pub(super) fn codegen_with_call_return_arg<'tcx, T>(
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
ret_arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
|
||||
ret_place: Option<CPlace<'tcx>>,
|
||||
f: impl FnOnce(&mut FunctionCx<'_, 'tcx, M>, Option<Value>) -> (Inst, T),
|
||||
f: impl FnOnce(&mut FunctionCx<'_, '_, 'tcx>, Option<Value>) -> (Inst, T),
|
||||
) -> (Inst, T) {
|
||||
let return_ptr = match ret_arg_abi.mode {
|
||||
PassMode::Ignore => None,
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: None,
|
||||
on_stack: _,
|
||||
} => match ret_place {
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => match ret_place {
|
||||
Some(ret_place) => Some(ret_place.to_ptr().get_addr(fx)),
|
||||
None => Some(fx.bcx.ins().iconst(fx.pointer_type, 43)), // FIXME allocate temp stack slot
|
||||
},
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: Some(_),
|
||||
on_stack: _,
|
||||
} => unreachable!("unsized return value"),
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(_) => None,
|
||||
};
|
||||
|
||||
|
@ -169,7 +142,7 @@ pub(super) fn codegen_with_call_return_arg<'tcx, M: Module, T>(
|
|||
let results = fx
|
||||
.bcx
|
||||
.inst_results(call_inst)
|
||||
.into_iter()
|
||||
.iter()
|
||||
.copied()
|
||||
.collect::<SmallVec<[Value; 2]>>();
|
||||
let result =
|
||||
|
@ -177,37 +150,24 @@ pub(super) fn codegen_with_call_return_arg<'tcx, M: Module, T>(
|
|||
ret_place.write_cvalue(fx, result);
|
||||
}
|
||||
}
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: None,
|
||||
on_stack: _,
|
||||
} => {}
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: Some(_),
|
||||
on_stack: _,
|
||||
} => unreachable!("unsized return value"),
|
||||
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {}
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
}
|
||||
|
||||
(call_inst, meta)
|
||||
}
|
||||
|
||||
/// Codegen a return instruction with the right return value(s) if any.
|
||||
pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
||||
pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, '_>) {
|
||||
match fx.fn_abi.as_ref().unwrap().ret.mode {
|
||||
PassMode::Ignore
|
||||
| PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: None,
|
||||
on_stack: _,
|
||||
} => {
|
||||
PassMode::Ignore | PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
|
||||
fx.bcx.ins().return_(&[]);
|
||||
}
|
||||
PassMode::Indirect {
|
||||
attrs: _,
|
||||
extra_attrs: Some(_),
|
||||
on_stack: _,
|
||||
} => unreachable!("unsized return value"),
|
||||
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
|
||||
unreachable!("unsized return value")
|
||||
}
|
||||
PassMode::Direct(_) => {
|
||||
let place = fx.get_local_place(RETURN_PLACE);
|
||||
let ret_val = place.to_cvalue(fx).load_scalar(fx);
|
||||
|
|
|
@ -3,6 +3,7 @@

use crate::prelude::*;

use cranelift_codegen::binemit::{NullStackMapSink, NullTrapSink};
use rustc_ast::expand::allocator::{AllocatorKind, AllocatorTy, ALLOCATOR_METHODS};
use rustc_span::symbol::sym;

@ -66,13 +67,9 @@ fn codegen_inner(
|
|||
let callee_name = kind.fn_name(method.name);
|
||||
//eprintln!("Codegen allocator shim {} -> {} ({:?} -> {:?})", caller_name, callee_name, sig.params, sig.returns);
|
||||
|
||||
let func_id = module
|
||||
.declare_function(&caller_name, Linkage::Export, &sig)
|
||||
.unwrap();
|
||||
let func_id = module.declare_function(&caller_name, Linkage::Export, &sig).unwrap();
|
||||
|
||||
let callee_func_id = module
|
||||
.declare_function(&callee_name, Linkage::Import, &sig)
|
||||
.unwrap();
|
||||
let callee_func_id = module.declare_function(&callee_name, Linkage::Import, &sig).unwrap();
|
||||
|
||||
let mut ctx = Context::new();
|
||||
ctx.func = Function::with_name_signature(ExternalName::user(0, 0), sig.clone());
|
||||
|
@ -96,11 +93,7 @@ fn codegen_inner(
|
|||
bcx.finalize();
|
||||
}
|
||||
module
|
||||
.define_function(
|
||||
func_id,
|
||||
&mut ctx,
|
||||
&mut cranelift_codegen::binemit::NullTrapSink {},
|
||||
)
|
||||
.define_function(func_id, &mut ctx, &mut NullTrapSink {}, &mut NullStackMapSink {})
|
||||
.unwrap();
|
||||
unwind_context.add_function(func_id, &ctx, module.isa());
|
||||
}
|
||||
|
@ -114,13 +107,10 @@ fn codegen_inner(
|
|||
let callee_name = kind.fn_name(sym::oom);
|
||||
//eprintln!("Codegen allocator shim {} -> {} ({:?} -> {:?})", caller_name, callee_name, sig.params, sig.returns);
|
||||
|
||||
let func_id = module
|
||||
.declare_function("__rust_alloc_error_handler", Linkage::Export, &sig)
|
||||
.unwrap();
|
||||
let func_id =
|
||||
module.declare_function("__rust_alloc_error_handler", Linkage::Export, &sig).unwrap();
|
||||
|
||||
let callee_func_id = module
|
||||
.declare_function(&callee_name, Linkage::Import, &sig)
|
||||
.unwrap();
|
||||
let callee_func_id = module.declare_function(&callee_name, Linkage::Import, &sig).unwrap();
|
||||
|
||||
let mut ctx = Context::new();
|
||||
ctx.func = Function::with_name_signature(ExternalName::user(0, 0), sig);
|
||||
|
@ -143,11 +133,7 @@ fn codegen_inner(
|
|||
bcx.finalize();
|
||||
}
|
||||
module
|
||||
.define_function(
|
||||
func_id,
|
||||
&mut ctx,
|
||||
&mut cranelift_codegen::binemit::NullTrapSink {},
|
||||
)
|
||||
.define_function(func_id, &mut ctx, &mut NullTrapSink {}, &mut NullStackMapSink {})
|
||||
.unwrap();
|
||||
unwind_context.add_function(func_id, &ctx, module.isa());
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ pub(crate) enum SsaKind {
|
|||
Ssa,
|
||||
}
|
||||
|
||||
pub(crate) fn analyze(fx: &FunctionCx<'_, '_, impl Module>) -> IndexVec<Local, SsaKind> {
|
||||
pub(crate) fn analyze(fx: &FunctionCx<'_, '_, '_>) -> IndexVec<Local, SsaKind> {
|
||||
let mut flag_map = fx
|
||||
.mir
|
||||
.local_decls
|
||||
|
@ -40,12 +40,7 @@ pub(crate) fn analyze(fx: &FunctionCx<'_, '_, impl Module>) -> IndexVec<Local, S
|
|||
}
|
||||
|
||||
match &bb.terminator().kind {
|
||||
TerminatorKind::Call {
|
||||
destination,
|
||||
func,
|
||||
args,
|
||||
..
|
||||
} => {
|
||||
TerminatorKind::Call { destination, func, args, .. } => {
|
||||
if let Some((dest_place, _dest_bb)) = destination {
|
||||
if !crate::abi::can_return_to_ssa_var(fx, func, args) {
|
||||
not_ssa(&mut flag_map, dest_place.local)
|
||||
|
|
|
@ -12,10 +12,7 @@ use object::{Object, ObjectSymbol, SymbolKind};

#[derive(Debug)]
enum ArchiveEntry {
FromArchive {
archive_index: usize,
entry_index: usize,
},
FromArchive { archive_index: usize, entry_index: usize },
File(PathBuf),
}

@ -30,7 +27,6 @@ pub(crate) struct ArArchiveBuilder<'a> {
|
|||
// Don't use `HashMap` here, as the order is important. `rust.metadata.bin` must always be at
|
||||
// the end of an archive for linkers to not get confused.
|
||||
entries: Vec<(String, ArchiveEntry)>,
|
||||
update_symbols: bool,
|
||||
}
|
||||
|
||||
impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
||||
|
@ -46,10 +42,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
|||
let entry = entry.unwrap();
|
||||
entries.push((
|
||||
String::from_utf8(entry.header().identifier().to_vec()).unwrap(),
|
||||
ArchiveEntry::FromArchive {
|
||||
archive_index: 0,
|
||||
entry_index: i,
|
||||
},
|
||||
ArchiveEntry::FromArchive { archive_index: 0, entry_index: i },
|
||||
));
|
||||
i += 1;
|
||||
}
|
||||
|
@ -69,7 +62,6 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
|||
|
||||
src_archives,
|
||||
entries,
|
||||
update_symbols: false,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -95,13 +87,8 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
|||
|
||||
fn add_native_library(&mut self, name: rustc_span::symbol::Symbol) {
|
||||
let location = find_library(name, &self.lib_search_paths, self.sess);
|
||||
self.add_archive(location.clone(), |_| false)
|
||||
.unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"failed to add native library {}: {}",
|
||||
location.to_string_lossy(),
|
||||
e
|
||||
);
|
||||
self.add_archive(location.clone(), |_| false).unwrap_or_else(|e| {
|
||||
panic!("failed to add native library {}: {}", location.to_string_lossy(), e);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -136,9 +123,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
|||
})
|
||||
}
|
||||
|
||||
fn update_symbols(&mut self) {
|
||||
self.update_symbols = true;
|
||||
}
|
||||
fn update_symbols(&mut self) {}
|
||||
|
||||
fn build(mut self) {
|
||||
enum BuilderKind {
|
||||
|
@ -156,10 +141,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
|||
// FIXME only read the symbol table of the object files to avoid having to keep all
|
||||
// object files in memory at once, or read them twice.
|
||||
let data = match entry {
|
||||
ArchiveEntry::FromArchive {
|
||||
archive_index,
|
||||
entry_index,
|
||||
} => {
|
||||
ArchiveEntry::FromArchive { archive_index, entry_index } => {
|
||||
// FIXME read symbols from symtab
|
||||
use std::io::Read;
|
||||
let (ref _src_archive_path, ref mut src_archive) =
|
||||
|
@ -225,10 +207,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
|||
err
|
||||
));
|
||||
}),
|
||||
entries
|
||||
.iter()
|
||||
.map(|(name, _)| name.as_bytes().to_vec())
|
||||
.collect(),
|
||||
entries.iter().map(|(name, _)| name.as_bytes().to_vec()).collect(),
|
||||
ar::GnuSymbolTableFormat::Size32,
|
||||
symbol_table,
|
||||
)
|
||||
|
@ -271,8 +250,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
|
|||
.expect("Couldn't run ranlib");
|
||||
|
||||
if !status.success() {
|
||||
self.sess
|
||||
.fatal(&format!("Ranlib exited with code {:?}", status.code()));
|
||||
self.sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -292,13 +270,8 @@ impl<'a> ArArchiveBuilder<'a> {
|
|||
let file_name = String::from_utf8(entry.header().identifier().to_vec())
|
||||
.map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidData, err))?;
|
||||
if !skip(&file_name) {
|
||||
self.entries.push((
|
||||
file_name,
|
||||
ArchiveEntry::FromArchive {
|
||||
archive_index,
|
||||
entry_index: i,
|
||||
},
|
||||
));
|
||||
self.entries
|
||||
.push((file_name, ArchiveEntry::FromArchive { archive_index, entry_index: i }));
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
|
|
|
@ -1,185 +0,0 @@
//! Atomic intrinsics are implemented using a global lock for now, as Cranelift doesn't support
//! atomic operations yet.

// FIXME implement atomic instructions in Cranelift.

use crate::prelude::*;

#[cfg(all(feature = "jit", unix))]
#[no_mangle]
static mut __cg_clif_global_atomic_mutex: libc::pthread_mutex_t = libc::PTHREAD_MUTEX_INITIALIZER;

||||
pub(crate) fn init_global_lock(
|
||||
module: &mut impl Module,
|
||||
bcx: &mut FunctionBuilder<'_>,
|
||||
use_jit: bool,
|
||||
) {
|
||||
if use_jit {
|
||||
// When using JIT, dylibs won't find the __cg_clif_global_atomic_mutex data object defined here,
|
||||
// so instead we define it in the cg_clif dylib.
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
let mut data_ctx = DataContext::new();
|
||||
data_ctx.define_zeroinit(1024); // 1024 bytes should be big enough on all platforms.
|
||||
data_ctx.set_align(16);
|
||||
let atomic_mutex = module
|
||||
.declare_data(
|
||||
"__cg_clif_global_atomic_mutex",
|
||||
Linkage::Export,
|
||||
true,
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
module.define_data(atomic_mutex, &data_ctx).unwrap();
|
||||
|
||||
let pthread_mutex_init = module
|
||||
.declare_function(
|
||||
"pthread_mutex_init",
|
||||
Linkage::Import,
|
||||
&cranelift_codegen::ir::Signature {
|
||||
call_conv: module.target_config().default_call_conv,
|
||||
params: vec![
|
||||
AbiParam::new(
|
||||
module.target_config().pointer_type(), /* *mut pthread_mutex_t */
|
||||
),
|
||||
AbiParam::new(
|
||||
module.target_config().pointer_type(), /* *const pthread_mutex_attr_t */
|
||||
),
|
||||
],
|
||||
returns: vec![AbiParam::new(types::I32 /* c_int */)],
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let pthread_mutex_init = module.declare_func_in_func(pthread_mutex_init, bcx.func);
|
||||
|
||||
let atomic_mutex = module.declare_data_in_func(atomic_mutex, bcx.func);
|
||||
let atomic_mutex = bcx
|
||||
.ins()
|
||||
.global_value(module.target_config().pointer_type(), atomic_mutex);
|
||||
|
||||
let nullptr = bcx.ins().iconst(module.target_config().pointer_type(), 0);
|
||||
|
||||
bcx.ins().call(pthread_mutex_init, &[atomic_mutex, nullptr]);
|
||||
}
|
||||
|
||||
pub(crate) fn init_global_lock_constructor(
|
||||
module: &mut impl Module,
|
||||
constructor_name: &str,
|
||||
) -> FuncId {
|
||||
let sig = Signature::new(CallConv::SystemV);
|
||||
let init_func_id = module
|
||||
.declare_function(constructor_name, Linkage::Export, &sig)
|
||||
.unwrap();
|
||||
|
||||
let mut ctx = Context::new();
|
||||
ctx.func = Function::with_name_signature(ExternalName::user(0, 0), sig);
|
||||
{
|
||||
let mut func_ctx = FunctionBuilderContext::new();
|
||||
let mut bcx = FunctionBuilder::new(&mut ctx.func, &mut func_ctx);
|
||||
|
||||
let block = bcx.create_block();
|
||||
bcx.switch_to_block(block);
|
||||
|
||||
crate::atomic_shim::init_global_lock(module, &mut bcx, false);
|
||||
|
||||
bcx.ins().return_(&[]);
|
||||
bcx.seal_all_blocks();
|
||||
bcx.finalize();
|
||||
}
|
||||
module
|
||||
.define_function(
|
||||
init_func_id,
|
||||
&mut ctx,
|
||||
&mut cranelift_codegen::binemit::NullTrapSink {},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
init_func_id
|
||||
}
|
||||
|
||||
pub(crate) fn lock_global_lock(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
||||
let atomic_mutex = fx
|
||||
.cx
|
||||
.module
|
||||
.declare_data(
|
||||
"__cg_clif_global_atomic_mutex",
|
||||
Linkage::Import,
|
||||
true,
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let pthread_mutex_lock = fx
|
||||
.cx
|
||||
.module
|
||||
.declare_function(
|
||||
"pthread_mutex_lock",
|
||||
Linkage::Import,
|
||||
&cranelift_codegen::ir::Signature {
|
||||
call_conv: fx.cx.module.target_config().default_call_conv,
|
||||
params: vec![AbiParam::new(
|
||||
fx.cx.module.target_config().pointer_type(), /* *mut pthread_mutex_t */
|
||||
)],
|
||||
returns: vec![AbiParam::new(types::I32 /* c_int */)],
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let pthread_mutex_lock = fx
|
||||
.cx
|
||||
.module
|
||||
.declare_func_in_func(pthread_mutex_lock, fx.bcx.func);
|
||||
|
||||
let atomic_mutex = fx.cx.module.declare_data_in_func(atomic_mutex, fx.bcx.func);
|
||||
let atomic_mutex = fx
|
||||
.bcx
|
||||
.ins()
|
||||
.global_value(fx.cx.module.target_config().pointer_type(), atomic_mutex);
|
||||
|
||||
fx.bcx.ins().call(pthread_mutex_lock, &[atomic_mutex]);
|
||||
}
|
||||
|
||||
pub(crate) fn unlock_global_lock(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
||||
let atomic_mutex = fx
|
||||
.cx
|
||||
.module
|
||||
.declare_data(
|
||||
"__cg_clif_global_atomic_mutex",
|
||||
Linkage::Import,
|
||||
true,
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let pthread_mutex_unlock = fx
|
||||
.cx
|
||||
.module
|
||||
.declare_function(
|
||||
"pthread_mutex_unlock",
|
||||
Linkage::Import,
|
||||
&cranelift_codegen::ir::Signature {
|
||||
call_conv: fx.cx.module.target_config().default_call_conv,
|
||||
params: vec![AbiParam::new(
|
||||
fx.cx.module.target_config().pointer_type(), /* *mut pthread_mutex_t */
|
||||
)],
|
||||
returns: vec![AbiParam::new(types::I32 /* c_int */)],
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let pthread_mutex_unlock = fx
|
||||
.cx
|
||||
.module
|
||||
.declare_func_in_func(pthread_mutex_unlock, fx.bcx.func);
|
||||
|
||||
let atomic_mutex = fx.cx.module.declare_data_in_func(atomic_mutex, fx.bcx.func);
|
||||
let atomic_mutex = fx
|
||||
.bcx
|
||||
.ins()
|
||||
.global_value(fx.cx.module.target_config().pointer_type(), atomic_mutex);
|
||||
|
||||
fx.bcx.ins().call(pthread_mutex_unlock, &[atomic_mutex]);
|
||||
}
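The file deleted above was cg_clif's stopgap that emulated every atomic intrinsic by taking a process-wide pthread mutex. It can be dropped because newer Cranelift releases provide native atomic instructions. As a purely illustrative sketch (not part of this diff; the exact `InstBuilder` method name, argument order and `AtomicRmwOp` variants are assumptions about the cranelift-codegen/cranelift-frontend API), a fetch-add would be lowered directly instead of going through the global lock:

// Hypothetical replacement: emit a native atomic read-modify-write instead of
// locking __cg_clif_global_atomic_mutex around a plain load/add/store.
use cranelift_codegen::ir::{types, AtomicRmwOp, MemFlags, Value};
use cranelift_frontend::FunctionBuilder;

fn emit_atomic_fetch_add(bcx: &mut FunctionBuilder<'_>, ptr: Value, amount: Value) -> Value {
    // `atomic_rmw` returns the previous value, matching fetch_add semantics
    // with sequentially consistent ordering.
    bcx.ins().atomic_rmw(types::I64, MemFlags::trusted(), AtomicRmwOp::Add, ptr, amount)
}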
@ -8,7 +8,7 @@ use rustc_session::Session;
|
|||
use cranelift_module::FuncId;
|
||||
|
||||
use object::write::*;
|
||||
use object::{RelocationEncoding, RelocationKind, SectionKind, SymbolFlags};
|
||||
use object::{RelocationEncoding, SectionKind, SymbolFlags};
|
||||
|
||||
use cranelift_object::{ObjectBuilder, ObjectModule, ObjectProduct};
|
||||
|
||||
|
@ -22,9 +22,7 @@ pub(crate) trait WriteMetadata {
|
|||
|
||||
impl WriteMetadata for object::write::Object {
|
||||
fn add_rustc_section(&mut self, symbol_name: String, data: Vec<u8>, _is_like_osx: bool) {
|
||||
let segment = self
|
||||
.segment_name(object::write::StandardSegment::Data)
|
||||
.to_vec();
|
||||
let segment = self.segment_name(object::write::StandardSegment::Data).to_vec();
|
||||
let section_id = self.add_section(segment, b".rustc".to_vec(), object::SectionKind::Data);
|
||||
let offset = self.append_section_data(section_id, &data, 1);
|
||||
// For MachO and probably PE this is necessary to prevent the linker from throwing away the
|
||||
|
@ -74,11 +72,7 @@ impl WriteDebugInfo for ObjectProduct {
|
|||
let section_id = self.object.add_section(
|
||||
segment,
|
||||
name,
|
||||
if id == SectionId::EhFrame {
|
||||
SectionKind::ReadOnlyData
|
||||
} else {
|
||||
SectionKind::Debug
|
||||
},
|
||||
if id == SectionId::EhFrame { SectionKind::ReadOnlyData } else { SectionKind::Debug },
|
||||
);
|
||||
self.object
|
||||
.section_mut(section_id)
|
||||
|
@ -118,49 +112,6 @@ impl WriteDebugInfo for ObjectProduct {
|
|||
}
|
||||
}
|
||||
|
||||
// FIXME remove once atomic instructions are implemented in Cranelift.
|
||||
pub(crate) trait AddConstructor {
|
||||
fn add_constructor(&mut self, func_id: FuncId);
|
||||
}
|
||||
|
||||
impl AddConstructor for ObjectProduct {
|
||||
fn add_constructor(&mut self, func_id: FuncId) {
|
||||
let symbol = self.function_symbol(func_id);
|
||||
let segment = self
|
||||
.object
|
||||
.segment_name(object::write::StandardSegment::Data);
|
||||
let init_array_section =
|
||||
self.object
|
||||
.add_section(segment.to_vec(), b".init_array".to_vec(), SectionKind::Data);
|
||||
let address_size = self
|
||||
.object
|
||||
.architecture()
|
||||
.address_size()
|
||||
.expect("address_size must be known")
|
||||
.bytes();
|
||||
self.object.append_section_data(
|
||||
init_array_section,
|
||||
&std::iter::repeat(0)
|
||||
.take(address_size.into())
|
||||
.collect::<Vec<u8>>(),
|
||||
8,
|
||||
);
|
||||
self.object
|
||||
.add_relocation(
|
||||
init_array_section,
|
||||
object::write::Relocation {
|
||||
offset: 0,
|
||||
size: address_size * 8,
|
||||
kind: RelocationKind::Absolute,
|
||||
encoding: RelocationEncoding::Generic,
|
||||
symbol,
|
||||
addend: 0,
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn with_object(sess: &Session, name: &str, f: impl FnOnce(&mut Object)) -> Vec<u8> {
|
||||
let triple = crate::build_isa(sess).triple().clone();
|
||||
|
||||
|
@ -175,10 +126,9 @@ pub(crate) fn with_object(sess: &Session, name: &str, f: impl FnOnce(&mut Object
|
|||
target_lexicon::Architecture::X86_64 => object::Architecture::X86_64,
|
||||
target_lexicon::Architecture::Arm(_) => object::Architecture::Arm,
|
||||
target_lexicon::Architecture::Aarch64(_) => object::Architecture::Aarch64,
|
||||
architecture => sess.fatal(&format!(
|
||||
"target architecture {:?} is unsupported",
|
||||
architecture,
|
||||
)),
|
||||
architecture => {
|
||||
sess.fatal(&format!("target architecture {:?} is unsupported", architecture,))
|
||||
}
|
||||
};
|
||||
let endian = match triple.endianness().unwrap() {
|
||||
target_lexicon::Endianness::Little => object::Endianness::Little,
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
//! Codegen of a single function
|
||||
|
||||
use cranelift_codegen::binemit::{NullStackMapSink, NullTrapSink};
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_middle::ty::adjustment::PointerCast;
|
||||
use rustc_middle::ty::layout::FnAbiExt;
|
||||
|
@ -7,11 +8,7 @@ use rustc_target::abi::call::FnAbi;
|
|||
|
||||
use crate::prelude::*;
|
||||
|
||||
pub(crate) fn codegen_fn<'tcx>(
|
||||
cx: &mut crate::CodegenCx<'tcx, impl Module>,
|
||||
instance: Instance<'tcx>,
|
||||
linkage: Linkage,
|
||||
) {
|
||||
pub(crate) fn codegen_fn<'tcx>(cx: &mut crate::CodegenCx<'_, 'tcx>, instance: Instance<'tcx>) {
|
||||
let tcx = cx.tcx;
|
||||
|
||||
let _inst_guard =
|
||||
|
@ -23,7 +20,7 @@ pub(crate) fn codegen_fn<'tcx>(
|
|||
// Declare function
|
||||
let name = tcx.symbol_name(instance).name.to_string();
|
||||
let sig = get_function_sig(tcx, cx.module.isa().triple(), instance);
|
||||
let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap();
|
||||
let func_id = cx.module.declare_function(&name, Linkage::Local, &sig).unwrap();
|
||||
|
||||
cx.cached_context.clear();
|
||||
|
||||
|
@ -38,9 +35,8 @@ pub(crate) fn codegen_fn<'tcx>(
|
|||
|
||||
// Predefine blocks
|
||||
let start_block = bcx.create_block();
|
||||
let block_map: IndexVec<BasicBlock, Block> = (0..mir.basic_blocks().len())
|
||||
.map(|_| bcx.create_block())
|
||||
.collect();
|
||||
let block_map: IndexVec<BasicBlock, Block> =
|
||||
(0..mir.basic_blocks().len()).map(|_| bcx.create_block()).collect();
|
||||
|
||||
// Make FunctionCx
|
||||
let pointer_type = cx.module.target_config().pointer_type();
|
||||
|
@ -68,22 +64,23 @@ pub(crate) fn codegen_fn<'tcx>(
|
|||
inline_asm_index: 0,
|
||||
};
|
||||
|
||||
let arg_uninhabited = fx.mir.args_iter().any(|arg| {
|
||||
fx.layout_of(fx.monomorphize(&fx.mir.local_decls[arg].ty))
|
||||
.abi
|
||||
.is_uninhabited()
|
||||
});
|
||||
let arg_uninhabited = fx
|
||||
.mir
|
||||
.args_iter()
|
||||
.any(|arg| fx.layout_of(fx.monomorphize(&fx.mir.local_decls[arg].ty)).abi.is_uninhabited());
|
||||
|
||||
if arg_uninhabited {
|
||||
fx.bcx
|
||||
.append_block_params_for_function_params(fx.block_map[START_BLOCK]);
|
||||
if !crate::constant::check_constants(&mut fx) {
|
||||
fx.bcx.append_block_params_for_function_params(fx.block_map[START_BLOCK]);
|
||||
fx.bcx.switch_to_block(fx.block_map[START_BLOCK]);
|
||||
crate::trap::trap_unreachable(&mut fx, "compilation should have been aborted");
|
||||
} else if arg_uninhabited {
|
||||
fx.bcx.append_block_params_for_function_params(fx.block_map[START_BLOCK]);
|
||||
fx.bcx.switch_to_block(fx.block_map[START_BLOCK]);
|
||||
crate::trap::trap_unreachable(&mut fx, "function has uninhabited argument");
|
||||
} else {
|
||||
tcx.sess.time("codegen clif ir", || {
|
||||
tcx.sess.time("codegen prelude", || {
|
||||
crate::abi::codegen_fn_prelude(&mut fx, start_block)
|
||||
});
|
||||
tcx.sess
|
||||
.time("codegen prelude", || crate::abi::codegen_fn_prelude(&mut fx, start_block));
|
||||
codegen_fn_content(&mut fx);
|
||||
});
|
||||
}
|
||||
|
@ -131,11 +128,7 @@ pub(crate) fn codegen_fn<'tcx>(
|
|||
let module = &mut cx.module;
|
||||
tcx.sess.time("define function", || {
|
||||
module
|
||||
.define_function(
|
||||
func_id,
|
||||
context,
|
||||
&mut cranelift_codegen::binemit::NullTrapSink {},
|
||||
)
|
||||
.define_function(func_id, context, &mut NullTrapSink {}, &mut NullStackMapSink {})
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
|
@ -149,15 +142,13 @@ pub(crate) fn codegen_fn<'tcx>(
|
|||
&clif_comments,
|
||||
);
|
||||
|
||||
if let Some(mach_compile_result) = &context.mach_compile_result {
|
||||
if let Some(disasm) = &mach_compile_result.disasm {
|
||||
if let Some(disasm) = &context.mach_compile_result.as_ref().unwrap().disasm {
|
||||
crate::pretty_clif::write_ir_file(
|
||||
tcx,
|
||||
&format!("{}.vcode", tcx.symbol_name(instance).name),
|
||||
|file| file.write_all(disasm.as_bytes()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Define debuginfo for function
|
||||
let isa = cx.module.isa();
|
||||
|
@ -199,16 +190,13 @@ pub(crate) fn verify_func(
|
|||
Some(Box::new(writer)),
|
||||
err,
|
||||
);
|
||||
tcx.sess
|
||||
.fatal(&format!("cranelift verify error:\n{}", pretty_error));
|
||||
tcx.sess.fatal(&format!("cranelift verify error:\n{}", pretty_error));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
||||
crate::constant::check_constants(fx);
|
||||
|
||||
fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, '_>) {
|
||||
for (bb, bb_data) in fx.mir.basic_blocks().iter_enumerated() {
|
||||
let block = fx.get_block(bb);
|
||||
fx.bcx.switch_to_block(block);
|
||||
|
@ -228,14 +216,9 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
|||
codegen_stmt(fx, block, stmt);
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
if fx.clif_comments.enabled() {
|
||||
let mut terminator_head = "\n".to_string();
|
||||
bb_data
|
||||
.terminator()
|
||||
.kind
|
||||
.fmt_head(&mut terminator_head)
|
||||
.unwrap();
|
||||
bb_data.terminator().kind.fmt_head(&mut terminator_head).unwrap();
|
||||
let inst = fx.bcx.func.layout.last_inst(block).unwrap();
|
||||
fx.add_comment(inst, terminator_head);
|
||||
}
|
||||
|
@ -267,13 +250,7 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
|||
TerminatorKind::Return => {
|
||||
crate::abi::codegen_return(fx);
|
||||
}
|
||||
TerminatorKind::Assert {
|
||||
cond,
|
||||
expected,
|
||||
msg,
|
||||
target,
|
||||
cleanup: _,
|
||||
} => {
|
||||
TerminatorKind::Assert { cond, expected, msg, target, cleanup: _ } => {
|
||||
if !fx.tcx.sess.overflow_checks() {
|
||||
if let mir::AssertKind::OverflowNeg(_) = *msg {
|
||||
let target = fx.get_block(*target);
|
||||
|
@ -319,11 +296,7 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
|||
}
|
||||
}
|
||||
|
||||
TerminatorKind::SwitchInt {
|
||||
discr,
|
||||
switch_ty,
|
||||
targets,
|
||||
} => {
|
||||
TerminatorKind::SwitchInt { discr, switch_ty, targets } => {
|
||||
let discr = codegen_operand(fx, discr).load_scalar(fx);
|
||||
|
||||
let use_bool_opt = switch_ty.kind() == fx.tcx.types.bool.kind()
|
||||
|
@ -433,11 +406,7 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
|||
| TerminatorKind::GeneratorDrop => {
|
||||
bug!("shouldn't exist at codegen {:?}", bb_data.terminator());
|
||||
}
|
||||
TerminatorKind::Drop {
|
||||
place,
|
||||
target,
|
||||
unwind: _,
|
||||
} => {
|
||||
TerminatorKind::Drop { place, target, unwind: _ } => {
|
||||
let drop_place = codegen_place(fx, *place);
|
||||
crate::abi::codegen_drop(fx, bb_data.terminator().source_info.span, drop_place);
|
||||
|
||||
|
@ -452,7 +421,7 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
|
|||
}
|
||||
|
||||
fn codegen_stmt<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
#[allow(unused_variables)] cur_block: Block,
|
||||
stmt: &Statement<'tcx>,
|
||||
) {
|
||||
|
@ -460,20 +429,19 @@ fn codegen_stmt<'tcx>(
|
|||
|
||||
fx.set_debug_loc(stmt.source_info);
|
||||
|
||||
#[cfg(false_debug_assertions)]
|
||||
#[cfg(disabled)]
|
||||
match &stmt.kind {
|
||||
StatementKind::StorageLive(..) | StatementKind::StorageDead(..) => {} // Those are not very useful
|
||||
_ => {
|
||||
if fx.clif_comments.enabled() {
|
||||
let inst = fx.bcx.func.layout.last_inst(cur_block).unwrap();
|
||||
fx.add_comment(inst, format!("{:?}", stmt));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match &stmt.kind {
|
||||
StatementKind::SetDiscriminant {
|
||||
place,
|
||||
variant_index,
|
||||
} => {
|
||||
StatementKind::SetDiscriminant { place, variant_index } => {
|
||||
let place = codegen_place(fx, **place);
|
||||
crate::discriminant::codegen_set_discriminant(fx, place, *variant_index);
|
||||
}
|
||||
|
@ -494,16 +462,16 @@ fn codegen_stmt<'tcx>(
|
|||
let val = crate::constant::codegen_tls_ref(fx, def_id, lval.layout());
|
||||
lval.write_cvalue(fx, val);
|
||||
}
|
||||
Rvalue::BinaryOp(bin_op, ref lhs, ref rhs) => {
|
||||
let lhs = codegen_operand(fx, lhs);
|
||||
let rhs = codegen_operand(fx, rhs);
|
||||
Rvalue::BinaryOp(bin_op, ref lhs_rhs) => {
|
||||
let lhs = codegen_operand(fx, &lhs_rhs.0);
|
||||
let rhs = codegen_operand(fx, &lhs_rhs.1);
|
||||
|
||||
let res = crate::num::codegen_binop(fx, bin_op, lhs, rhs);
|
||||
lval.write_cvalue(fx, res);
|
||||
}
|
||||
Rvalue::CheckedBinaryOp(bin_op, ref lhs, ref rhs) => {
|
||||
let lhs = codegen_operand(fx, lhs);
|
||||
let rhs = codegen_operand(fx, rhs);
|
||||
Rvalue::CheckedBinaryOp(bin_op, ref lhs_rhs) => {
|
||||
let lhs = codegen_operand(fx, &lhs_rhs.0);
|
||||
let rhs = codegen_operand(fx, &lhs_rhs.1);
|
||||
|
||||
let res = if !fx.tcx.sess.overflow_checks() {
|
||||
let val =
|
||||
|
@ -594,19 +562,11 @@ fn codegen_stmt<'tcx>(
let from_ty = operand.layout().ty;
let to_ty = fx.monomorphize(to_ty);

fn is_fat_ptr<'tcx>(
fx: &FunctionCx<'_, 'tcx, impl Module>,
ty: Ty<'tcx>,
) -> bool {
fn is_fat_ptr<'tcx>(fx: &FunctionCx<'_, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
ty.builtin_deref(true)
.map(
|ty::TypeAndMut {
ty: pointee_ty,
mutbl: _,
}| {
.map(|ty::TypeAndMut { ty: pointee_ty, mutbl: _ }| {
has_ptr_meta(fx.tcx, pointee_ty)
},
)
})
.unwrap_or(false)
}

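For reference only (not part of the diff): `is_fat_ptr` above asks whether the pointee type carries extra metadata, which is exactly what makes slice and trait-object references twice the size of a thin pointer. A standalone check using only std:

// Thin vs. fat pointers: references to sized types are one usize wide,
// references to slices or trait objects also carry a length or vtable pointer.
use std::mem::size_of;

fn main() {
    assert_eq!(size_of::<&u64>(), size_of::<usize>()); // thin
    assert_eq!(size_of::<&[u8]>(), 2 * size_of::<usize>()); // ptr + len
    assert_eq!(size_of::<&dyn std::fmt::Debug>(), 2 * size_of::<usize>()); // ptr + vtable
}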
@ -626,50 +586,22 @@ fn codegen_stmt<'tcx>(
|
|||
ty::Uint(_) | ty::Int(_) => {}
|
||||
_ => unreachable!("cast adt {} -> {}", from_ty, to_ty),
|
||||
}
|
||||
let to_clif_ty = fx.clif_type(to_ty).unwrap();
|
||||
|
||||
use rustc_target::abi::{Int, TagEncoding, Variants};
|
||||
let discriminant = crate::discriminant::codegen_get_discriminant(
|
||||
fx,
|
||||
operand,
|
||||
fx.layout_of(operand.layout().ty.discriminant_ty(fx.tcx)),
|
||||
)
|
||||
.load_scalar(fx);
|
||||
|
||||
match operand.layout().variants {
|
||||
Variants::Single { index } => {
|
||||
let discr = operand
|
||||
.layout()
|
||||
.ty
|
||||
.discriminant_for_variant(fx.tcx, index)
|
||||
.unwrap();
|
||||
let discr = if discr.ty.is_signed() {
|
||||
fx.layout_of(discr.ty).size.sign_extend(discr.val)
|
||||
} else {
|
||||
discr.val
|
||||
};
|
||||
let discr = discr.into();
|
||||
|
||||
let discr = CValue::const_val(fx, fx.layout_of(to_ty), discr);
|
||||
lval.write_cvalue(fx, discr);
|
||||
}
|
||||
Variants::Multiple {
|
||||
ref tag,
|
||||
tag_field,
|
||||
tag_encoding: TagEncoding::Direct,
|
||||
variants: _,
|
||||
} => {
|
||||
let cast_to = fx.clif_type(dest_layout.ty).unwrap();
|
||||
|
||||
// Read the tag/niche-encoded discriminant from memory.
|
||||
let encoded_discr =
|
||||
operand.value_field(fx, mir::Field::new(tag_field));
|
||||
let encoded_discr = encoded_discr.load_scalar(fx);
|
||||
|
||||
// Decode the discriminant (specifically if it's niche-encoded).
|
||||
let signed = match tag.value {
|
||||
Int(_, signed) => signed,
|
||||
_ => false,
|
||||
};
|
||||
let val = clif_intcast(fx, encoded_discr, cast_to, signed);
|
||||
let val = CValue::by_val(val, dest_layout);
|
||||
lval.write_cvalue(fx, val);
|
||||
}
|
||||
Variants::Multiple { .. } => unreachable!(),
|
||||
}
|
||||
let res = crate::cast::clif_intcast(
|
||||
fx,
|
||||
discriminant,
|
||||
to_clif_ty,
|
||||
to_ty.is_signed(),
|
||||
);
|
||||
lval.write_cvalue(fx, CValue::by_val(res, dest_layout));
|
||||
} else {
|
||||
let to_clif_ty = fx.clif_type(to_ty).unwrap();
|
||||
let from = operand.load_scalar(fx);
|
||||
|
@ -725,13 +657,14 @@ fn codegen_stmt<'tcx>(
|
|||
.val
|
||||
.try_to_bits(fx.tcx.data_layout.pointer_size)
|
||||
.unwrap();
|
||||
if fx.clif_type(operand.layout().ty) == Some(types::I8) {
|
||||
if operand.layout().size.bytes() == 0 {
|
||||
// Do nothing for ZST's
|
||||
} else if fx.clif_type(operand.layout().ty) == Some(types::I8) {
|
||||
let times = fx.bcx.ins().iconst(fx.pointer_type, times as i64);
|
||||
// FIXME use emit_small_memset where possible
|
||||
let addr = lval.to_ptr().get_addr(fx);
|
||||
let val = operand.load_scalar(fx);
|
||||
fx.bcx
|
||||
.call_memset(fx.cx.module.target_config(), addr, val, times);
|
||||
fx.bcx.call_memset(fx.cx.module.target_config(), addr, val, times);
|
||||
} else {
|
||||
let loop_block = fx.bcx.create_block();
|
||||
let loop_block2 = fx.bcx.create_block();
|
||||
|
@ -766,18 +699,12 @@ fn codegen_stmt<'tcx>(
|
|||
let content_ty = fx.monomorphize(content_ty);
|
||||
let layout = fx.layout_of(content_ty);
|
||||
let llsize = fx.bcx.ins().iconst(usize_type, layout.size.bytes() as i64);
|
||||
let llalign = fx
|
||||
.bcx
|
||||
.ins()
|
||||
.iconst(usize_type, layout.align.abi.bytes() as i64);
|
||||
let llalign = fx.bcx.ins().iconst(usize_type, layout.align.abi.bytes() as i64);
|
||||
let box_layout = fx.layout_of(fx.tcx.mk_box(content_ty));
|
||||
|
||||
// Allocate space:
|
||||
let def_id = match fx
|
||||
.tcx
|
||||
.lang_items()
|
||||
.require(rustc_hir::LangItem::ExchangeMalloc)
|
||||
{
|
||||
let def_id =
|
||||
match fx.tcx.lang_items().require(rustc_hir::LangItem::ExchangeMalloc) {
|
||||
Ok(id) => id,
|
||||
Err(s) => {
|
||||
fx.tcx
|
||||
|
@ -792,10 +719,11 @@ fn codegen_stmt<'tcx>(
|
|||
lval.write_cvalue(fx, CValue::by_val(ptr, box_layout));
|
||||
}
|
||||
Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
|
||||
assert!(lval
|
||||
.layout()
|
||||
assert!(
|
||||
lval.layout()
|
||||
.ty
|
||||
.is_sized(fx.tcx.at(stmt.source_info.span), ParamEnv::reveal_all()));
|
||||
.is_sized(fx.tcx.at(stmt.source_info.span), ParamEnv::reveal_all())
|
||||
);
|
||||
let ty_size = fx.layout_of(fx.monomorphize(ty)).size.bytes();
|
||||
let val =
|
||||
CValue::const_val(fx, fx.layout_of(fx.tcx.types.usize), ty_size.into());
|
||||
|
@ -823,11 +751,7 @@ fn codegen_stmt<'tcx>(
|
|||
|
||||
StatementKind::LlvmInlineAsm(asm) => {
|
||||
use rustc_span::symbol::Symbol;
|
||||
let LlvmInlineAsm {
|
||||
asm,
|
||||
outputs,
|
||||
inputs,
|
||||
} = &**asm;
|
||||
let LlvmInlineAsm { asm, outputs, inputs } = &**asm;
|
||||
let rustc_hir::LlvmInlineAsmInner {
|
||||
asm: asm_code, // Name
|
||||
outputs: output_names, // Vec<LlvmInlineAsmOutput>
|
||||
|
@ -843,15 +767,9 @@ fn codegen_stmt<'tcx>(
|
|||
// Black box
|
||||
}
|
||||
"mov %rbx, %rsi\n cpuid\n xchg %rbx, %rsi" => {
|
||||
assert_eq!(
|
||||
input_names,
|
||||
&[Symbol::intern("{eax}"), Symbol::intern("{ecx}")]
|
||||
);
|
||||
assert_eq!(input_names, &[Symbol::intern("{eax}"), Symbol::intern("{ecx}")]);
|
||||
assert_eq!(output_names.len(), 4);
|
||||
for (i, c) in (&["={eax}", "={esi}", "={ecx}", "={edx}"])
|
||||
.iter()
|
||||
.enumerate()
|
||||
{
|
||||
for (i, c) in (&["={eax}", "={esi}", "={ecx}", "={edx}"]).iter().enumerate() {
|
||||
assert_eq!(&output_names[i].constraint.as_str(), c);
|
||||
assert!(!output_names[i].is_rw);
|
||||
assert!(!output_names[i].is_indirect);
|
||||
|
@ -897,12 +815,7 @@ fn codegen_stmt<'tcx>(
|
|||
crate::trap::trap_unimplemented(fx, "_xgetbv arch intrinsic is not supported");
|
||||
}
|
||||
// ___chkstk, ___chkstk_ms and __alloca are only used on Windows
|
||||
_ if fx
|
||||
.tcx
|
||||
.symbol_name(fx.instance)
|
||||
.name
|
||||
.starts_with("___chkstk") =>
|
||||
{
|
||||
_ if fx.tcx.symbol_name(fx.instance).name.starts_with("___chkstk") => {
|
||||
crate::trap::trap_unimplemented(fx, "Stack probes are not supported");
|
||||
}
|
||||
_ if fx.tcx.symbol_name(fx.instance).name == "__alloca" => {
|
||||
|
@ -919,30 +832,38 @@ fn codegen_stmt<'tcx>(
}
}
StatementKind::Coverage { .. } => fx.tcx.sess.fatal("-Zcoverage is unimplemented"),
StatementKind::CopyNonOverlapping(inner) => {
let dst = codegen_operand(fx, &inner.dst);
let pointee = dst
.layout()
.pointee_info_at(fx, rustc_target::abi::Size::ZERO)
.expect("Expected pointer");
let dst = dst.load_scalar(fx);
let src = codegen_operand(fx, &inner.src).load_scalar(fx);
let count = codegen_operand(fx, &inner.count).load_scalar(fx);
let elem_size: u64 = pointee.size.bytes();
let bytes =
if elem_size != 1 { fx.bcx.ins().imul_imm(count, elem_size as i64) } else { count };
fx.bcx.call_memcpy(fx.cx.module.target_config(), dst, src, bytes);
}
}
}

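The new `CopyNonOverlapping` arm above multiplies the element count by the element size and then emits a memcpy. In surface-Rust terms (illustrative only, not taken from the diff), the MIR statement has the same contract as `ptr::copy_nonoverlapping`:

// The statement copies `count` elements, i.e. count * size_of::<T>() bytes,
// between buffers the caller guarantees do not overlap.
use std::ptr;

unsafe fn copy_elems<T>(src: *const T, dst: *mut T, count: usize) {
    ptr::copy_nonoverlapping(src, dst, count);
}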
||||
fn codegen_array_len<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
place: CPlace<'tcx>,
|
||||
) -> Value {
|
||||
fn codegen_array_len<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, place: CPlace<'tcx>) -> Value {
|
||||
match *place.layout().ty.kind() {
|
||||
ty::Array(_elem_ty, len) => {
|
||||
let len = fx
|
||||
.monomorphize(len)
|
||||
.eval_usize(fx.tcx, ParamEnv::reveal_all()) as i64;
|
||||
let len = fx.monomorphize(len).eval_usize(fx.tcx, ParamEnv::reveal_all()) as i64;
|
||||
fx.bcx.ins().iconst(fx.pointer_type, len)
|
||||
}
|
||||
ty::Slice(_elem_ty) => place
|
||||
.to_ptr_maybe_unsized()
|
||||
.1
|
||||
.expect("Length metadata for slice place"),
|
||||
ty::Slice(_elem_ty) => {
|
||||
place.to_ptr_maybe_unsized().1.expect("Length metadata for slice place")
|
||||
}
|
||||
_ => bug!("Rvalue::Len({:?})", place),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_place<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
place: Place<'tcx>,
|
||||
) -> CPlace<'tcx> {
|
||||
let mut cplace = fx.get_local_place(place.local);
|
||||
|
@ -959,11 +880,7 @@ pub(crate) fn codegen_place<'tcx>(
|
|||
let index = fx.get_local_place(local).to_cvalue(fx).load_scalar(fx);
|
||||
cplace = cplace.place_index(fx, index);
|
||||
}
|
||||
PlaceElem::ConstantIndex {
|
||||
offset,
|
||||
min_length: _,
|
||||
from_end,
|
||||
} => {
|
||||
PlaceElem::ConstantIndex { offset, min_length: _, from_end } => {
|
||||
let offset: u64 = offset;
|
||||
let index = if !from_end {
|
||||
fx.bcx.ins().iconst(fx.pointer_type, offset as i64)
|
||||
|
@ -1014,7 +931,7 @@ pub(crate) fn codegen_place<'tcx>(
|
|||
}
|
||||
|
||||
pub(crate) fn codegen_operand<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
operand: &Operand<'tcx>,
|
||||
) -> CValue<'tcx> {
|
||||
match operand {
|
||||
|
@ -1026,34 +943,24 @@ pub(crate) fn codegen_operand<'tcx>(
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_panic<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
msg_str: &str,
|
||||
span: Span,
|
||||
) {
|
||||
pub(crate) fn codegen_panic<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, msg_str: &str, span: Span) {
|
||||
let location = fx.get_caller_location(span).load_scalar(fx);
|
||||
|
||||
let msg_ptr = fx.anonymous_str("assert", msg_str);
|
||||
let msg_len = fx
|
||||
.bcx
|
||||
.ins()
|
||||
.iconst(fx.pointer_type, i64::try_from(msg_str.len()).unwrap());
|
||||
let msg_len = fx.bcx.ins().iconst(fx.pointer_type, i64::try_from(msg_str.len()).unwrap());
|
||||
let args = [msg_ptr, msg_len, location];
|
||||
|
||||
codegen_panic_inner(fx, rustc_hir::LangItem::Panic, &args, span);
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_panic_inner<'tcx>(
|
||||
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
lang_item: rustc_hir::LangItem,
|
||||
args: &[Value],
|
||||
span: Span,
|
||||
) {
|
||||
let def_id = fx
|
||||
.tcx
|
||||
.lang_items()
|
||||
.require(lang_item)
|
||||
.unwrap_or_else(|s| fx.tcx.sess.span_fatal(span, &s));
|
||||
let def_id =
|
||||
fx.tcx.lang_items().require(lang_item).unwrap_or_else(|s| fx.tcx.sess.span_fatal(span, &s));
|
||||
|
||||
let instance = Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx);
|
||||
let symbol_name = fx.tcx.symbol_name(instance).name;
|
||||
|
|
|
@ -27,13 +27,7 @@ impl rustc_driver::Callbacks for CraneliftPassesCallbacks {
config.opts.cg.panic = Some(PanicStrategy::Abort);
config.opts.debugging_opts.panic_abort_tests = true;
config.opts.maybe_sysroot = Some(config.opts.maybe_sysroot.clone().unwrap_or_else(|| {
std::env::current_exe()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap()
.to_owned()
std::env::current_exe().unwrap().parent().unwrap().parent().unwrap().to_owned()
}));
}
}
|
||||
|
|
|
@ -46,15 +46,8 @@ impl rustc_driver::Callbacks for CraneliftPassesCallbacks {
|
|||
|
||||
config.opts.cg.panic = Some(PanicStrategy::Abort);
|
||||
config.opts.debugging_opts.panic_abort_tests = true;
|
||||
config.opts.maybe_sysroot = Some(
|
||||
std::env::current_exe()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap()
|
||||
.to_owned(),
|
||||
);
|
||||
config.opts.maybe_sysroot =
|
||||
Some(std::env::current_exe().unwrap().parent().unwrap().parent().unwrap().to_owned());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -3,7 +3,7 @@
use crate::prelude::*;

pub(crate) fn clif_intcast(
fx: &mut FunctionCx<'_, '_, impl Module>,
fx: &mut FunctionCx<'_, '_, '_>,
val: Value,
to: Type,
signed: bool,

@@ -40,18 +40,14 @@ pub(crate) fn clif_intcast(
// reduce
(types::I128, _) => {
let (lsb, _msb) = fx.bcx.ins().isplit(val);
if to == types::I64 {
lsb
} else {
fx.bcx.ins().ireduce(to, lsb)
}
if to == types::I64 { lsb } else { fx.bcx.ins().ireduce(to, lsb) }
}
(_, _) => fx.bcx.ins().ireduce(to, val),
}
}

pub(crate) fn clif_int_or_float_cast(
fx: &mut FunctionCx<'_, '_, impl Module>,
fx: &mut FunctionCx<'_, '_, '_>,
from: Value,
from_signed: bool,
to_ty: Type,

@@ -87,11 +83,7 @@ pub(crate) fn clif_int_or_float_cast(
},
);

let from_rust_ty = if from_signed {
fx.tcx.types.i128
} else {
fx.tcx.types.u128
};
let from_rust_ty = if from_signed { fx.tcx.types.i128 } else { fx.tcx.types.u128 };

let to_rust_ty = match to_ty {
types::F32 => fx.tcx.types.f32,

@@ -100,11 +92,7 @@ pub(crate) fn clif_int_or_float_cast(
};

return fx
.easy_call(
&name,
&[CValue::by_val(from, fx.layout_of(from_rust_ty))],
to_rust_ty,
)
.easy_call(&name, &[CValue::by_val(from, fx.layout_of(from_rust_ty))], to_rust_ty)
.load_scalar(fx);
}

@@ -138,18 +126,10 @@ pub(crate) fn clif_int_or_float_cast(
_ => unreachable!(),
};

let to_rust_ty = if to_signed {
fx.tcx.types.i128
} else {
fx.tcx.types.u128
};
let to_rust_ty = if to_signed { fx.tcx.types.i128 } else { fx.tcx.types.u128 };

return fx
.easy_call(
&name,
&[CValue::by_val(from, fx.layout_of(from_rust_ty))],
to_rust_ty,
)
.easy_call(&name, &[CValue::by_val(from, fx.layout_of(from_rust_ty))], to_rust_ty)
.load_scalar(fx);
}
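Standalone check (not from the commit) of the semantics the `(types::I128, _)` reduce arm relies on: truncating a 128-bit integer keeps only its low bits, so taking the `lsb` half from `isplit` and optionally `ireduce`-ing it further is enough.

```rust
fn main() {
    let x: u128 = 0x1234_5678_9abc_def0_0fed_cba9_8765_4321;
    // Truncation to 64 bits keeps the low half, exactly what `isplit` exposes as `lsb`.
    assert_eq!(x as u64, (x & u64::MAX as u128) as u64);
    // Reducing further (e.g. to 32 bits) only drops more high bits, matching `ireduce`.
    assert_eq!(x as u32, (x as u64) as u32);
    println!("truncation semantics hold");
}
```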
@@ -5,13 +5,17 @@ use cranelift_codegen::ir::ArgumentPurpose;
use crate::prelude::*;

pub(crate) fn maybe_codegen<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
fx: &mut FunctionCx<'_, '_, 'tcx>,
bin_op: BinOp,
checked: bool,
lhs: CValue<'tcx>,
rhs: CValue<'tcx>,
) -> Option<CValue<'tcx>> {
if lhs.layout().ty != fx.tcx.types.u128 && lhs.layout().ty != fx.tcx.types.i128 {
if lhs.layout().ty != fx.tcx.types.u128
&& lhs.layout().ty != fx.tcx.types.i128
&& rhs.layout().ty != fx.tcx.types.u128
&& rhs.layout().ty != fx.tcx.types.i128
{
return None;
}

@@ -27,27 +31,57 @@ pub(crate) fn maybe_codegen<'tcx>(
}
BinOp::Add | BinOp::Sub if !checked => None,
BinOp::Mul if !checked => {
let val_ty = if is_signed {
fx.tcx.types.i128
let val_ty = if is_signed { fx.tcx.types.i128 } else { fx.tcx.types.u128 };
if fx.tcx.sess.target.is_like_windows {
let ret_place = CPlace::new_stack_slot(fx, lhs.layout());
let (lhs_ptr, lhs_extra) = lhs.force_stack(fx);
let (rhs_ptr, rhs_extra) = rhs.force_stack(fx);
assert!(lhs_extra.is_none());
assert!(rhs_extra.is_none());
let args =
[ret_place.to_ptr().get_addr(fx), lhs_ptr.get_addr(fx), rhs_ptr.get_addr(fx)];
fx.lib_call(
"__multi3",
vec![
AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
AbiParam::new(pointer_ty(fx.tcx)),
AbiParam::new(pointer_ty(fx.tcx)),
],
vec![],
&args,
);
Some(ret_place.to_cvalue(fx))
} else {
fx.tcx.types.u128
};
Some(fx.easy_call("__multi3", &[lhs, rhs], val_ty))
}
}
BinOp::Add | BinOp::Sub | BinOp::Mul => {
assert!(checked);
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
let out_place = CPlace::new_stack_slot(fx, fx.layout_of(out_ty));
let param_types = vec![
let (param_types, args) = if fx.tcx.sess.target.is_like_windows {
let (lhs_ptr, lhs_extra) = lhs.force_stack(fx);
let (rhs_ptr, rhs_extra) = rhs.force_stack(fx);
assert!(lhs_extra.is_none());
assert!(rhs_extra.is_none());
(
vec![
AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
AbiParam::new(pointer_ty(fx.tcx)),
AbiParam::new(pointer_ty(fx.tcx)),
],
[out_place.to_ptr().get_addr(fx), lhs_ptr.get_addr(fx), rhs_ptr.get_addr(fx)],
)
} else {
(
vec![
AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
AbiParam::new(types::I128),
AbiParam::new(types::I128),
];
let args = [
out_place.to_ptr().get_addr(fx),
lhs.load_scalar(fx),
rhs.load_scalar(fx),
];
],
[out_place.to_ptr().get_addr(fx), lhs.load_scalar(fx), rhs.load_scalar(fx)],
)
};
let name = match (bin_op, is_signed) {
(BinOp::Add, false) => "__rust_u128_addo",
(BinOp::Add, true) => "__rust_i128_addo",

@@ -61,20 +95,33 @@ pub(crate) fn maybe_codegen<'tcx>(
Some(out_place.to_cvalue(fx))
}
BinOp::Offset => unreachable!("offset should only be used on pointers, not 128bit ints"),
BinOp::Div => {
BinOp::Div | BinOp::Rem => {
assert!(!checked);
if is_signed {
Some(fx.easy_call("__divti3", &[lhs, rhs], fx.tcx.types.i128))
let name = match (bin_op, is_signed) {
(BinOp::Div, false) => "__udivti3",
(BinOp::Div, true) => "__divti3",
(BinOp::Rem, false) => "__umodti3",
(BinOp::Rem, true) => "__modti3",
_ => unreachable!(),
};
if fx.tcx.sess.target.is_like_windows {
let (lhs_ptr, lhs_extra) = lhs.force_stack(fx);
let (rhs_ptr, rhs_extra) = rhs.force_stack(fx);
assert!(lhs_extra.is_none());
assert!(rhs_extra.is_none());
let args = [lhs_ptr.get_addr(fx), rhs_ptr.get_addr(fx)];
let ret = fx.lib_call(
name,
vec![AbiParam::new(pointer_ty(fx.tcx)), AbiParam::new(pointer_ty(fx.tcx))],
vec![AbiParam::new(types::I64X2)],
&args,
)[0];
// FIXME use bitcast instead of store to get from i64x2 to i128
let ret_place = CPlace::new_stack_slot(fx, lhs.layout());
ret_place.to_ptr().store(fx, ret, MemFlags::trusted());
Some(ret_place.to_cvalue(fx))
} else {
Some(fx.easy_call("__udivti3", &[lhs, rhs], fx.tcx.types.u128))
}
}
BinOp::Rem => {
assert!(!checked);
if is_signed {
Some(fx.easy_call("__modti3", &[lhs, rhs], fx.tcx.types.i128))
} else {
Some(fx.easy_call("__umodti3", &[lhs, rhs], fx.tcx.types.u128))
Some(fx.easy_call(name, &[lhs, rhs], lhs.layout().ty))
}
}
BinOp::Lt | BinOp::Le | BinOp::Eq | BinOp::Ge | BinOp::Gt | BinOp::Ne => {

@@ -97,70 +144,23 @@ pub(crate) fn maybe_codegen<'tcx>(
None
};

// Optimize `val >> 64`, because compiler_builtins uses it to deconstruct an 128bit
// integer into its lsb and msb.
// https://github.com/rust-lang-nursery/compiler-builtins/blob/79a6a1603d5672cbb9187ff41ff4d9b5048ac1cb/src/int/mod.rs#L217
if resolve_value_imm(fx.bcx.func, rhs_val) == Some(64) {
let (lhs_lsb, lhs_msb) = fx.bcx.ins().isplit(lhs_val);
let all_zeros = fx.bcx.ins().iconst(types::I64, 0);
let val = match (bin_op, is_signed) {
(BinOp::Shr, false) => {
let val = fx.bcx.ins().iconcat(lhs_msb, all_zeros);
Some(CValue::by_val(val, fx.layout_of(fx.tcx.types.u128)))
}
(BinOp::Shr, true) => {
let sign = fx.bcx.ins().icmp_imm(IntCC::SignedLessThan, lhs_msb, 0);
let all_ones = fx.bcx.ins().iconst(types::I64, u64::MAX as i64);
let all_sign_bits = fx.bcx.ins().select(sign, all_zeros, all_ones);

let val = fx.bcx.ins().iconcat(lhs_msb, all_sign_bits);
Some(CValue::by_val(val, fx.layout_of(fx.tcx.types.i128)))
}
(BinOp::Shl, _) => {
let val_ty = if is_signed {
fx.tcx.types.i128
} else {
fx.tcx.types.u128
};
let val = fx.bcx.ins().iconcat(all_zeros, lhs_lsb);
Some(CValue::by_val(val, fx.layout_of(val_ty)))
}
_ => None,
};
if let Some(val) = val {
if let Some(is_overflow) = is_overflow {
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
let val = val.load_scalar(fx);
return Some(CValue::by_val_pair(val, is_overflow, fx.layout_of(out_ty)));
} else {
return Some(val);
}
}
}

let truncated_rhs = clif_intcast(fx, rhs_val, types::I32, false);
let truncated_rhs = CValue::by_val(truncated_rhs, fx.layout_of(fx.tcx.types.u32));
let val = match (bin_op, is_signed) {
(BinOp::Shl, false) => {
fx.easy_call("__ashlti3", &[lhs, truncated_rhs], fx.tcx.types.u128)
let val = match bin_op {
BinOp::Shl => fx.bcx.ins().ishl(lhs_val, truncated_rhs),
BinOp::Shr => {
if is_signed {
fx.bcx.ins().sshr(lhs_val, truncated_rhs)
} else {
fx.bcx.ins().ushr(lhs_val, truncated_rhs)
}
(BinOp::Shl, true) => {
fx.easy_call("__ashlti3", &[lhs, truncated_rhs], fx.tcx.types.i128)
}
(BinOp::Shr, false) => {
fx.easy_call("__lshrti3", &[lhs, truncated_rhs], fx.tcx.types.u128)
}
(BinOp::Shr, true) => {
fx.easy_call("__ashrti3", &[lhs, truncated_rhs], fx.tcx.types.i128)
}
(_, _) => unreachable!(),
_ => unreachable!(),
};
if let Some(is_overflow) = is_overflow {
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
let val = val.load_scalar(fx);
Some(CValue::by_val_pair(val, is_overflow, fx.layout_of(out_ty)))
} else {
Some(val)
Some(CValue::by_val(val, lhs.layout()))
}
}
}
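Standalone sketch (not part of the commit) of the libcall selection the `BinOp::Div | BinOp::Rem` arm above performs: 128-bit division and remainder are routed to compiler_builtins helpers chosen by operation and signedness. The local `BinOp` enum is a stand-in for rustc's MIR `BinOp`, restricted to the two cases handled here.

```rust
// Stand-in for rustc_middle::mir::BinOp, limited to the cases this path lowers.
#[derive(Clone, Copy)]
enum BinOp {
    Div,
    Rem,
}

// Mirrors the `match (bin_op, is_signed)` table in the diff above.
fn i128_div_rem_libcall(bin_op: BinOp, is_signed: bool) -> &'static str {
    match (bin_op, is_signed) {
        (BinOp::Div, false) => "__udivti3",
        (BinOp::Div, true) => "__divti3",
        (BinOp::Rem, false) => "__umodti3",
        (BinOp::Rem, true) => "__modti3",
    }
}

fn main() {
    assert_eq!(i128_div_rem_libcall(BinOp::Div, false), "__udivti3");
    assert_eq!(i128_div_rem_libcall(BinOp::Rem, true), "__modti3");
}
```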
@@ -3,8 +3,6 @@ use rustc_target::abi::call::FnAbi;
use rustc_target::abi::{Integer, Primitive};
use rustc_target::spec::{HasTargetSpec, Target};

use cranelift_codegen::ir::{InstructionData, Opcode, ValueDef};

use crate::prelude::*;

pub(crate) fn pointer_ty(tcx: TyCtxt<'_>) -> types::Type {

@@ -56,11 +54,7 @@ fn clif_type_from_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option<types::Typ
FloatTy::F64 => types::F64,
},
ty::FnPtr(_) => pointer_ty(tcx),
ty::RawPtr(TypeAndMut {
ty: pointee_ty,
mutbl: _,
})
| ty::Ref(_, pointee_ty, _) => {
ty::RawPtr(TypeAndMut { ty: pointee_ty, mutbl: _ }) | ty::Ref(_, pointee_ty, _) => {
if has_ptr_meta(tcx, pointee_ty) {
return None;
} else {

@@ -99,11 +93,7 @@ fn clif_pair_type_from_ty<'tcx>(
}
(a, b)
}
ty::RawPtr(TypeAndMut {
ty: pointee_ty,
mutbl: _,
})
| ty::Ref(_, pointee_ty, _) => {
ty::RawPtr(TypeAndMut { ty: pointee_ty, mutbl: _ }) | ty::Ref(_, pointee_ty, _) => {
if has_ptr_meta(tcx, pointee_ty) {
(pointer_ty(tcx), pointer_ty(tcx))
} else {

@@ -116,15 +106,8 @@ fn clif_pair_type_from_ty<'tcx>(

/// Is a pointer to this type a fat ptr?
pub(crate) fn has_ptr_meta<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
let ptr_ty = tcx.mk_ptr(TypeAndMut {
ty,
mutbl: rustc_hir::Mutability::Not,
});
match &tcx
.layout_of(ParamEnv::reveal_all().and(ptr_ty))
.unwrap()
.abi
{
let ptr_ty = tcx.mk_ptr(TypeAndMut { ty, mutbl: rustc_hir::Mutability::Not });
match &tcx.layout_of(ParamEnv::reveal_all().and(ptr_ty)).unwrap().abi {
Abi::Scalar(_) => false,
Abi::ScalarPair(_, _) => true,
abi => unreachable!("Abi of ptr to {:?} is {:?}???", ty, abi),

@@ -132,7 +115,7 @@ pub(crate) fn has_ptr_meta<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
}

pub(crate) fn codegen_icmp_imm(
fx: &mut FunctionCx<'_, '_, impl Module>,
fx: &mut FunctionCx<'_, '_, '_>,
intcc: IntCC,
lhs: Value,
rhs: i128,

@@ -175,51 +158,6 @@ pub(crate) fn codegen_icmp_imm(
}
}

fn resolve_normal_value_imm(func: &Function, val: Value) -> Option<i64> {
if let ValueDef::Result(inst, 0 /*param*/) = func.dfg.value_def(val) {
if let InstructionData::UnaryImm {
opcode: Opcode::Iconst,
imm,
} = func.dfg[inst]
{
Some(imm.into())
} else {
None
}
} else {
None
}
}

fn resolve_128bit_value_imm(func: &Function, val: Value) -> Option<u128> {
let (lsb, msb) = if let ValueDef::Result(inst, 0 /*param*/) = func.dfg.value_def(val) {
if let InstructionData::Binary {
opcode: Opcode::Iconcat,
args: [lsb, msb],
} = func.dfg[inst]
{
(lsb, msb)
} else {
return None;
}
} else {
return None;
};

let lsb = u128::from(resolve_normal_value_imm(func, lsb)? as u64);
let msb = u128::from(resolve_normal_value_imm(func, msb)? as u64);

Some(msb << 64 | lsb)
}

pub(crate) fn resolve_value_imm(func: &Function, val: Value) -> Option<u128> {
if func.dfg.value_type(val) == types::I128 {
resolve_128bit_value_imm(func, val)
} else {
resolve_normal_value_imm(func, val).map(|imm| u128::from(imm as u64))
}
}

pub(crate) fn type_min_max_value(
bcx: &mut FunctionBuilder<'_>,
ty: Type,

@@ -288,8 +226,8 @@ pub(crate) fn type_sign(ty: Ty<'_>) -> bool {
}
}

pub(crate) struct FunctionCx<'clif, 'tcx, M: Module> {
pub(crate) cx: &'clif mut crate::CodegenCx<'tcx, M>,
pub(crate) struct FunctionCx<'m, 'clif, 'tcx> {
pub(crate) cx: &'clif mut crate::CodegenCx<'m, 'tcx>,
pub(crate) tcx: TyCtxt<'tcx>,
pub(crate) pointer_type: Type, // Cached from module

@@ -316,7 +254,7 @@ pub(crate) struct FunctionCx<'clif, 'tcx, M: Module> {
pub(crate) inline_asm_index: u32,
}

impl<'tcx, M: Module> LayoutOf for FunctionCx<'_, 'tcx, M> {
impl<'tcx> LayoutOf for FunctionCx<'_, '_, 'tcx> {
type Ty = Ty<'tcx>;
type TyAndLayout = TyAndLayout<'tcx>;

@@ -325,31 +263,31 @@ impl<'tcx, M: Module> LayoutOf for FunctionCx<'_, 'tcx, M> {
}
}

impl<'tcx, M: Module> layout::HasTyCtxt<'tcx> for FunctionCx<'_, 'tcx, M> {
impl<'tcx> layout::HasTyCtxt<'tcx> for FunctionCx<'_, '_, 'tcx> {
fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.tcx
}
}

impl<'tcx, M: Module> rustc_target::abi::HasDataLayout for FunctionCx<'_, 'tcx, M> {
impl<'tcx> rustc_target::abi::HasDataLayout for FunctionCx<'_, '_, 'tcx> {
fn data_layout(&self) -> &rustc_target::abi::TargetDataLayout {
&self.tcx.data_layout
}
}

impl<'tcx, M: Module> layout::HasParamEnv<'tcx> for FunctionCx<'_, 'tcx, M> {
impl<'tcx> layout::HasParamEnv<'tcx> for FunctionCx<'_, '_, 'tcx> {
fn param_env(&self) -> ParamEnv<'tcx> {
ParamEnv::reveal_all()
}
}

impl<'tcx, M: Module> HasTargetSpec for FunctionCx<'_, 'tcx, M> {
impl<'tcx> HasTargetSpec for FunctionCx<'_, '_, 'tcx> {
fn target_spec(&self) -> &Target {
&self.tcx.sess.target
}
}

impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
impl<'tcx> FunctionCx<'_, '_, 'tcx> {
pub(crate) fn monomorphize<T>(&self, value: T) -> T
where
T: TypeFoldable<'tcx> + Copy,

@@ -416,20 +354,14 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
let msg_id = self
.cx
.module
.declare_data(
&format!("__{}_{:08x}", prefix, msg_hash),
Linkage::Local,
false,
false,
)
.declare_data(&format!("__{}_{:08x}", prefix, msg_hash), Linkage::Local, false, false)
.unwrap();

// Ignore DuplicateDefinition error, as the data will be the same
let _ = self.cx.module.define_data(msg_id, &data_ctx);

let local_msg_id = self.cx.module.declare_data_in_func(msg_id, self.bcx.func);
#[cfg(debug_assertions)]
{
if self.clif_comments.enabled() {
self.add_comment(local_msg_id, msg);
}
self.bcx.ins().global_value(self.pointer_type, local_msg_id)

@@ -444,9 +376,7 @@ impl<'tcx> LayoutOf for RevealAllLayoutCx<'tcx> {

fn layout_of(&self, ty: Ty<'tcx>) -> TyAndLayout<'tcx> {
assert!(!ty.still_further_specializable());
self.0
.layout_of(ParamEnv::reveal_all().and(&ty))
.unwrap_or_else(|e| {
self.0.layout_of(ParamEnv::reveal_all().and(&ty)).unwrap_or_else(|e| {
if let layout::LayoutError::SizeOverflow(_) = e {
self.0.sess.fatal(&e.to_string())
} else {
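Standalone illustration (not from the commit) of the property `has_ptr_meta` computes from layout above: pointers to sized types are a single scalar, while pointers to unsized types carry metadata (a length or a vtable) and are laid out as a scalar pair.

```rust
use std::mem::size_of;

fn main() {
    // Thin pointer: no metadata, one machine word.
    assert_eq!(size_of::<*const u8>(), size_of::<usize>());
    // Fat pointers: data pointer plus metadata, two machine words.
    assert_eq!(size_of::<*const [u8]>(), 2 * size_of::<usize>()); // ptr + length
    assert_eq!(size_of::<*const dyn std::fmt::Debug>(), 2 * size_of::<usize>()); // ptr + vtable
    println!("thin vs fat pointer sizes confirmed");
}
```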
41
compiler/rustc_codegen_cranelift/src/compiler_builtins.rs
Normal file

@@ -0,0 +1,41 @@
macro builtin_functions($register:ident; $(fn $name:ident($($arg_name:ident: $arg_ty:ty),*) -> $ret_ty:ty;)*) {
#[cfg(feature = "jit")]
#[allow(improper_ctypes)]
extern "C" {
$(fn $name($($arg_name: $arg_ty),*) -> $ret_ty;)*
}

#[cfg(feature = "jit")]
pub(crate) fn $register(builder: &mut cranelift_jit::JITBuilder) {
for &(name, val) in &[$((stringify!($name), $name as *const u8)),*] {
builder.symbol(name, val);
}
}
}

builtin_functions! {
register_functions_for_jit;

// integers
fn __multi3(a: i128, b: i128) -> i128;
fn __udivti3(n: u128, d: u128) -> u128;
fn __divti3(n: i128, d: i128) -> i128;
fn __umodti3(n: u128, d: u128) -> u128;
fn __modti3(n: i128, d: i128) -> i128;
fn __rust_u128_addo(a: u128, b: u128) -> (u128, bool);
fn __rust_i128_addo(a: i128, b: i128) -> (i128, bool);
fn __rust_u128_subo(a: u128, b: u128) -> (u128, bool);
fn __rust_i128_subo(a: i128, b: i128) -> (i128, bool);
fn __rust_u128_mulo(a: u128, b: u128) -> (u128, bool);
fn __rust_i128_mulo(a: i128, b: i128) -> (i128, bool);

// floats
fn __floattisf(i: i128) -> f32;
fn __floattidf(i: i128) -> f64;
fn __floatuntisf(i: u128) -> f32;
fn __floatuntidf(i: u128) -> f64;
fn __fixsfti(f: f32) -> i128;
fn __fixdfti(f: f64) -> i128;
fn __fixunssfti(f: f32) -> u128;
fn __fixunsdfti(f: f64) -> u128;
}
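Hedged sketch (not part of the commit) of roughly what one expansion of the `builtin_functions!` invocation above produces, shortened to a single symbol. It follows the macro definition in the new file; `cranelift_jit::JITBuilder::symbol` is the call the generated registration function makes so JIT-compiled code can resolve these compiler_builtins symbols by name. The fragment assumes a crate with a `cranelift_jit` dependency and a `jit` cargo feature, as in the file above.

```rust
// Illustrative expansion for just `__multi3`; the real invocation covers all of the
// integer and float helpers listed above.
#[cfg(feature = "jit")]
#[allow(improper_ctypes)]
extern "C" {
    fn __multi3(a: i128, b: i128) -> i128;
}

#[cfg(feature = "jit")]
pub(crate) fn register_functions_for_jit(builder: &mut cranelift_jit::JITBuilder) {
    // Map each symbol name to the address of the host's compiler_builtins function,
    // so lib_call/easy_call lowerings resolve at JIT time.
    for &(name, val) in &[("__multi3", __multi3 as *const u8)] {
        builder.symbol(name, val);
    }
}
```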
@@ -8,7 +8,7 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::interpret::{
read_target_uint, AllocId, Allocation, ConstValue, ErrorHandled, GlobalAlloc, Pointer, Scalar,
};
use rustc_middle::ty::{Const, ConstKind};
use rustc_middle::ty::ConstKind;

use cranelift_codegen::ir::GlobalValueData;
use cranelift_module::*;

@@ -28,7 +28,7 @@ enum TodoItem {
}

impl ConstantCx {
pub(crate) fn finalize(mut self, tcx: TyCtxt<'_>, module: &mut impl Module) {
pub(crate) fn finalize(mut self, tcx: TyCtxt<'_>, module: &mut dyn Module) {
//println!("todo {:?}", self.todo);
define_all_allocs(tcx, module, &mut self);
//println!("done {:?}", self.done);

@@ -36,21 +36,23 @@ impl ConstantCx {
}
}

pub(crate) fn check_constants(fx: &mut FunctionCx<'_, '_, impl Module>) {
pub(crate) fn check_constants(fx: &mut FunctionCx<'_, '_, '_>) -> bool {
let mut all_constants_ok = true;
for constant in &fx.mir.required_consts {
let const_ = fx.monomorphize(constant.literal);
let const_ = match fx.monomorphize(constant.literal) {
ConstantKind::Ty(ct) => ct,
ConstantKind::Val(..) => continue,
};
match const_.val {
ConstKind::Value(_) => {}
ConstKind::Unevaluated(def, ref substs, promoted) => {
ConstKind::Unevaluated(unevaluated) => {
if let Err(err) =
fx.tcx
.const_eval_resolve(ParamEnv::reveal_all(), def, substs, promoted, None)
fx.tcx.const_eval_resolve(ParamEnv::reveal_all(), unevaluated, None)
{
all_constants_ok = false;
match err {
ErrorHandled::Reported(ErrorReported) | ErrorHandled::Linted => {
fx.tcx
.sess
.span_err(constant.span, "erroneous constant encountered");
fx.tcx.sess.span_err(constant.span, "erroneous constant encountered");
}
ErrorHandled::TooGeneric => {
span_bug!(

@@ -69,6 +71,7 @@ pub(crate) fn check_constants(fx: &mut FunctionCx<'_, '_, impl Module>) {
| ConstKind::Error(_) => unreachable!("{:?}", const_),
}
}
all_constants_ok
}

pub(crate) fn codegen_static(constants_cx: &mut ConstantCx, def_id: DefId) {

@@ -76,27 +79,29 @@ pub(crate) fn codegen_static(constants_cx: &mut ConstantCx, def_id: DefId) {
}

pub(crate) fn codegen_tls_ref<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
fx: &mut FunctionCx<'_, '_, 'tcx>,
def_id: DefId,
layout: TyAndLayout<'tcx>,
) -> CValue<'tcx> {
let data_id = data_id_for_static(fx.tcx, &mut fx.cx.module, def_id, false);
let data_id = data_id_for_static(fx.tcx, fx.cx.module, def_id, false);
let local_data_id = fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
#[cfg(debug_assertions)]
if fx.clif_comments.enabled() {
fx.add_comment(local_data_id, format!("tls {:?}", def_id));
}
let tls_ptr = fx.bcx.ins().tls_value(fx.pointer_type, local_data_id);
CValue::by_val(tls_ptr, layout)
}

fn codegen_static_ref<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
fx: &mut FunctionCx<'_, '_, 'tcx>,
def_id: DefId,
layout: TyAndLayout<'tcx>,
) -> CPlace<'tcx> {
let data_id = data_id_for_static(fx.tcx, &mut fx.cx.module, def_id, false);
let data_id = data_id_for_static(fx.tcx, fx.cx.module, def_id, false);
let local_data_id = fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
#[cfg(debug_assertions)]
if fx.clif_comments.enabled() {
fx.add_comment(local_data_id, format!("{:?}", def_id));
}
let global_ptr = fx.bcx.ins().global_value(fx.pointer_type, local_data_id);
assert!(!layout.is_unsized(), "unsized statics aren't supported");
assert!(

@@ -110,38 +115,28 @@ fn codegen_static_ref<'tcx>(
}

pub(crate) fn codegen_constant<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
fx: &mut FunctionCx<'_, '_, 'tcx>,
constant: &Constant<'tcx>,
) -> CValue<'tcx> {
let const_ = fx.monomorphize(constant.literal);
let const_ = match fx.monomorphize(constant.literal) {
ConstantKind::Ty(ct) => ct,
ConstantKind::Val(val, ty) => return codegen_const_value(fx, val, ty),
};
let const_val = match const_.val {
ConstKind::Value(const_val) => const_val,
ConstKind::Unevaluated(def, ref substs, promoted) if fx.tcx.is_static(def.did) => {
ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted })
if fx.tcx.is_static(def.did) =>
{
assert!(substs.is_empty());
assert!(promoted.is_none());

return codegen_static_ref(
fx,
def.did,
fx.layout_of(fx.monomorphize(&constant.literal.ty)),
)
.to_cvalue(fx);
return codegen_static_ref(fx, def.did, fx.layout_of(const_.ty)).to_cvalue(fx);
}
ConstKind::Unevaluated(def, ref substs, promoted) => {
match fx
.tcx
.const_eval_resolve(ParamEnv::reveal_all(), def, substs, promoted, None)
{
ConstKind::Unevaluated(unevaluated) => {
match fx.tcx.const_eval_resolve(ParamEnv::reveal_all(), unevaluated, None) {
Ok(const_val) => const_val,
Err(_) => {
fx.tcx
.sess
.span_err(constant.span, "erroneous constant encountered");
return crate::trap::trap_unreachable_ret_value(
fx,
fx.layout_of(const_.ty),
"erroneous constant encountered",
);
span_bug!(constant.span, "erroneous constant not captured by required_consts");
}
}
}

@@ -156,7 +151,7 @@ pub(crate) fn codegen_constant<'tcx>(
}

pub(crate) fn codegen_const_value<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
fx: &mut FunctionCx<'_, '_, 'tcx>,
const_val: ConstValue<'tcx>,
ty: Ty<'tcx>,
) -> CValue<'tcx> {

@@ -172,9 +167,7 @@ pub(crate) fn codegen_const_value<'tcx>(
if fx.clif_type(layout.ty).is_none() {
let (size, align) = (layout.size, layout.align.pref);
let mut alloc = Allocation::from_bytes(
std::iter::repeat(0)
.take(size.bytes_usize())
.collect::<Vec<u8>>(),
std::iter::repeat(0).take(size.bytes_usize()).collect::<Vec<u8>>(),
align,
);
let ptr = Pointer::new(AllocId(!0), Size::ZERO); // The alloc id is never used

@@ -190,40 +183,36 @@ pub(crate) fn codegen_const_value<'tcx>(
let base_addr = match alloc_kind {
Some(GlobalAlloc::Memory(alloc)) => {
fx.cx.constants_cx.todo.push(TodoItem::Alloc(ptr.alloc_id));
let data_id = data_id_for_alloc_id(
&mut fx.cx.module,
ptr.alloc_id,
alloc.mutability,
);
let data_id =
data_id_for_alloc_id(fx.cx.module, ptr.alloc_id, alloc.mutability);
let local_data_id =
fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
#[cfg(debug_assertions)]
if fx.clif_comments.enabled() {
fx.add_comment(local_data_id, format!("{:?}", ptr.alloc_id));
}
fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}
Some(GlobalAlloc::Function(instance)) => {
let func_id =
crate::abi::import_function(fx.tcx, &mut fx.cx.module, instance);
crate::abi::import_function(fx.tcx, fx.cx.module, instance);
let local_func_id =
fx.cx.module.declare_func_in_func(func_id, &mut fx.bcx.func);
fx.bcx.ins().func_addr(fx.pointer_type, local_func_id)
}
Some(GlobalAlloc::Static(def_id)) => {
assert!(fx.tcx.is_static(def_id));
let data_id =
data_id_for_static(fx.tcx, &mut fx.cx.module, def_id, false);
let data_id = data_id_for_static(fx.tcx, fx.cx.module, def_id, false);
let local_data_id =
fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
#[cfg(debug_assertions)]
if fx.clif_comments.enabled() {
fx.add_comment(local_data_id, format!("{:?}", def_id));
}
fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}
None => bug!("missing allocation {:?}", ptr.alloc_id),
};
let val = if ptr.offset.bytes() != 0 {
fx.bcx
.ins()
.iadd_imm(base_addr, i64::try_from(ptr.offset.bytes()).unwrap())
fx.bcx.ins().iadd_imm(base_addr, i64::try_from(ptr.offset.bytes()).unwrap())
} else {
base_addr
};

@@ -240,32 +229,33 @@ pub(crate) fn codegen_const_value<'tcx>(
let ptr = pointer_for_allocation(fx, data)
.offset_i64(fx, i64::try_from(start).unwrap())
.get_addr(fx);
let len = fx.bcx.ins().iconst(
fx.pointer_type,
i64::try_from(end.checked_sub(start).unwrap()).unwrap(),
);
let len = fx
.bcx
.ins()
.iconst(fx.pointer_type, i64::try_from(end.checked_sub(start).unwrap()).unwrap());
CValue::by_val_pair(ptr, len, layout)
}
}
}

fn pointer_for_allocation<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
fx: &mut FunctionCx<'_, '_, 'tcx>,
alloc: &'tcx Allocation,
) -> crate::pointer::Pointer {
let alloc_id = fx.tcx.create_memory_alloc(alloc);
fx.cx.constants_cx.todo.push(TodoItem::Alloc(alloc_id));
let data_id = data_id_for_alloc_id(&mut fx.cx.module, alloc_id, alloc.mutability);
let data_id = data_id_for_alloc_id(fx.cx.module, alloc_id, alloc.mutability);

let local_data_id = fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
#[cfg(debug_assertions)]
if fx.clif_comments.enabled() {
fx.add_comment(local_data_id, format!("{:?}", alloc_id));
}
let global_ptr = fx.bcx.ins().global_value(fx.pointer_type, local_data_id);
crate::pointer::Pointer::new(global_ptr)
}

fn data_id_for_alloc_id(
module: &mut impl Module,
module: &mut dyn Module,
alloc_id: AllocId,
mutability: rustc_hir::Mutability,
) -> DataId {

@@ -281,7 +271,7 @@ fn data_id_for_alloc_id(

fn data_id_for_static(
tcx: TyCtxt<'_>,
module: &mut impl Module,
module: &mut dyn Module,
def_id: DefId,
definition: bool,
) -> DataId {

@@ -304,12 +294,7 @@ fn data_id_for_static(
} else {
!ty.is_freeze(tcx.at(DUMMY_SP), ParamEnv::reveal_all())
};
let align = tcx
.layout_of(ParamEnv::reveal_all().and(ty))
.unwrap()
.align
.pref
.bytes();
let align = tcx.layout_of(ParamEnv::reveal_all().and(ty)).unwrap().align.pref.bytes();

let attrs = tcx.codegen_fn_attrs(def_id);

@@ -332,17 +317,11 @@ fn data_id_for_static(
// zero.

let ref_name = format!("_rust_extern_with_linkage_{}", symbol_name);
let ref_data_id = module
.declare_data(&ref_name, Linkage::Local, false, false)
.unwrap();
let ref_data_id = module.declare_data(&ref_name, Linkage::Local, false, false).unwrap();
let mut data_ctx = DataContext::new();
data_ctx.set_align(align);
let data = module.declare_data_in_data(data_id, &mut data_ctx);
data_ctx.define(
std::iter::repeat(0)
.take(pointer_ty(tcx).bytes() as usize)
.collect(),
);
data_ctx.define(std::iter::repeat(0).take(pointer_ty(tcx).bytes() as usize).collect());
data_ctx.write_data_addr(0, data, 0);
match module.define_data(ref_data_id, &data_ctx) {
// Every time the static is referenced there will be another definition of this global,

@@ -356,7 +335,7 @@ fn data_id_for_static(
}
}

fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut impl Module, cx: &mut ConstantCx) {
fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut ConstantCx) {
while let Some(todo_item) = cx.todo.pop() {
let (data_id, alloc, section_name) = match todo_item {
TodoItem::Alloc(alloc_id) => {

@@ -371,10 +350,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut impl Module, cx: &mut Constan
TodoItem::Static(def_id) => {
//println!("static {:?}", def_id);

let section_name = tcx
.codegen_fn_attrs(def_id)
.link_section
.map(|s| s.as_str());
let section_name = tcx.codegen_fn_attrs(def_id).link_section.map(|s| s.as_str());

let alloc = tcx.eval_static_initializer(def_id).unwrap();

@@ -396,9 +372,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut impl Module, cx: &mut Constan
data_ctx.set_segment_section("", &*section_name);
}

let bytes = alloc
.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len())
.to_vec();
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len()).to_vec();
data_ctx.define(bytes.into_boxed_slice());

for &(offset, (_tag, reloc)) in alloc.relocations().iter() {

@@ -426,10 +400,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut impl Module, cx: &mut Constan
data_id_for_alloc_id(module, reloc, target_alloc.mutability)
}
GlobalAlloc::Static(def_id) => {
if tcx
.codegen_fn_attrs(def_id)
.flags
.contains(CodegenFnAttrFlags::THREAD_LOCAL)
if tcx.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL)
{
tcx.sess.fatal(&format!(
"Allocation {:?} contains reference to TLS value {:?}",

@@ -457,14 +428,16 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut impl Module, cx: &mut Constan
}

pub(crate) fn mir_operand_get_const_val<'tcx>(
fx: &FunctionCx<'_, 'tcx, impl Module>,
fx: &FunctionCx<'_, '_, 'tcx>,
operand: &Operand<'tcx>,
) -> Option<&'tcx Const<'tcx>> {
) -> Option<ConstValue<'tcx>> {
match operand {
Operand::Copy(_) | Operand::Move(_) => None,
Operand::Constant(const_) => Some(
fx.monomorphize(const_.literal)
.eval(fx.tcx, ParamEnv::reveal_all()),
),
Operand::Constant(const_) => match const_.literal {
ConstantKind::Ty(const_) => {
fx.monomorphize(const_).eval(fx.tcx, ParamEnv::reveal_all()).val.try_to_value()
}
ConstantKind::Val(val, _) => Some(val),
},
}
}
Some files were not shown because too many files have changed in this diff.