1
Fork 0

Merge remote-tracking branch 'origin/master' into frewsxcv-san

This commit is contained in:
Corey Farwell 2020-12-31 23:27:33 -05:00
commit d482de30ea
1600 changed files with 29527 additions and 21534 deletions

View file

@ -0,0 +1,63 @@
---
name: Library Tracking Issue
about: A tracking issue for an unstable library feature.
title: Tracking Issue for XXX
labels: C-tracking-issue, T-libs
---
<!--
Thank you for creating a tracking issue!
Tracking issues are for tracking a feature from implementation to stabilization.
Make sure to include the relevant RFC for the feature if it has one.
If the new feature is small, it may be fine to skip the RFC process. In that
case, you can use `issue = "none"` in your initial implementation PR. The
reviewer will ask you to open a tracking issue if they agree your feature can be
added without an RFC.
-->
Feature gate: `#![feature(...)]`
This is a tracking issue for ...
<!--
Include a short description of the feature.
-->
### Public API
<!--
For most library features, it'd be useful to include a summarized version of the public API.
(E.g. just the public function signatures without their doc comments or implementation.)
-->
```rust
...
```
### Steps / History
<!--
In the simplest case, this is a PR implementing the feature followed by a PR
that stabilises the feature. However, it's not uncommon for the feature to be
changed before stabilization. For larger features, the implementation could be
split up in multiple steps.
-->
- [ ] Implementation: ...
- [ ] Stabilization PR
### Unresolved Questions
<!--
Include any open questions that need to be answered before the feature can be
stabilised. If multiple (unrelated) big questions come up, it can be a good idea
to open a separate issue for each, to make it easier to keep track of the
discussions.
It's useful to link any relevant discussions and conclusions (whether on GitHub,
Zulip, or the internals forum) here.
-->
- None yet.

View file

@ -355,6 +355,35 @@ dependencies = [
"winapi 0.3.9", "winapi 0.3.9",
] ]
[[package]]
name = "cargo-credential"
version = "0.1.0"
[[package]]
name = "cargo-credential-1password"
version = "0.1.0"
dependencies = [
"cargo-credential",
"serde",
"serde_json",
]
[[package]]
name = "cargo-credential-macos-keychain"
version = "0.1.0"
dependencies = [
"cargo-credential",
"security-framework",
]
[[package]]
name = "cargo-credential-wincred"
version = "0.1.0"
dependencies = [
"cargo-credential",
"winapi 0.3.9",
]
[[package]] [[package]]
name = "cargo-miri" name = "cargo-miri"
version = "0.1.0" version = "0.1.0"
@ -725,9 +754,6 @@ checksum = "9a21fa21941700a3cd8fcb4091f361a6a712fac632f85d9f487cc892045d55c6"
[[package]] [[package]]
name = "coverage_test_macros" name = "coverage_test_macros"
version = "0.0.0" version = "0.0.0"
dependencies = [
"proc-macro2",
]
[[package]] [[package]]
name = "cpuid-bool" name = "cpuid-bool"
@ -869,9 +895,9 @@ dependencies = [
[[package]] [[package]]
name = "curl" name = "curl"
version = "0.4.31" version = "0.4.34"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9447ad28eee2a5cfb031c329d46bef77487244fff6a724b378885b8691a35f78" checksum = "e268162af1a5fe89917ae25ba3b0a77c8da752bdc58e7dbb4f15b91fbd33756e"
dependencies = [ dependencies = [
"curl-sys", "curl-sys",
"libc", "libc",
@ -884,9 +910,9 @@ dependencies = [
[[package]] [[package]]
name = "curl-sys" name = "curl-sys"
version = "0.4.34+curl-7.71.1" version = "0.4.39+curl-7.74.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad4eff0be6985b7e709f64b5a541f700e9ad1407190a29f4884319eb663ed1d6" checksum = "07a8ce861e7b68a0b394e814d7ee9f1b2750ff8bd10372c6ad3bacc10e86f874"
dependencies = [ dependencies = [
"cc", "cc",
"libc", "libc",
@ -1304,9 +1330,9 @@ dependencies = [
[[package]] [[package]]
name = "git2" name = "git2"
version = "0.13.12" version = "0.13.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca6f1a0238d7f8f8fd5ee642f4ebac4dbc03e03d1f78fbe7a3ede35dcf7e2224" checksum = "186dd99cc77576e58344ad614fa9bb27bad9d048f85de3ca850c1f4e8b048260"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"libc", "libc",
@ -1319,9 +1345,9 @@ dependencies = [
[[package]] [[package]]
name = "git2-curl" name = "git2-curl"
version = "0.14.0" version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "502d532a2d06184beb3bc869d4d90236e60934e3382c921b203fa3c33e212bd7" checksum = "883539cb0ea94bab3f8371a98cd8e937bbe9ee7c044499184aa4c17deb643a50"
dependencies = [ dependencies = [
"curl", "curl",
"git2", "git2",
@ -1348,6 +1374,15 @@ dependencies = [
"regex", "regex",
] ]
[[package]]
name = "gsgdt"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0d876ce7262df96262a2a19531da6ff9a86048224d49580a585fc5c04617825"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "handlebars" name = "handlebars"
version = "3.4.0" version = "3.4.0"
@ -1724,9 +1759,9 @@ dependencies = [
[[package]] [[package]]
name = "libgit2-sys" name = "libgit2-sys"
version = "0.12.14+1.1.0" version = "0.12.16+1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f25af58e6495f7caf2919d08f212de550cfa3ed2f5e744988938ea292b9f549" checksum = "9f91b2f931ee975a98155195be8cd82d02e8e029d7d793d2bac1b8181ac97020"
dependencies = [ dependencies = [
"cc", "cc",
"libc", "libc",
@ -3400,6 +3435,7 @@ dependencies = [
"byteorder", "byteorder",
"crossbeam-utils 0.7.2", "crossbeam-utils 0.7.2",
"libc", "libc",
"libz-sys",
"proc-macro2", "proc-macro2",
"quote", "quote",
"serde", "serde",
@ -3932,6 +3968,7 @@ dependencies = [
"rustc_session", "rustc_session",
"rustc_span", "rustc_span",
"rustc_target", "rustc_target",
"rustc_type_ir",
"smallvec 1.4.2", "smallvec 1.4.2",
"tracing", "tracing",
] ]
@ -3942,6 +3979,7 @@ version = "0.0.0"
dependencies = [ dependencies = [
"coverage_test_macros", "coverage_test_macros",
"either", "either",
"gsgdt",
"itertools 0.9.0", "itertools 0.9.0",
"polonius-engine", "polonius-engine",
"regex", "regex",
@ -4266,6 +4304,16 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "rustc_type_ir"
version = "0.0.0"
dependencies = [
"bitflags",
"rustc_data_structures",
"rustc_index",
"rustc_serialize",
]
[[package]] [[package]]
name = "rustc_typeck" name = "rustc_typeck"
version = "0.0.0" version = "0.0.0"
@ -4349,7 +4397,7 @@ dependencies = [
[[package]] [[package]]
name = "rustfmt-nightly" name = "rustfmt-nightly"
version = "1.4.29" version = "1.4.30"
dependencies = [ dependencies = [
"annotate-snippets 0.6.1", "annotate-snippets 0.6.1",
"anyhow", "anyhow",
@ -4424,6 +4472,29 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "security-framework"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1759c2e3c8580017a484a7ac56d3abc5a6c1feadf88db2f3633f12ae4268c69"
dependencies = [
"bitflags",
"core-foundation",
"core-foundation-sys",
"libc",
"security-framework-sys",
]
[[package]]
name = "security-framework-sys"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f99b9d5e26d2a71633cc4f2ebae7cc9f874044e0c351a27e17892d76dce5678b"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "semver" name = "semver"
version = "0.9.0" version = "0.9.0"
@ -4471,18 +4542,18 @@ dependencies = [
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.115" version = "1.0.118"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e54c9a88f2da7238af84b5101443f0c0d0a3bbdc455e34a5c9497b1903ed55d5" checksum = "06c64263859d87aa2eb554587e2d23183398d617427327cf2b3d0ed8c69e4800"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.115" version = "1.0.118"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "609feed1d0a73cc36a0182a840a9b37b4a82f0b1150369f0536a9e3f2a31dc48" checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -5272,7 +5343,7 @@ dependencies = [
"chrono", "chrono",
"lazy_static", "lazy_static",
"matchers", "matchers",
"parking_lot 0.11.0", "parking_lot 0.9.0",
"regex", "regex",
"serde", "serde",
"serde_json", "serde_json",

View file

@ -20,6 +20,9 @@ members = [
"src/tools/rust-installer", "src/tools/rust-installer",
"src/tools/rust-demangler", "src/tools/rust-demangler",
"src/tools/cargo", "src/tools/cargo",
"src/tools/cargo/crates/credential/cargo-credential-1password",
"src/tools/cargo/crates/credential/cargo-credential-macos-keychain",
"src/tools/cargo/crates/credential/cargo-credential-wincred",
"src/tools/rustdoc", "src/tools/rustdoc",
"src/tools/rls", "src/tools/rls",
"src/tools/rustfmt", "src/tools/rustfmt",

View file

@ -1,3 +1,131 @@
Version 1.49.0 (2020-12-31)
============================
Language
-----------------------
- [Unions can now implement `Drop`, and you can now have a field in a union
with `ManuallyDrop<T>`.][77547]
- [You can now cast uninhabited enums to integers.][76199]
- [You can now bind by reference and by move in patterns.][76119] This
allows you to selectively borrow individual components of a type. E.g.
```rust
#[derive(Debug)]
struct Person {
name: String,
age: u8,
}
let person = Person {
name: String::from("Alice"),
age: 20,
};
// `name` is moved out of person, but `age` is referenced.
let Person { name, ref age } = person;
println!("{} {}", name, age);
```
Compiler
-----------------------
- [Added tier 1\* support for `aarch64-unknown-linux-gnu`.][78228]
- [Added tier 2 support for `aarch64-apple-darwin`.][75991]
- [Added tier 2 support for `aarch64-pc-windows-msvc`.][75914]
- [Added tier 3 support for `mipsel-unknown-none`.][78676]
- [Raised the minimum supported LLVM version to LLVM 9.][78848]
- [Output from threads spawned in tests is now captured.][78227]
- [Change os and vendor values to "none" and "unknown" for some targets][78951]
\* Refer to Rust's [platform support page][forge-platform-support] for more
information on Rust's tiered platform support.
Libraries
-----------------------
- [`RangeInclusive` now checks for exhaustion when calling `contains` and indexing.][78109]
- [`ToString::to_string` now no longer shrinks the internal buffer in the default implementation.][77997]
- [`ops::{Index, IndexMut}` are now implemented for fixed sized arrays of any length.][74989]
Stabilized APIs
---------------
- [`slice::select_nth_unstable`]
- [`slice::select_nth_unstable_by`]
- [`slice::select_nth_unstable_by_key`]
The following previously stable methods are now `const`.
- [`Poll::is_ready`]
- [`Poll::is_pending`]
Cargo
-----------------------
- [Building a crate with `cargo-package` should now be independently reproducible.][cargo/8864]
- [`cargo-tree` now marks proc-macro crates.][cargo/8765]
- [Added `CARGO_PRIMARY_PACKAGE` build-time environment variable.][cargo/8758] This
variable will be set if the crate being built is one the user selected to build, either
with `-p` or through defaults.
- [You can now use glob patterns when specifying packages & targets.][cargo/8752]
Compatibility Notes
-------------------
- [Demoted `i686-unknown-freebsd` from host tier 2 to target tier 2 support.][78746]
- [Macros that end with a semi-colon are now treated as statements even if they expand to nothing.][78376]
- [Rustc will now check for the validity of some built-in attributes on enum variants.][77015]
Previously such invalid or unused attributes could be ignored.
- Leading whitespace is stripped more uniformly in documentation comments, which may change behavior. You
can read [this post about the changes][rustdoc-ws-post] for more details.
- [Trait bounds are no longer inferred for associated types.][79904]
Internal Only
-------------
These changes provide no direct user facing benefits, but represent significant
improvements to the internals and overall performance of rustc and
related tools.
- [rustc's internal crates are now compiled using the `initial-exec` Thread
Local Storage model.][78201]
- [Calculate visibilities once in resolve.][78077]
- [Added `system` to the `llvm-libunwind` bootstrap config option.][77703]
- [Added `--color` for configuring terminal color support to bootstrap.][79004]
[75991]: https://github.com/rust-lang/rust/pull/75991
[78951]: https://github.com/rust-lang/rust/pull/78951
[78848]: https://github.com/rust-lang/rust/pull/78848
[78746]: https://github.com/rust-lang/rust/pull/78746
[78376]: https://github.com/rust-lang/rust/pull/78376
[78228]: https://github.com/rust-lang/rust/pull/78228
[78227]: https://github.com/rust-lang/rust/pull/78227
[78201]: https://github.com/rust-lang/rust/pull/78201
[78109]: https://github.com/rust-lang/rust/pull/78109
[78077]: https://github.com/rust-lang/rust/pull/78077
[77997]: https://github.com/rust-lang/rust/pull/77997
[77703]: https://github.com/rust-lang/rust/pull/77703
[77547]: https://github.com/rust-lang/rust/pull/77547
[77015]: https://github.com/rust-lang/rust/pull/77015
[76199]: https://github.com/rust-lang/rust/pull/76199
[76119]: https://github.com/rust-lang/rust/pull/76119
[75914]: https://github.com/rust-lang/rust/pull/75914
[74989]: https://github.com/rust-lang/rust/pull/74989
[79004]: https://github.com/rust-lang/rust/pull/79004
[78676]: https://github.com/rust-lang/rust/pull/78676
[79904]: https://github.com/rust-lang/rust/issues/79904
[cargo/8864]: https://github.com/rust-lang/cargo/pull/8864
[cargo/8765]: https://github.com/rust-lang/cargo/pull/8765
[cargo/8758]: https://github.com/rust-lang/cargo/pull/8758
[cargo/8752]: https://github.com/rust-lang/cargo/pull/8752
[`slice::select_nth_unstable`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.select_nth_unstable
[`slice::select_nth_unstable_by`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.select_nth_unstable_by
[`slice::select_nth_unstable_by_key`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.select_nth_unstable_by_key
[`hint::spin_loop`]: https://doc.rust-lang.org/stable/std/hint/fn.spin_loop.html
[`Poll::is_ready`]: https://doc.rust-lang.org/stable/std/task/enum.Poll.html#method.is_ready
[`Poll::is_pending`]: https://doc.rust-lang.org/stable/std/task/enum.Poll.html#method.is_pending
[rustdoc-ws-post]: https://blog.guillaume-gomez.fr/articles/2020-11-11+New+doc+comment+handling+in+rustdoc
Version 1.48.0 (2020-11-19) Version 1.48.0 (2020-11-19)
========================== ==========================

View file

@ -16,7 +16,7 @@
#![feature(new_uninit)] #![feature(new_uninit)]
#![feature(maybe_uninit_slice)] #![feature(maybe_uninit_slice)]
#![feature(array_value_iter)] #![feature(array_value_iter)]
#![feature(min_const_generics)] #![cfg_attr(bootstrap, feature(min_const_generics))]
#![feature(min_specialization)] #![feature(min_specialization)]
#![cfg_attr(test, feature(test))] #![cfg_attr(test, feature(test))]

View file

@ -167,10 +167,7 @@ pub enum GenericArgs {
impl GenericArgs { impl GenericArgs {
pub fn is_angle_bracketed(&self) -> bool { pub fn is_angle_bracketed(&self) -> bool {
match *self { matches!(self, AngleBracketed(..))
AngleBracketed(..) => true,
_ => false,
}
} }
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
@ -629,23 +626,20 @@ impl Pat {
/// Is this a `..` pattern? /// Is this a `..` pattern?
pub fn is_rest(&self) -> bool { pub fn is_rest(&self) -> bool {
match self.kind { matches!(self.kind, PatKind::Rest)
PatKind::Rest => true,
_ => false,
}
} }
} }
/// A single field in a struct pattern /// A single field in a struct pattern.
/// ///
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }` /// Patterns like the fields of `Foo { x, ref y, ref mut z }`
/// are treated the same as` x: x, y: ref y, z: ref mut z`, /// are treated the same as `x: x, y: ref y, z: ref mut z`,
/// except is_shorthand is true /// except when `is_shorthand` is true.
#[derive(Clone, Encodable, Decodable, Debug)] #[derive(Clone, Encodable, Decodable, Debug)]
pub struct FieldPat { pub struct FieldPat {
/// The identifier for the field /// The identifier for the field.
pub ident: Ident, pub ident: Ident,
/// The pattern the field is destructured to /// The pattern the field is destructured to.
pub pat: P<Pat>, pub pat: P<Pat>,
pub is_shorthand: bool, pub is_shorthand: bool,
pub attrs: AttrVec, pub attrs: AttrVec,
@ -852,10 +846,7 @@ impl BinOpKind {
} }
} }
pub fn lazy(&self) -> bool { pub fn lazy(&self) -> bool {
match *self { matches!(self, BinOpKind::And | BinOpKind::Or)
BinOpKind::And | BinOpKind::Or => true,
_ => false,
}
} }
pub fn is_comparison(&self) -> bool { pub fn is_comparison(&self) -> bool {
@ -963,17 +954,11 @@ impl Stmt {
} }
pub fn is_item(&self) -> bool { pub fn is_item(&self) -> bool {
match self.kind { matches!(self.kind, StmtKind::Item(_))
StmtKind::Item(_) => true,
_ => false,
}
} }
pub fn is_expr(&self) -> bool { pub fn is_expr(&self) -> bool {
match self.kind { matches!(self.kind, StmtKind::Expr(_))
StmtKind::Expr(_) => true,
_ => false,
}
} }
} }
@ -1107,15 +1092,9 @@ impl Expr {
if let ExprKind::Block(ref block, _) = self.kind { if let ExprKind::Block(ref block, _) = self.kind {
match block.stmts.last().map(|last_stmt| &last_stmt.kind) { match block.stmts.last().map(|last_stmt| &last_stmt.kind) {
// Implicit return // Implicit return
Some(&StmtKind::Expr(_)) => true, Some(StmtKind::Expr(_)) => true,
Some(&StmtKind::Semi(ref expr)) => { // Last statement is an explicit return?
if let ExprKind::Ret(_) = expr.kind { Some(StmtKind::Semi(expr)) => matches!(expr.kind, ExprKind::Ret(_)),
// Last statement is explicit return.
true
} else {
false
}
}
// This is a block that doesn't end in either an implicit or explicit return. // This is a block that doesn't end in either an implicit or explicit return.
_ => false, _ => false,
} }
@ -1128,7 +1107,7 @@ impl Expr {
/// Is this expr either `N`, or `{ N }`. /// Is this expr either `N`, or `{ N }`.
/// ///
/// If this is not the case, name resolution does not resolve `N` when using /// If this is not the case, name resolution does not resolve `N` when using
/// `feature(min_const_generics)` as more complex expressions are not supported. /// `min_const_generics` as more complex expressions are not supported.
pub fn is_potential_trivial_const_param(&self) -> bool { pub fn is_potential_trivial_const_param(&self) -> bool {
let this = if let ExprKind::Block(ref block, None) = self.kind { let this = if let ExprKind::Block(ref block, None) = self.kind {
if block.stmts.len() == 1 { if block.stmts.len() == 1 {
@ -1652,26 +1631,17 @@ pub enum LitKind {
impl LitKind { impl LitKind {
/// Returns `true` if this literal is a string. /// Returns `true` if this literal is a string.
pub fn is_str(&self) -> bool { pub fn is_str(&self) -> bool {
match *self { matches!(self, LitKind::Str(..))
LitKind::Str(..) => true,
_ => false,
}
} }
/// Returns `true` if this literal is byte literal string. /// Returns `true` if this literal is byte literal string.
pub fn is_bytestr(&self) -> bool { pub fn is_bytestr(&self) -> bool {
match self { matches!(self, LitKind::ByteStr(_))
LitKind::ByteStr(_) => true,
_ => false,
}
} }
/// Returns `true` if this is a numeric literal. /// Returns `true` if this is a numeric literal.
pub fn is_numeric(&self) -> bool { pub fn is_numeric(&self) -> bool {
match *self { matches!(self, LitKind::Int(..) | LitKind::Float(..))
LitKind::Int(..) | LitKind::Float(..) => true,
_ => false,
}
} }
/// Returns `true` if this literal has no suffix. /// Returns `true` if this literal has no suffix.
@ -1974,7 +1944,7 @@ impl TyKind {
} }
pub fn is_unit(&self) -> bool { pub fn is_unit(&self) -> bool {
if let TyKind::Tup(ref tys) = *self { tys.is_empty() } else { false } matches!(self, TyKind::Tup(tys) if tys.is_empty())
} }
} }
@ -2237,10 +2207,7 @@ impl FnDecl {
self.inputs.get(0).map_or(false, Param::is_self) self.inputs.get(0).map_or(false, Param::is_self)
} }
pub fn c_variadic(&self) -> bool { pub fn c_variadic(&self) -> bool {
self.inputs.last().map_or(false, |arg| match arg.ty.kind { self.inputs.last().map_or(false, |arg| matches!(arg.ty.kind, TyKind::CVarArgs))
TyKind::CVarArgs => true,
_ => false,
})
} }
} }

View file

@ -234,10 +234,7 @@ impl MetaItem {
} }
pub fn is_word(&self) -> bool { pub fn is_word(&self) -> bool {
match self.kind { matches!(self.kind, MetaItemKind::Word)
MetaItemKind::Word => true,
_ => false,
}
} }
pub fn has_name(&self, name: Symbol) -> bool { pub fn has_name(&self, name: Symbol) -> bool {

View file

@ -15,7 +15,7 @@ use rustc_span::hygiene::ExpnKind;
use rustc_span::source_map::SourceMap; use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym}; use rustc_span::symbol::{kw, sym};
use rustc_span::symbol::{Ident, Symbol}; use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{self, FileName, RealFileName, Span, DUMMY_SP}; use rustc_span::{self, edition::Edition, FileName, RealFileName, Span, DUMMY_SP};
use std::borrow::Cow; use std::borrow::Cow;
use std::{fmt, mem}; use std::{fmt, mem};
@ -130,10 +130,7 @@ impl LitKind {
} }
crate fn may_have_suffix(self) -> bool { crate fn may_have_suffix(self) -> bool {
match self { matches!(self, Integer | Float | Err)
Integer | Float | Err => true,
_ => false,
}
} }
} }
@ -305,10 +302,7 @@ impl TokenKind {
} }
pub fn should_end_const_arg(&self) -> bool { pub fn should_end_const_arg(&self) -> bool {
match self { matches!(self, Gt | Ge | BinOp(Shr) | BinOpEq(Shr))
Gt | Ge | BinOp(Shr) | BinOpEq(Shr) => true,
_ => false,
}
} }
} }
@ -346,18 +340,21 @@ impl Token {
} }
pub fn is_op(&self) -> bool { pub fn is_op(&self) -> bool {
match self.kind { !matches!(
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..) self.kind,
| Lifetime(..) | Interpolated(..) | Eof => false, OpenDelim(..)
_ => true, | CloseDelim(..)
} | Literal(..)
| DocComment(..)
| Ident(..)
| Lifetime(..)
| Interpolated(..)
| Eof
)
} }
pub fn is_like_plus(&self) -> bool { pub fn is_like_plus(&self) -> bool {
match self.kind { matches!(self.kind, BinOp(Plus) | BinOpEq(Plus))
BinOp(Plus) | BinOpEq(Plus) => true,
_ => false,
}
} }
/// Returns `true` if the token can appear at the start of an expression. /// Returns `true` if the token can appear at the start of an expression.
@ -379,13 +376,10 @@ impl Token {
ModSep | // global path ModSep | // global path
Lifetime(..) | // labeled loop Lifetime(..) | // labeled loop
Pound => true, // expression attributes Pound => true, // expression attributes
Interpolated(ref nt) => match **nt { Interpolated(ref nt) => matches!(**nt, NtLiteral(..) |
NtLiteral(..) |
NtExpr(..) | NtExpr(..) |
NtBlock(..) | NtBlock(..) |
NtPath(..) => true, NtPath(..)),
_ => false,
},
_ => false, _ => false,
} }
} }
@ -405,10 +399,7 @@ impl Token {
Lifetime(..) | // lifetime bound in trait object Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path Lt | BinOp(Shl) | // associated path
ModSep => true, // global path ModSep => true, // global path
Interpolated(ref nt) => match **nt { Interpolated(ref nt) => matches!(**nt, NtTy(..) | NtPath(..)),
NtTy(..) | NtPath(..) => true,
_ => false,
},
_ => false, _ => false,
} }
} }
@ -417,10 +408,7 @@ impl Token {
pub fn can_begin_const_arg(&self) -> bool { pub fn can_begin_const_arg(&self) -> bool {
match self.kind { match self.kind {
OpenDelim(Brace) => true, OpenDelim(Brace) => true,
Interpolated(ref nt) => match **nt { Interpolated(ref nt) => matches!(**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
NtExpr(..) | NtBlock(..) | NtLiteral(..) => true,
_ => false,
},
_ => self.can_begin_literal_maybe_minus(), _ => self.can_begin_literal_maybe_minus(),
} }
} }
@ -434,12 +422,9 @@ impl Token {
|| self == &OpenDelim(Paren) || self == &OpenDelim(Paren)
} }
/// Returns `true` if the token is any literal /// Returns `true` if the token is any literal.
pub fn is_lit(&self) -> bool { pub fn is_lit(&self) -> bool {
match self.kind { matches!(self.kind, Literal(..))
Literal(..) => true,
_ => false,
}
} }
/// Returns `true` if the token is any literal, a minus (which can prefix a literal, /// Returns `true` if the token is any literal, a minus (which can prefix a literal,
@ -705,7 +690,16 @@ pub enum NonterminalKind {
Item, Item,
Block, Block,
Stmt, Stmt,
Pat, Pat2018 {
/// Keep track of whether the user used `:pat2018` or `:pat` and we inferred it from the
/// edition of the span. This is used for diagnostics.
inferred: bool,
},
Pat2021 {
/// Keep track of whether the user used `:pat2018` or `:pat` and we inferred it from the
/// edition of the span. This is used for diagnostics.
inferred: bool,
},
Expr, Expr,
Ty, Ty,
Ident, Ident,
@ -718,12 +712,25 @@ pub enum NonterminalKind {
} }
impl NonterminalKind { impl NonterminalKind {
pub fn from_symbol(symbol: Symbol) -> Option<NonterminalKind> { /// The `edition` closure is used to get the edition for the given symbol. Doing
/// `span.edition()` is expensive, so we do it lazily.
pub fn from_symbol(
symbol: Symbol,
edition: impl FnOnce() -> Edition,
) -> Option<NonterminalKind> {
Some(match symbol { Some(match symbol {
sym::item => NonterminalKind::Item, sym::item => NonterminalKind::Item,
sym::block => NonterminalKind::Block, sym::block => NonterminalKind::Block,
sym::stmt => NonterminalKind::Stmt, sym::stmt => NonterminalKind::Stmt,
sym::pat => NonterminalKind::Pat, sym::pat => match edition() {
Edition::Edition2015 | Edition::Edition2018 => {
NonterminalKind::Pat2018 { inferred: true }
}
// FIXME(mark-i-m): uncomment when 2021 machinery is available.
//Edition::Edition2021 => NonterminalKind::Pat2021{inferred:true},
},
sym::pat2018 => NonterminalKind::Pat2018 { inferred: false },
sym::pat2021 => NonterminalKind::Pat2021 { inferred: false },
sym::expr => NonterminalKind::Expr, sym::expr => NonterminalKind::Expr,
sym::ty => NonterminalKind::Ty, sym::ty => NonterminalKind::Ty,
sym::ident => NonterminalKind::Ident, sym::ident => NonterminalKind::Ident,
@ -741,7 +748,10 @@ impl NonterminalKind {
NonterminalKind::Item => sym::item, NonterminalKind::Item => sym::item,
NonterminalKind::Block => sym::block, NonterminalKind::Block => sym::block,
NonterminalKind::Stmt => sym::stmt, NonterminalKind::Stmt => sym::stmt,
NonterminalKind::Pat => sym::pat, NonterminalKind::Pat2018 { inferred: false } => sym::pat2018,
NonterminalKind::Pat2021 { inferred: false } => sym::pat2021,
NonterminalKind::Pat2018 { inferred: true }
| NonterminalKind::Pat2021 { inferred: true } => sym::pat,
NonterminalKind::Expr => sym::expr, NonterminalKind::Expr => sym::expr,
NonterminalKind::Ty => sym::ty, NonterminalKind::Ty => sym::ty,
NonterminalKind::Ident => sym::ident, NonterminalKind::Ident => sym::ident,

View file

@ -44,6 +44,12 @@ pub enum TokenTree {
Delimited(DelimSpan, DelimToken, TokenStream), Delimited(DelimSpan, DelimToken, TokenStream),
} }
#[derive(Copy, Clone)]
pub enum CanSynthesizeMissingTokens {
Yes,
No,
}
// Ensure all fields of `TokenTree` is `Send` and `Sync`. // Ensure all fields of `TokenTree` is `Send` and `Sync`.
#[cfg(parallel_compiler)] #[cfg(parallel_compiler)]
fn _dummy() fn _dummy()

View file

@ -12,14 +12,14 @@ use crate::ast;
/// |x| 5 /// |x| 5
/// isn't parsed as (if true {...} else {...} | x) | 5 /// isn't parsed as (if true {...} else {...} | x) | 5
pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool { pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool {
match e.kind { !matches!(
e.kind,
ast::ExprKind::If(..) ast::ExprKind::If(..)
| ast::ExprKind::Match(..) | ast::ExprKind::Match(..)
| ast::ExprKind::Block(..) | ast::ExprKind::Block(..)
| ast::ExprKind::While(..) | ast::ExprKind::While(..)
| ast::ExprKind::Loop(..) | ast::ExprKind::Loop(..)
| ast::ExprKind::ForLoop(..) | ast::ExprKind::ForLoop(..)
| ast::ExprKind::TryBlock(..) => false, | ast::ExprKind::TryBlock(..)
_ => true, )
}
} }

View file

@ -25,9 +25,8 @@ pub struct Comment {
/// Makes a doc string more presentable to users. /// Makes a doc string more presentable to users.
/// Used by rustdoc and perhaps other tools, but not by rustc. /// Used by rustdoc and perhaps other tools, but not by rustc.
pub fn beautify_doc_string(data: Symbol) -> String { pub fn beautify_doc_string(data: Symbol) -> Symbol {
/// remove whitespace-only lines from the start/end of lines fn get_vertical_trim(lines: &[&str]) -> Option<(usize, usize)> {
fn vertical_trim(lines: Vec<String>) -> Vec<String> {
let mut i = 0; let mut i = 0;
let mut j = lines.len(); let mut j = lines.len();
// first line of all-stars should be omitted // first line of all-stars should be omitted
@ -47,55 +46,58 @@ pub fn beautify_doc_string(data: Symbol) -> String {
j -= 1; j -= 1;
} }
lines[i..j].to_vec() if i != 0 || j != lines.len() { Some((i, j)) } else { None }
} }
/// remove a "[ \t]*\*" block from each line, if possible fn get_horizontal_trim(lines: &[&str]) -> Option<usize> {
fn horizontal_trim(lines: Vec<String>) -> Vec<String> {
let mut i = usize::MAX; let mut i = usize::MAX;
let mut can_trim = true;
let mut first = true; let mut first = true;
for line in &lines { for line in lines {
for (j, c) in line.chars().enumerate() { for (j, c) in line.chars().enumerate() {
if j > i || !"* \t".contains(c) { if j > i || !"* \t".contains(c) {
can_trim = false; return None;
break;
} }
if c == '*' { if c == '*' {
if first { if first {
i = j; i = j;
first = false; first = false;
} else if i != j { } else if i != j {
can_trim = false; return None;
} }
break; break;
} }
} }
if i >= line.len() { if i >= line.len() {
can_trim = false; return None;
}
if !can_trim {
break;
} }
} }
Some(i)
}
if can_trim { let data_s = data.as_str();
lines.iter().map(|line| (&line[i + 1..line.len()]).to_string()).collect() if data_s.contains('\n') {
let mut lines = data_s.lines().collect::<Vec<&str>>();
let mut changes = false;
let lines = if let Some((i, j)) = get_vertical_trim(&lines) {
changes = true;
// remove whitespace-only lines from the start/end of lines
&mut lines[i..j]
} else { } else {
lines &mut lines
};
if let Some(horizontal) = get_horizontal_trim(&lines) {
changes = true;
// remove a "[ \t]*\*" block from each line, if possible
for line in lines.iter_mut() {
*line = &line[horizontal + 1..];
}
}
if changes {
return Symbol::intern(&lines.join("\n"));
} }
} }
data
let data = data.as_str();
if data.contains('\n') {
let lines = data.lines().map(|s| s.to_string()).collect::<Vec<String>>();
let lines = vertical_trim(lines);
let lines = horizontal_trim(lines);
lines.join("\n")
} else {
data.to_string()
}
} }
/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char. /// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
@ -178,10 +180,8 @@ pub fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comme
} }
rustc_lexer::TokenKind::BlockComment { doc_style, .. } => { rustc_lexer::TokenKind::BlockComment { doc_style, .. } => {
if doc_style.is_none() { if doc_style.is_none() {
let code_to_the_right = match text[pos + token.len..].chars().next() { let code_to_the_right =
Some('\r' | '\n') => false, !matches!(text[pos + token.len..].chars().next(), Some('\r' | '\n'));
_ => true,
};
let style = match (code_to_the_left, code_to_the_right) { let style = match (code_to_the_left, code_to_the_right) {
(_, true) => CommentStyle::Mixed, (_, true) => CommentStyle::Mixed,
(false, false) => CommentStyle::Isolated, (false, false) => CommentStyle::Isolated,

View file

@ -6,7 +6,7 @@ fn test_block_doc_comment_1() {
with_default_session_globals(|| { with_default_session_globals(|| {
let comment = "\n * Test \n ** Test\n * Test\n"; let comment = "\n * Test \n ** Test\n * Test\n";
let stripped = beautify_doc_string(Symbol::intern(comment)); let stripped = beautify_doc_string(Symbol::intern(comment));
assert_eq!(stripped, " Test \n* Test\n Test"); assert_eq!(stripped.as_str(), " Test \n* Test\n Test");
}) })
} }
@ -15,7 +15,7 @@ fn test_block_doc_comment_2() {
with_default_session_globals(|| { with_default_session_globals(|| {
let comment = "\n * Test\n * Test\n"; let comment = "\n * Test\n * Test\n";
let stripped = beautify_doc_string(Symbol::intern(comment)); let stripped = beautify_doc_string(Symbol::intern(comment));
assert_eq!(stripped, " Test\n Test"); assert_eq!(stripped.as_str(), " Test\n Test");
}) })
} }
@ -24,7 +24,7 @@ fn test_block_doc_comment_3() {
with_default_session_globals(|| { with_default_session_globals(|| {
let comment = "\n let a: *i32;\n *a = 5;\n"; let comment = "\n let a: *i32;\n *a = 5;\n";
let stripped = beautify_doc_string(Symbol::intern(comment)); let stripped = beautify_doc_string(Symbol::intern(comment));
assert_eq!(stripped, " let a: *i32;\n *a = 5;"); assert_eq!(stripped.as_str(), " let a: *i32;\n *a = 5;");
}) })
} }
@ -32,12 +32,12 @@ fn test_block_doc_comment_3() {
fn test_line_doc_comment() { fn test_line_doc_comment() {
with_default_session_globals(|| { with_default_session_globals(|| {
let stripped = beautify_doc_string(Symbol::intern(" test")); let stripped = beautify_doc_string(Symbol::intern(" test"));
assert_eq!(stripped, " test"); assert_eq!(stripped.as_str(), " test");
let stripped = beautify_doc_string(Symbol::intern("! test")); let stripped = beautify_doc_string(Symbol::intern("! test"));
assert_eq!(stripped, "! test"); assert_eq!(stripped.as_str(), "! test");
let stripped = beautify_doc_string(Symbol::intern("test")); let stripped = beautify_doc_string(Symbol::intern("test"));
assert_eq!(stripped, "test"); assert_eq!(stripped.as_str(), "test");
let stripped = beautify_doc_string(Symbol::intern("!test")); let stripped = beautify_doc_string(Symbol::intern("!test"));
assert_eq!(stripped, "!test"); assert_eq!(stripped.as_str(), "!test");
}) })
} }

View file

@ -505,14 +505,19 @@ impl<'hir> LoweringContext<'_, 'hir> {
} }
fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> { fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
let pat = self.lower_pat(&arm.pat);
let guard = arm.guard.as_ref().map(|cond| {
if let ExprKind::Let(ref pat, ref scrutinee) = cond.kind {
hir::Guard::IfLet(self.lower_pat(pat), self.lower_expr(scrutinee))
} else {
hir::Guard::If(self.lower_expr(cond))
}
});
hir::Arm { hir::Arm {
hir_id: self.next_id(), hir_id: self.next_id(),
attrs: self.lower_attrs(&arm.attrs), attrs: self.lower_attrs(&arm.attrs),
pat: self.lower_pat(&arm.pat), pat,
guard: match arm.guard { guard,
Some(ref x) => Some(hir::Guard::If(self.lower_expr(x))),
_ => None,
},
body: self.lower_expr(&arm.body), body: self.lower_expr(&arm.body),
span: arm.span, span: arm.span,
} }

View file

@ -37,7 +37,7 @@
use rustc_ast::node_id::NodeMap; use rustc_ast::node_id::NodeMap;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token}; use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, DelimSpan, TokenStream, TokenTree};
use rustc_ast::visit::{self, AssocCtxt, Visitor}; use rustc_ast::visit::{self, AssocCtxt, Visitor};
use rustc_ast::walk_list; use rustc_ast::walk_list;
use rustc_ast::{self as ast, *}; use rustc_ast::{self as ast, *};
@ -206,7 +206,8 @@ pub trait ResolverAstLowering {
) -> LocalDefId; ) -> LocalDefId;
} }
type NtToTokenstream = fn(&Nonterminal, &ParseSess, Span) -> TokenStream; type NtToTokenstream =
fn(&Nonterminal, &ParseSess, Span, CanSynthesizeMissingTokens) -> TokenStream;
/// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree, /// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
/// and if so, what meaning it has. /// and if so, what meaning it has.
@ -393,6 +394,47 @@ enum AnonymousLifetimeMode {
PassThrough, PassThrough,
} }
struct TokenStreamLowering<'a> {
parse_sess: &'a ParseSess,
synthesize_tokens: CanSynthesizeMissingTokens,
nt_to_tokenstream: NtToTokenstream,
}
impl<'a> TokenStreamLowering<'a> {
fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
}
fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
match tree {
TokenTree::Token(token) => self.lower_token(token),
TokenTree::Delimited(span, delim, tts) => {
TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
}
}
}
fn lower_token(&mut self, token: Token) -> TokenStream {
match token.kind {
token::Interpolated(nt) => {
let tts = (self.nt_to_tokenstream)(
&nt,
self.parse_sess,
token.span,
self.synthesize_tokens,
);
TokenTree::Delimited(
DelimSpan::from_single(token.span),
DelimToken::NoDelim,
self.lower_token_stream(tts),
)
.into()
}
_ => TokenTree::Token(token).into(),
}
}
}
struct ImplTraitTypeIdVisitor<'a> { struct ImplTraitTypeIdVisitor<'a> {
ids: &'a mut SmallVec<[NodeId; 1]>, ids: &'a mut SmallVec<[NodeId; 1]>,
} }
@ -955,42 +997,51 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
match *args { match *args {
MacArgs::Empty => MacArgs::Empty, MacArgs::Empty => MacArgs::Empty,
MacArgs::Delimited(dspan, delim, ref tokens) => { MacArgs::Delimited(dspan, delim, ref tokens) => {
MacArgs::Delimited(dspan, delim, self.lower_token_stream(tokens.clone())) // This is either a non-key-value attribute, or a `macro_rules!` body.
} // We either not have any nonterminals present (in the case of an attribute),
MacArgs::Eq(eq_span, ref tokens) => { // or have tokens available for all nonterminals in the case of a nested
MacArgs::Eq(eq_span, self.lower_token_stream(tokens.clone())) // `macro_rules`: e.g:
} //
} // ```rust
} // macro_rules! outer {
// ($e:expr) => {
fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream { // macro_rules! inner {
tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect() // () => { $e }
} // }
// }
fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream { // }
match tree { // ```
TokenTree::Token(token) => self.lower_token(token), //
TokenTree::Delimited(span, delim, tts) => { // In both cases, we don't want to synthesize any tokens
TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into() MacArgs::Delimited(
} dspan,
} delim,
} self.lower_token_stream(tokens.clone(), CanSynthesizeMissingTokens::No),
fn lower_token(&mut self, token: Token) -> TokenStream {
match token.kind {
token::Interpolated(nt) => {
let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
TokenTree::Delimited(
DelimSpan::from_single(token.span),
DelimToken::NoDelim,
self.lower_token_stream(tts),
) )
.into()
} }
_ => TokenTree::Token(token).into(), // This is an inert key-value attribute - it will never be visible to macros
// after it gets lowered to HIR. Therefore, we can synthesize tokens with fake
// spans to handle nonterminals in `#[doc]` (e.g. `#[doc = $e]`).
MacArgs::Eq(eq_span, ref tokens) => MacArgs::Eq(
eq_span,
self.lower_token_stream(tokens.clone(), CanSynthesizeMissingTokens::Yes),
),
} }
} }
fn lower_token_stream(
&self,
tokens: TokenStream,
synthesize_tokens: CanSynthesizeMissingTokens,
) -> TokenStream {
TokenStreamLowering {
parse_sess: &self.sess.parse_sess,
synthesize_tokens,
nt_to_tokenstream: self.nt_to_tokenstream,
}
.lower_token_stream(tokens)
}
/// Given an associated type constraint like one of these: /// Given an associated type constraint like one of these:
/// ///
/// ``` /// ```
@ -1716,7 +1767,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
} }
self.arena.alloc_from_iter(inputs.iter().map(|param| match param.pat.kind { self.arena.alloc_from_iter(inputs.iter().map(|param| match param.pat.kind {
PatKind::Ident(_, ident, _) => ident, PatKind::Ident(_, ident, _) => ident,
_ => Ident::new(kw::Invalid, param.pat.span), _ => Ident::new(kw::Empty, param.pat.span),
})) }))
} }
@ -1806,12 +1857,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
output, output,
c_variadic, c_variadic,
implicit_self: decl.inputs.get(0).map_or(hir::ImplicitSelfKind::None, |arg| { implicit_self: decl.inputs.get(0).map_or(hir::ImplicitSelfKind::None, |arg| {
let is_mutable_pat = match arg.pat.kind { use BindingMode::{ByRef, ByValue};
PatKind::Ident(BindingMode::ByValue(mt) | BindingMode::ByRef(mt), _, _) => { let is_mutable_pat = matches!(
mt == Mutability::Mut arg.pat.kind,
} PatKind::Ident(ByValue(Mutability::Mut) | ByRef(Mutability::Mut), ..)
_ => false, );
};
match arg.ty.kind { match arg.ty.kind {
TyKind::ImplicitSelf if is_mutable_pat => hir::ImplicitSelfKind::Mut, TyKind::ImplicitSelf if is_mutable_pat => hir::ImplicitSelfKind::Mut,

View file

@ -184,7 +184,7 @@ impl<'a> AstValidator<'a> {
} }
fn check_lifetime(&self, ident: Ident) { fn check_lifetime(&self, ident: Ident) {
let valid_names = [kw::UnderscoreLifetime, kw::StaticLifetime, kw::Invalid]; let valid_names = [kw::UnderscoreLifetime, kw::StaticLifetime, kw::Empty];
if !valid_names.contains(&ident.name) && ident.without_first_quote().is_reserved() { if !valid_names.contains(&ident.name) && ident.without_first_quote().is_reserved() {
self.err_handler().span_err(ident.span, "lifetimes cannot use keyword names"); self.err_handler().span_err(ident.span, "lifetimes cannot use keyword names");
} }
@ -400,7 +400,7 @@ impl<'a> AstValidator<'a> {
if let Defaultness::Default(def_span) = defaultness { if let Defaultness::Default(def_span) = defaultness {
let span = self.session.source_map().guess_head_span(span); let span = self.session.source_map().guess_head_span(span);
self.err_handler() self.err_handler()
.struct_span_err(span, "`default` is only allowed on items in `impl` definitions") .struct_span_err(span, "`default` is only allowed on items in trait impls")
.span_label(def_span, "`default` because of this") .span_label(def_span, "`default` because of this")
.emit(); .emit();
} }
@ -717,35 +717,46 @@ impl<'a> AstValidator<'a> {
/// Checks that generic parameters are in the correct order, /// Checks that generic parameters are in the correct order,
/// which is lifetimes, then types and then consts. (`<'a, T, const N: usize>`) /// which is lifetimes, then types and then consts. (`<'a, T, const N: usize>`)
fn validate_generic_param_order<'a>( fn validate_generic_param_order(
sess: &Session, sess: &Session,
handler: &rustc_errors::Handler, handler: &rustc_errors::Handler,
generics: impl Iterator<Item = (ParamKindOrd, Option<&'a [GenericBound]>, Span, Option<String>)>, generics: &[GenericParam],
span: Span, span: Span,
) { ) {
let mut max_param: Option<ParamKindOrd> = None; let mut max_param: Option<ParamKindOrd> = None;
let mut out_of_order = FxHashMap::default(); let mut out_of_order = FxHashMap::default();
let mut param_idents = vec![]; let mut param_idents = vec![];
for (kind, bounds, span, ident) in generics { for param in generics {
let ident = Some(param.ident.to_string());
let (kind, bounds, span) = (&param.kind, Some(&*param.bounds), param.ident.span);
let (ord_kind, ident) = match &param.kind {
GenericParamKind::Lifetime => (ParamKindOrd::Lifetime, ident),
GenericParamKind::Type { default: _ } => (ParamKindOrd::Type, ident),
GenericParamKind::Const { ref ty, kw_span: _ } => {
let ty = pprust::ty_to_string(ty);
let unordered = sess.features_untracked().const_generics;
(ParamKindOrd::Const { unordered }, Some(format!("const {}: {}", param.ident, ty)))
}
};
if let Some(ident) = ident { if let Some(ident) = ident {
param_idents.push((kind, bounds, param_idents.len(), ident)); param_idents.push((kind, ord_kind, bounds, param_idents.len(), ident));
} }
let max_param = &mut max_param; let max_param = &mut max_param;
match max_param { match max_param {
Some(max_param) if *max_param > kind => { Some(max_param) if *max_param > ord_kind => {
let entry = out_of_order.entry(kind).or_insert((*max_param, vec![])); let entry = out_of_order.entry(ord_kind).or_insert((*max_param, vec![]));
entry.1.push(span); entry.1.push(span);
} }
Some(_) | None => *max_param = Some(kind), Some(_) | None => *max_param = Some(ord_kind),
}; };
} }
let mut ordered_params = "<".to_string(); let mut ordered_params = "<".to_string();
if !out_of_order.is_empty() { if !out_of_order.is_empty() {
param_idents.sort_by_key(|&(po, _, i, _)| (po, i)); param_idents.sort_by_key(|&(_, po, _, i, _)| (po, i));
let mut first = true; let mut first = true;
for (_, bounds, _, ident) in param_idents { for (kind, _, bounds, _, ident) in param_idents {
if !first { if !first {
ordered_params += ", "; ordered_params += ", ";
} }
@ -756,6 +767,16 @@ fn validate_generic_param_order<'a>(
ordered_params += &pprust::bounds_to_string(&bounds); ordered_params += &pprust::bounds_to_string(&bounds);
} }
} }
match kind {
GenericParamKind::Type { default: Some(default) } => {
ordered_params += " = ";
ordered_params += &pprust::ty_to_string(default);
}
GenericParamKind::Type { default: None } => (),
GenericParamKind::Lifetime => (),
// FIXME(const_generics:defaults)
GenericParamKind::Const { ty: _, kw_span: _ } => (),
}
first = false; first = false;
} }
} }
@ -773,14 +794,12 @@ fn validate_generic_param_order<'a>(
err.span_suggestion( err.span_suggestion(
span, span,
&format!( &format!(
"reorder the parameters: lifetimes{}", "reorder the parameters: lifetimes, {}",
if sess.features_untracked().const_generics { if sess.features_untracked().const_generics {
", then consts and types" "then consts and types"
} else if sess.features_untracked().min_const_generics {
", then types, then consts"
} else { } else {
", then types" "then types, then consts"
}, }
), ),
ordered_params.clone(), ordered_params.clone(),
Applicability::MachineApplicable, Applicability::MachineApplicable,
@ -1152,22 +1171,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
validate_generic_param_order( validate_generic_param_order(
self.session, self.session,
self.err_handler(), self.err_handler(),
generics.params.iter().map(|param| { &generics.params,
let ident = Some(param.ident.to_string());
let (kind, ident) = match &param.kind {
GenericParamKind::Lifetime => (ParamKindOrd::Lifetime, ident),
GenericParamKind::Type { default: _ } => (ParamKindOrd::Type, ident),
GenericParamKind::Const { ref ty, kw_span: _ } => {
let ty = pprust::ty_to_string(ty);
let unordered = self.session.features_untracked().const_generics;
(
ParamKindOrd::Const { unordered },
Some(format!("const {}: {}", param.ident, ty)),
)
}
};
(kind, Some(&*param.bounds), param.ident.span, ident)
}),
generics.span, generics.span,
); );

View file

@ -1,7 +1,7 @@
use rustc_ast as ast; use rustc_ast as ast;
use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor}; use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor};
use rustc_ast::{AssocTyConstraint, AssocTyConstraintKind, NodeId}; use rustc_ast::{AssocTyConstraint, AssocTyConstraintKind, NodeId};
use rustc_ast::{GenericParam, GenericParamKind, PatKind, RangeEnd, VariantData}; use rustc_ast::{PatKind, RangeEnd, VariantData};
use rustc_errors::struct_span_err; use rustc_errors::struct_span_err;
use rustc_feature::{AttributeGate, BUILTIN_ATTRIBUTE_MAP}; use rustc_feature::{AttributeGate, BUILTIN_ATTRIBUTE_MAP};
use rustc_feature::{Features, GateIssue}; use rustc_feature::{Features, GateIssue};
@ -397,10 +397,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
match i.kind { match i.kind {
ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) => { ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) => {
let link_name = self.sess.first_attr_value_str_by_name(&i.attrs, sym::link_name); let link_name = self.sess.first_attr_value_str_by_name(&i.attrs, sym::link_name);
let links_to_llvm = match link_name { let links_to_llvm =
Some(val) => val.as_str().starts_with("llvm."), link_name.map_or(false, |val| val.as_str().starts_with("llvm."));
_ => false,
};
if links_to_llvm { if links_to_llvm {
gate_feature_post!( gate_feature_post!(
&self, &self,
@ -529,19 +527,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
visit::walk_fn(self, fn_kind, span) visit::walk_fn(self, fn_kind, span)
} }
fn visit_generic_param(&mut self, param: &'a GenericParam) {
if let GenericParamKind::Const { .. } = param.kind {
gate_feature_fn!(
&self,
|x: &Features| x.const_generics || x.min_const_generics,
param.ident.span,
sym::min_const_generics,
"const generics are unstable"
);
}
visit::walk_generic_param(self, param)
}
fn visit_assoc_ty_constraint(&mut self, constraint: &'a AssocTyConstraint) { fn visit_assoc_ty_constraint(&mut self, constraint: &'a AssocTyConstraint) {
if let AssocTyConstraintKind::Bound { .. } = constraint.kind { if let AssocTyConstraintKind::Bound { .. } = constraint.kind {
gate_feature_post!( gate_feature_post!(
@ -620,7 +605,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
} }
}; };
} }
gate_all!(if_let_guard, "`if let` guard is not implemented"); gate_all!(if_let_guard, "`if let` guards are experimental");
gate_all!(let_chains, "`let` expressions in this position are experimental"); gate_all!(let_chains, "`let` expressions in this position are experimental");
gate_all!(async_closure, "async closures are unstable"); gate_all!(async_closure, "async closures are unstable");
gate_all!(generators, "yield syntax is experimental"); gate_all!(generators, "yield syntax is experimental");

View file

@ -75,7 +75,7 @@
//! breaking inconsistently to become //! breaking inconsistently to become
//! //!
//! ``` //! ```
//! foo(hello, there //! foo(hello, there,
//! good, friends); //! good, friends);
//! ``` //! ```
//! //!
@ -83,7 +83,7 @@
//! //!
//! ``` //! ```
//! foo(hello, //! foo(hello,
//! there //! there,
//! good, //! good,
//! friends); //! friends);
//! ``` //! ```

View file

@ -2420,7 +2420,15 @@ impl<'a> State<'a> {
if mutbl == ast::Mutability::Mut { if mutbl == ast::Mutability::Mut {
self.s.word("mut "); self.s.word("mut ");
} }
self.print_pat(inner); if let PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Mut), ..) =
inner.kind
{
self.popen();
self.print_pat(inner);
self.pclose();
} else {
self.print_pat(inner);
}
} }
PatKind::Lit(ref e) => self.print_expr(&**e), PatKind::Lit(ref e) => self.print_expr(&**e),
PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => { PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => {
@ -2779,7 +2787,7 @@ impl<'a> State<'a> {
self.print_explicit_self(&eself); self.print_explicit_self(&eself);
} else { } else {
let invalid = if let PatKind::Ident(_, ident, _) = input.pat.kind { let invalid = if let PatKind::Ident(_, ident, _) = input.pat.kind {
ident.name == kw::Invalid ident.name == kw::Empty
} else { } else {
false false
}; };

View file

@ -621,7 +621,7 @@ pub fn eval_condition(
} }
} }
#[derive(Encodable, Decodable, Clone, HashStable_Generic)] #[derive(Debug, Encodable, Decodable, Clone, HashStable_Generic)]
pub struct Deprecation { pub struct Deprecation {
pub since: Option<Symbol>, pub since: Option<Symbol>,
/// The note to issue a reason. /// The note to issue a reason.

View file

@ -38,10 +38,9 @@ pub fn expand_deriving_clone(
| ItemKind::Enum(_, Generics { ref params, .. }) => { | ItemKind::Enum(_, Generics { ref params, .. }) => {
let container_id = cx.current_expansion.id.expn_data().parent; let container_id = cx.current_expansion.id.expn_data().parent;
if cx.resolver.has_derive_copy(container_id) if cx.resolver.has_derive_copy(container_id)
&& !params.iter().any(|param| match param.kind { && !params
ast::GenericParamKind::Type { .. } => true, .iter()
_ => false, .any(|param| matches!(param.kind, ast::GenericParamKind::Type { .. }))
})
{ {
bounds = vec![]; bounds = vec![];
is_shallow = true; is_shallow = true;

View file

@ -257,7 +257,10 @@ pub struct Substructure<'a> {
pub type_ident: Ident, pub type_ident: Ident,
/// ident of the method /// ident of the method
pub method_ident: Ident, pub method_ident: Ident,
/// dereferenced access to any `Self_` or `Ptr(Self_, _)` arguments /// dereferenced access to any [`Self_`] or [`Ptr(Self_, _)][ptr]` arguments
///
/// [`Self_`]: ty::Ty::Self_
/// [ptr]: ty::Ty::Ptr
pub self_args: &'a [P<Expr>], pub self_args: &'a [P<Expr>],
/// verbatim access to any other arguments /// verbatim access to any other arguments
pub nonself_args: &'a [P<Expr>], pub nonself_args: &'a [P<Expr>],
@ -401,12 +404,10 @@ impl<'a> TraitDef<'a> {
let has_no_type_params = match item.kind { let has_no_type_params = match item.kind {
ast::ItemKind::Struct(_, ref generics) ast::ItemKind::Struct(_, ref generics)
| ast::ItemKind::Enum(_, ref generics) | ast::ItemKind::Enum(_, ref generics)
| ast::ItemKind::Union(_, ref generics) => { | ast::ItemKind::Union(_, ref generics) => !generics
!generics.params.iter().any(|param| match param.kind { .params
ast::GenericParamKind::Type { .. } => true, .iter()
_ => false, .any(|param| matches!(param.kind, ast::GenericParamKind::Type { .. })),
})
}
_ => unreachable!(), _ => unreachable!(),
}; };
let container_id = cx.current_expansion.id.expn_data().parent; let container_id = cx.current_expansion.id.expn_data().parent;
@ -597,10 +598,7 @@ impl<'a> TraitDef<'a> {
let mut ty_params = params let mut ty_params = params
.iter() .iter()
.filter_map(|param| match param.kind { .filter(|param| matches!(param.kind, ast::GenericParamKind::Type{..}))
ast::GenericParamKind::Type { .. } => Some(param),
_ => None,
})
.peekable(); .peekable();
if ty_params.peek().is_some() { if ty_params.peek().is_some() {
@ -868,7 +866,7 @@ impl<'a> MethodDef<'a> {
Self_ if nonstatic => { Self_ if nonstatic => {
self_args.push(arg_expr); self_args.push(arg_expr);
} }
Ptr(ref ty, _) if (if let Self_ = **ty { true } else { false }) && nonstatic => { Ptr(ref ty, _) if matches!(**ty, Self_) && nonstatic => {
self_args.push(cx.expr_deref(trait_.span, arg_expr)) self_args.push(cx.expr_deref(trait_.span, arg_expr))
} }
_ => { _ => {

View file

@ -1044,10 +1044,7 @@ pub fn expand_preparsed_format_args(
let numbered_position_args = pieces.iter().any(|arg: &parse::Piece<'_>| match *arg { let numbered_position_args = pieces.iter().any(|arg: &parse::Piece<'_>| match *arg {
parse::String(_) => false, parse::String(_) => false,
parse::NextArgument(arg) => match arg.position { parse::NextArgument(arg) => matches!(arg.position, parse::Position::ArgumentIs(_)),
parse::Position::ArgumentIs(_) => true,
_ => false,
},
}); });
cx.build_index_map(); cx.build_index_map();

View file

@ -580,10 +580,7 @@ pub mod printf {
} }
fn is_flag(c: &char) -> bool { fn is_flag(c: &char) -> bool {
match c { matches!(c, '0' | '-' | '+' | ' ' | '#' | '\'')
'0' | '-' | '+' | ' ' | '#' | '\'' => true,
_ => false,
}
} }
#[cfg(test)] #[cfg(test)]

View file

@ -87,13 +87,15 @@ fn parse_inline_asm<'a>(
// parsed as `llvm_asm!(z)` with `z = "x": y` which is type ascription. // parsed as `llvm_asm!(z)` with `z = "x": y` which is type ascription.
let first_colon = tts let first_colon = tts
.trees() .trees()
.position(|tt| match tt { .position(|tt| {
tokenstream::TokenTree::Token(Token { kind: token::Colon | token::ModSep, .. }) => true, matches!(
_ => false, tt,
tokenstream::TokenTree::Token(Token { kind: token::Colon | token::ModSep, .. })
)
}) })
.unwrap_or(tts.len()); .unwrap_or(tts.len());
let mut p = cx.new_parser_from_tts(tts.trees().skip(first_colon).collect()); let mut p = cx.new_parser_from_tts(tts.trees().skip(first_colon).collect());
let mut asm = kw::Invalid; let mut asm = kw::Empty;
let mut asm_str_style = None; let mut asm_str_style = None;
let mut outputs = Vec::new(); let mut outputs = Vec::new();
let mut inputs = Vec::new(); let mut inputs = Vec::new();

View file

@ -256,10 +256,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {
// we're just not interested in this item. // we're just not interested in this item.
// //
// If we find one, try to locate a `#[proc_macro_derive]` attribute on it. // If we find one, try to locate a `#[proc_macro_derive]` attribute on it.
let is_fn = match item.kind { let is_fn = matches!(item.kind, ast::ItemKind::Fn(..));
ast::ItemKind::Fn(..) => true,
_ => false,
};
let mut found_attr: Option<&'a ast::Attribute> = None; let mut found_attr: Option<&'a ast::Attribute> = None;

View file

@ -1,6 +1,7 @@
{ {
// source for rustc_* is not included in the rust-src component; disable the errors about this // source for rustc_* is not included in the rust-src component; disable the errors about this
"rust-analyzer.diagnostics.disabled": ["unresolved-extern-crate"], "rust-analyzer.diagnostics.disabled": ["unresolved-extern-crate"],
"rust-analyzer.assist.importMergeBehaviour": "last",
"rust-analyzer.cargo.loadOutDirsFromCheck": true, "rust-analyzer.cargo.loadOutDirsFromCheck": true,
"rust-analyzer.linkedProjects": [ "rust-analyzer.linkedProjects": [
"./Cargo.toml", "./Cargo.toml",

View file

@ -50,7 +50,7 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]] [[package]]
name = "cranelift-bforest" name = "cranelift-bforest"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
dependencies = [ dependencies = [
"cranelift-entity", "cranelift-entity",
] ]
@ -58,7 +58,7 @@ dependencies = [
[[package]] [[package]]
name = "cranelift-codegen" name = "cranelift-codegen"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
dependencies = [ dependencies = [
"byteorder", "byteorder",
"cranelift-bforest", "cranelift-bforest",
@ -76,7 +76,7 @@ dependencies = [
[[package]] [[package]]
name = "cranelift-codegen-meta" name = "cranelift-codegen-meta"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
dependencies = [ dependencies = [
"cranelift-codegen-shared", "cranelift-codegen-shared",
"cranelift-entity", "cranelift-entity",
@ -85,17 +85,17 @@ dependencies = [
[[package]] [[package]]
name = "cranelift-codegen-shared" name = "cranelift-codegen-shared"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
[[package]] [[package]]
name = "cranelift-entity" name = "cranelift-entity"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
[[package]] [[package]]
name = "cranelift-frontend" name = "cranelift-frontend"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
dependencies = [ dependencies = [
"cranelift-codegen", "cranelift-codegen",
"log", "log",
@ -103,10 +103,28 @@ dependencies = [
"target-lexicon", "target-lexicon",
] ]
[[package]]
name = "cranelift-jit"
version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
dependencies = [
"anyhow",
"cranelift-codegen",
"cranelift-entity",
"cranelift-module",
"cranelift-native",
"errno",
"libc",
"log",
"region",
"target-lexicon",
"winapi",
]
[[package]] [[package]]
name = "cranelift-module" name = "cranelift-module"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cranelift-codegen", "cranelift-codegen",
@ -118,7 +136,7 @@ dependencies = [
[[package]] [[package]]
name = "cranelift-native" name = "cranelift-native"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
dependencies = [ dependencies = [
"cranelift-codegen", "cranelift-codegen",
"raw-cpuid", "raw-cpuid",
@ -128,7 +146,7 @@ dependencies = [
[[package]] [[package]]
name = "cranelift-object" name = "cranelift-object"
version = "0.68.0" version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cranelift-codegen", "cranelift-codegen",
@ -138,23 +156,6 @@ dependencies = [
"target-lexicon", "target-lexicon",
] ]
[[package]]
name = "cranelift-simplejit"
version = "0.68.0"
source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3"
dependencies = [
"cranelift-codegen",
"cranelift-entity",
"cranelift-module",
"cranelift-native",
"errno",
"libc",
"log",
"region",
"target-lexicon",
"winapi",
]
[[package]] [[package]]
name = "crc32fast" name = "crc32fast"
version = "1.2.1" version = "1.2.1"
@ -325,9 +326,9 @@ dependencies = [
"ar", "ar",
"cranelift-codegen", "cranelift-codegen",
"cranelift-frontend", "cranelift-frontend",
"cranelift-jit",
"cranelift-module", "cranelift-module",
"cranelift-object", "cranelift-object",
"cranelift-simplejit",
"gimli", "gimli",
"indexmap", "indexmap",
"libloading", "libloading",

View file

@ -12,7 +12,7 @@ crate-type = ["dylib"]
cranelift-codegen = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", features = ["unwind"] } cranelift-codegen = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", features = ["unwind"] }
cranelift-frontend = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" } cranelift-frontend = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
cranelift-module = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" } cranelift-module = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
cranelift-simplejit = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", optional = true } cranelift-jit = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", optional = true }
cranelift-object = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" } cranelift-object = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
target-lexicon = "0.11.0" target-lexicon = "0.11.0"
gimli = { version = "0.23.0", default-features = false, features = ["write"]} gimli = { version = "0.23.0", default-features = false, features = ["write"]}
@ -27,7 +27,7 @@ libloading = { version = "0.6.0", optional = true }
#cranelift-codegen = { path = "../wasmtime/cranelift/codegen" } #cranelift-codegen = { path = "../wasmtime/cranelift/codegen" }
#cranelift-frontend = { path = "../wasmtime/cranelift/frontend" } #cranelift-frontend = { path = "../wasmtime/cranelift/frontend" }
#cranelift-module = { path = "../wasmtime/cranelift/module" } #cranelift-module = { path = "../wasmtime/cranelift/module" }
#cranelift-simplejit = { path = "../wasmtime/cranelift/simplejit" } #cranelift-jit = { path = "../wasmtime/cranelift/jit" }
#cranelift-object = { path = "../wasmtime/cranelift/object" } #cranelift-object = { path = "../wasmtime/cranelift/object" }
#[patch.crates-io] #[patch.crates-io]
@ -35,7 +35,7 @@ libloading = { version = "0.6.0", optional = true }
[features] [features]
default = ["jit", "inline_asm"] default = ["jit", "inline_asm"]
jit = ["cranelift-simplejit", "libloading"] jit = ["cranelift-jit", "libloading"]
inline_asm = [] inline_asm = []
[profile.dev] [profile.dev]

View file

@ -2,7 +2,7 @@
> ⚠⚠⚠ Certain kinds of FFI don't work yet. ⚠⚠⚠ > ⚠⚠⚠ Certain kinds of FFI don't work yet. ⚠⚠⚠
The goal of this project is to create an alternative codegen backend for the rust compiler based on [Cranelift](https://github.com/bytecodealliance/wasmtime/blob/master/cranelift). The goal of this project is to create an alternative codegen backend for the rust compiler based on [Cranelift](https://github.com/bytecodealliance/wasmtime/blob/main/cranelift).
This has the potential to improve compilation times in debug mode. This has the potential to improve compilation times in debug mode.
If your project doesn't use any of the things listed under "Not yet supported", it should work fine. If your project doesn't use any of the things listed under "Not yet supported", it should work fine.
If not please open an issue. If not please open an issue.
@ -68,7 +68,15 @@ $ $cg_clif_dir/build/cargo.sh jit
or or
```bash ```bash
$ $cg_clif_dir/build/bin/cg_clif --jit my_crate.rs $ $cg_clif_dir/build/bin/cg_clif -Cllvm-args=mode=jit -Cprefer-dynamic my_crate.rs
```
There is also an experimental lazy jit mode. In this mode functions are only compiled once they are
first called. It currently does not work with multi-threaded programs. When a not yet compiled
function is called from another thread than the main thread, you will get an ICE.
```bash
$ $cg_clif_dir/build/cargo.sh lazy-jit
``` ```
### Shell ### Shell
@ -77,7 +85,7 @@ These are a few functions that allow you to easily run rust code from the shell
```bash ```bash
function jit_naked() { function jit_naked() {
echo "$@" | $cg_clif_dir/build/bin/cg_clif - --jit echo "$@" | $cg_clif_dir/build/bin/cg_clif - -Cllvm-args=mode=jit -Cprefer-dynamic
} }
function jit() { function jit() {

View file

@ -47,9 +47,9 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.0.65" version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95752358c8f7552394baf48cd82695b345628ad3f170d607de3ca03b8dacca15" checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
[[package]] [[package]]
name = "cfg-if" name = "cfg-if"
@ -141,9 +141,9 @@ dependencies = [
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.80" version = "0.2.81"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614" checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
dependencies = [ dependencies = [
"rustc-std-workspace-core", "rustc-std-workspace-core",
] ]

View file

@ -5,13 +5,14 @@ version = "0.0.0"
[dependencies] [dependencies]
core = { path = "./sysroot_src/library/core" } core = { path = "./sysroot_src/library/core" }
compiler_builtins = "0.1"
alloc = { path = "./sysroot_src/library/alloc" } alloc = { path = "./sysroot_src/library/alloc" }
std = { path = "./sysroot_src/library/std", features = ["panic_unwind", "backtrace"] } std = { path = "./sysroot_src/library/std", features = ["panic_unwind", "backtrace"] }
test = { path = "./sysroot_src/library/test" } test = { path = "./sysroot_src/library/test" }
alloc_system = { path = "./alloc_system" } alloc_system = { path = "./alloc_system" }
compiler_builtins = { version = "=0.1.36", default-features = false }
[patch.crates-io] [patch.crates-io]
rustc-std-workspace-core = { path = "./sysroot_src/library/rustc-std-workspace-core" } rustc-std-workspace-core = { path = "./sysroot_src/library/rustc-std-workspace-core" }
rustc-std-workspace-alloc = { path = "./sysroot_src/library/rustc-std-workspace-alloc" } rustc-std-workspace-alloc = { path = "./sysroot_src/library/rustc-std-workspace-alloc" }

View file

@ -15,6 +15,8 @@ fn main() {
let stderr = ::std::io::stderr(); let stderr = ::std::io::stderr();
let mut stderr = stderr.lock(); let mut stderr = stderr.lock();
// FIXME support lazy jit when multi threading
#[cfg(not(lazy_jit))]
std::thread::spawn(move || { std::thread::spawn(move || {
println!("Hello from another thread!"); println!("Hello from another thread!");
}); });

View file

@ -1 +1 @@
nightly-2020-11-27 nightly-2020-12-23

View file

@ -10,7 +10,9 @@ cmd=$1
shift || true shift || true
if [[ "$cmd" = "jit" ]]; then if [[ "$cmd" = "jit" ]]; then
cargo "+${TOOLCHAIN}" rustc "$@" -- --jit cargo "+${TOOLCHAIN}" rustc "$@" -- -Cllvm-args=mode=jit -Cprefer-dynamic
elif [[ "$cmd" = "lazy-jit" ]]; then
cargo "+${TOOLCHAIN}" rustc "$@" -- -Cllvm-args=mode=jit-lazy -Cprefer-dynamic
else else
cargo "+${TOOLCHAIN}" "$cmd" "$@" cargo "+${TOOLCHAIN}" "$cmd" "$@"
fi fi

View file

@ -4,7 +4,7 @@
pushd $(dirname "$0")/../ pushd $(dirname "$0")/../
source build/config.sh source build/config.sh
popd popd
PROFILE=$1 OUTPUT=$2 exec $RUSTC $RUSTFLAGS --jit $0 PROFILE=$1 OUTPUT=$2 exec $RUSTC $RUSTFLAGS -Cllvm-args=mode=jit -Cprefer-dynamic $0
#*/ #*/
//! This program filters away uninteresting samples and trims uninteresting frames for stackcollapse //! This program filters away uninteresting samples and trims uninteresting frames for stackcollapse

View file

@ -15,7 +15,10 @@ function no_sysroot_tests() {
if [[ "$JIT_SUPPORTED" = "1" ]]; then if [[ "$JIT_SUPPORTED" = "1" ]]; then
echo "[JIT] mini_core_hello_world" echo "[JIT] mini_core_hello_world"
CG_CLIF_JIT_ARGS="abc bcd" $MY_RUSTC --jit example/mini_core_hello_world.rs --cfg jit --target "$HOST_TRIPLE" CG_CLIF_JIT_ARGS="abc bcd" $MY_RUSTC -Cllvm-args=mode=jit -Cprefer-dynamic example/mini_core_hello_world.rs --cfg jit --target "$HOST_TRIPLE"
echo "[JIT-lazy] mini_core_hello_world"
CG_CLIF_JIT_ARGS="abc bcd" $MY_RUSTC -Cllvm-args=mode=jit-lazy -Cprefer-dynamic example/mini_core_hello_world.rs --cfg jit --target "$HOST_TRIPLE"
else else
echo "[JIT] mini_core_hello_world (skipped)" echo "[JIT] mini_core_hello_world (skipped)"
fi fi
@ -37,7 +40,10 @@ function base_sysroot_tests() {
if [[ "$JIT_SUPPORTED" = "1" ]]; then if [[ "$JIT_SUPPORTED" = "1" ]]; then
echo "[JIT] std_example" echo "[JIT] std_example"
$MY_RUSTC --jit example/std_example.rs --target "$HOST_TRIPLE" $MY_RUSTC -Cllvm-args=mode=jit -Cprefer-dynamic example/std_example.rs --target "$HOST_TRIPLE"
echo "[JIT-lazy] std_example"
$MY_RUSTC -Cllvm-args=mode=jit-lazy -Cprefer-dynamic example/std_example.rs --cfg lazy_jit --target "$HOST_TRIPLE"
else else
echo "[JIT] std_example (skipped)" echo "[JIT] std_example (skipped)"
fi fi

View file

@ -64,7 +64,7 @@ pub(crate) fn fn_sig_for_fn_abi<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx
ty::Generator(_, substs, _) => { ty::Generator(_, substs, _) => {
let sig = substs.as_generator().poly_sig(); let sig = substs.as_generator().poly_sig();
let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv); let env_region = ty::ReLateBound(ty::INNERMOST, ty::BoundRegion { kind: ty::BrEnv });
let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty); let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);
let pin_did = tcx.require_lang_item(rustc_hir::LangItem::Pin, None); let pin_did = tcx.require_lang_item(rustc_hir::LangItem::Pin, None);

View file

@ -162,7 +162,7 @@ impl AddConstructor for ObjectProduct {
} }
pub(crate) fn with_object(sess: &Session, name: &str, f: impl FnOnce(&mut Object)) -> Vec<u8> { pub(crate) fn with_object(sess: &Session, name: &str, f: impl FnOnce(&mut Object)) -> Vec<u8> {
let triple = crate::build_isa(sess, true).triple().clone(); let triple = crate::build_isa(sess).triple().clone();
let binary_format = match triple.binary_format { let binary_format = match triple.binary_format {
target_lexicon::BinaryFormat::Elf => object::BinaryFormat::Elf, target_lexicon::BinaryFormat::Elf => object::BinaryFormat::Elf,
@ -193,7 +193,7 @@ pub(crate) fn with_object(sess: &Session, name: &str, f: impl FnOnce(&mut Object
pub(crate) fn make_module(sess: &Session, name: String) -> ObjectModule { pub(crate) fn make_module(sess: &Session, name: String) -> ObjectModule {
let mut builder = ObjectBuilder::new( let mut builder = ObjectBuilder::new(
crate::build_isa(sess, true), crate::build_isa(sess),
name + ".o", name + ".o",
cranelift_module::default_libcall_names(), cranelift_module::default_libcall_names(),
) )

View file

@ -118,6 +118,8 @@ pub(crate) fn codegen_fn<'tcx>(
context.eliminate_unreachable_code(cx.module.isa()).unwrap(); context.eliminate_unreachable_code(cx.module.isa()).unwrap();
context.dce(cx.module.isa()).unwrap(); context.dce(cx.module.isa()).unwrap();
context.want_disasm = crate::pretty_clif::should_write_ir(tcx);
// Define function // Define function
let module = &mut cx.module; let module = &mut cx.module;
tcx.sess.time("define function", || { tcx.sess.time("define function", || {
@ -140,6 +142,16 @@ pub(crate) fn codegen_fn<'tcx>(
&clif_comments, &clif_comments,
); );
if let Some(mach_compile_result) = &context.mach_compile_result {
if let Some(disasm) = &mach_compile_result.disasm {
crate::pretty_clif::write_ir_file(
tcx,
&format!("{}.vcode", tcx.symbol_name(instance).name),
|file| file.write_all(disasm.as_bytes()),
)
}
}
// Define debuginfo for function // Define debuginfo for function
let isa = cx.module.isa(); let isa = cx.module.isa();
let debug_context = &mut cx.debug_context; let debug_context = &mut cx.debug_context;
@ -307,7 +319,9 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
} => { } => {
let discr = codegen_operand(fx, discr).load_scalar(fx); let discr = codegen_operand(fx, discr).load_scalar(fx);
if switch_ty.kind() == fx.tcx.types.bool.kind() { let use_bool_opt = switch_ty.kind() == fx.tcx.types.bool.kind()
|| (targets.iter().count() == 1 && targets.iter().next().unwrap().0 == 0);
if use_bool_opt {
assert_eq!(targets.iter().count(), 1); assert_eq!(targets.iter().count(), 1);
let (then_value, then_block) = targets.iter().next().unwrap(); let (then_value, then_block) = targets.iter().next().unwrap();
let then_block = fx.get_block(then_block); let then_block = fx.get_block(then_block);
@ -325,12 +339,22 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
let discr = crate::optimize::peephole::maybe_unwrap_bint(&mut fx.bcx, discr); let discr = crate::optimize::peephole::maybe_unwrap_bint(&mut fx.bcx, discr);
let discr = let discr =
crate::optimize::peephole::make_branchable_value(&mut fx.bcx, discr); crate::optimize::peephole::make_branchable_value(&mut fx.bcx, discr);
if test_zero { if let Some(taken) = crate::optimize::peephole::maybe_known_branch_taken(
fx.bcx.ins().brz(discr, then_block, &[]); &fx.bcx, discr, test_zero,
fx.bcx.ins().jump(else_block, &[]); ) {
if taken {
fx.bcx.ins().jump(then_block, &[]);
} else {
fx.bcx.ins().jump(else_block, &[]);
}
} else { } else {
fx.bcx.ins().brnz(discr, then_block, &[]); if test_zero {
fx.bcx.ins().jump(else_block, &[]); fx.bcx.ins().brz(discr, then_block, &[]);
fx.bcx.ins().jump(else_block, &[]);
} else {
fx.bcx.ins().brnz(discr, then_block, &[]);
fx.bcx.ins().jump(else_block, &[]);
}
} }
} else { } else {
let mut switch = ::cranelift_frontend::Switch::new(); let mut switch = ::cranelift_frontend::Switch::new();

View file

@ -44,9 +44,7 @@ fn main() {
let mut callbacks = CraneliftPassesCallbacks::default(); let mut callbacks = CraneliftPassesCallbacks::default();
rustc_driver::install_ice_hook(); rustc_driver::install_ice_hook();
let exit_code = rustc_driver::catch_with_exit_code(|| { let exit_code = rustc_driver::catch_with_exit_code(|| {
let mut use_jit = false; let args = std::env::args_os()
let mut args = std::env::args_os()
.enumerate() .enumerate()
.map(|(i, arg)| { .map(|(i, arg)| {
arg.into_string().unwrap_or_else(|arg| { arg.into_string().unwrap_or_else(|arg| {
@ -56,23 +54,10 @@ fn main() {
) )
}) })
}) })
.filter(|arg| {
if arg == "--jit" {
use_jit = true;
false
} else {
true
}
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if use_jit {
args.push("-Cprefer-dynamic".to_string());
}
let mut run_compiler = rustc_driver::RunCompiler::new(&args, &mut callbacks); let mut run_compiler = rustc_driver::RunCompiler::new(&args, &mut callbacks);
run_compiler.set_make_codegen_backend(Some(Box::new(move |_| { run_compiler.set_make_codegen_backend(Some(Box::new(move |_| {
Box::new(rustc_codegen_cranelift::CraneliftCodegenBackend { Box::new(rustc_codegen_cranelift::CraneliftCodegenBackend { config: None })
config: rustc_codegen_cranelift::BackendConfig { use_jit },
})
}))); })));
run_compiler.run() run_compiler.run()
}); });

View file

@ -92,9 +92,7 @@ fn main() {
let mut run_compiler = rustc_driver::RunCompiler::new(&args, &mut callbacks); let mut run_compiler = rustc_driver::RunCompiler::new(&args, &mut callbacks);
if use_clif { if use_clif {
run_compiler.set_make_codegen_backend(Some(Box::new(move |_| { run_compiler.set_make_codegen_backend(Some(Box::new(move |_| {
Box::new(rustc_codegen_cranelift::CraneliftCodegenBackend { Box::new(rustc_codegen_cranelift::CraneliftCodegenBackend { config: None })
config: rustc_codegen_cranelift::BackendConfig { use_jit: false },
})
}))); })));
} }
run_compiler.run() run_compiler.run()

View file

@ -100,7 +100,10 @@ fn codegen_static_ref<'tcx>(
let global_ptr = fx.bcx.ins().global_value(fx.pointer_type, local_data_id); let global_ptr = fx.bcx.ins().global_value(fx.pointer_type, local_data_id);
assert!(!layout.is_unsized(), "unsized statics aren't supported"); assert!(!layout.is_unsized(), "unsized statics aren't supported");
assert!( assert!(
matches!(fx.bcx.func.global_values[local_data_id], GlobalValueData::Symbol { tls: false, ..}), matches!(
fx.bcx.func.global_values[local_data_id],
GlobalValueData::Symbol { tls: false, .. }
),
"tls static referenced without Rvalue::ThreadLocalRef" "tls static referenced without Rvalue::ThreadLocalRef"
); );
CPlace::for_ptr(crate::pointer::Pointer::new(global_ptr), layout) CPlace::for_ptr(crate::pointer::Pointer::new(global_ptr), layout)
@ -447,7 +450,8 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut impl Module, cx: &mut Constan
data_ctx.write_data_addr(offset.bytes() as u32, global_value, addend as i64); data_ctx.write_data_addr(offset.bytes() as u32, global_value, addend as i64);
} }
module.define_data(data_id, &data_ctx).unwrap(); // FIXME don't duplicate definitions in lazy jit mode
let _ = module.define_data(data_id, &data_ctx);
cx.done.insert(data_id); cx.done.insert(data_id);
} }

View file

@ -74,10 +74,7 @@ impl WriterRelocate {
/// Perform the collected relocations to be usable for JIT usage. /// Perform the collected relocations to be usable for JIT usage.
#[cfg(feature = "jit")] #[cfg(feature = "jit")]
pub(super) fn relocate_for_jit( pub(super) fn relocate_for_jit(mut self, jit_module: &cranelift_jit::JITModule) -> Vec<u8> {
mut self,
jit_module: &cranelift_simplejit::SimpleJITModule,
) -> Vec<u8> {
use std::convert::TryInto; use std::convert::TryInto;
for reloc in self.relocs.drain(..) { for reloc in self.relocs.drain(..) {

View file

@ -15,11 +15,11 @@ pub(crate) struct UnwindContext<'tcx> {
} }
impl<'tcx> UnwindContext<'tcx> { impl<'tcx> UnwindContext<'tcx> {
pub(crate) fn new(tcx: TyCtxt<'tcx>, isa: &dyn TargetIsa) -> Self { pub(crate) fn new(tcx: TyCtxt<'tcx>, isa: &dyn TargetIsa, pic_eh_frame: bool) -> Self {
let mut frame_table = FrameTable::default(); let mut frame_table = FrameTable::default();
let cie_id = if let Some(mut cie) = isa.create_systemv_cie() { let cie_id = if let Some(mut cie) = isa.create_systemv_cie() {
if isa.flags().is_pic() { if pic_eh_frame {
cie.fde_address_encoding = cie.fde_address_encoding =
gimli::DwEhPe(gimli::DW_EH_PE_pcrel.0 | gimli::DW_EH_PE_sdata4.0); gimli::DwEhPe(gimli::DW_EH_PE_pcrel.0 | gimli::DW_EH_PE_sdata4.0);
} }
@ -80,7 +80,7 @@ impl<'tcx> UnwindContext<'tcx> {
#[cfg(feature = "jit")] #[cfg(feature = "jit")]
pub(crate) unsafe fn register_jit( pub(crate) unsafe fn register_jit(
self, self,
jit_module: &cranelift_simplejit::SimpleJITModule, jit_module: &cranelift_jit::JITModule,
) -> Option<UnwindRegistry> { ) -> Option<UnwindRegistry> {
let mut eh_frame = EhFrame::from(super::emit::WriterRelocate::new(super::target_endian( let mut eh_frame = EhFrame::from(super::emit::WriterRelocate::new(super::target_endian(
self.tcx, self.tcx,

View file

@ -8,7 +8,7 @@ use rustc_codegen_ssa::{CodegenResults, CompiledModule, CrateInfo, ModuleKind};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
use rustc_middle::middle::cstore::EncodedMetadata; use rustc_middle::middle::cstore::EncodedMetadata;
use rustc_middle::mir::mono::CodegenUnit; use rustc_middle::mir::mono::{CodegenUnit, MonoItem};
use rustc_session::cgu_reuse_tracker::CguReuse; use rustc_session::cgu_reuse_tracker::CguReuse;
use rustc_session::config::{DebugInfo, OutputType}; use rustc_session::config::{DebugInfo, OutputType};
@ -75,6 +75,7 @@ fn emit_module(
name, name,
kind, kind,
object: Some(tmp_file), object: Some(tmp_file),
dwarf_object: None,
bytecode: None, bytecode: None,
}, },
work_product, work_product,
@ -111,6 +112,7 @@ fn reuse_workproduct_for_cgu(
name: cgu.name().to_string(), name: cgu.name().to_string(),
kind: ModuleKind::Regular, kind: ModuleKind::Regular,
object, object,
dwarf_object: None,
bytecode: None, bytecode: None,
} }
} }
@ -144,11 +146,34 @@ fn module_codegen(tcx: TyCtxt<'_>, cgu_name: rustc_span::Symbol) -> ModuleCodege
} }
} }
let mut cx = crate::CodegenCx::new(tcx, module, tcx.sess.opts.debuginfo != DebugInfo::None); let mut cx = crate::CodegenCx::new(
tcx,
module,
tcx.sess.opts.debuginfo != DebugInfo::None,
true,
);
super::predefine_mono_items(&mut cx, &mono_items); super::predefine_mono_items(&mut cx, &mono_items);
for (mono_item, (linkage, visibility)) in mono_items { for (mono_item, (linkage, visibility)) in mono_items {
let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility); let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
super::codegen_mono_item(&mut cx, mono_item, linkage); match mono_item {
MonoItem::Fn(inst) => {
cx.tcx.sess.time("codegen fn", || {
crate::base::codegen_fn(&mut cx, inst, linkage)
});
}
MonoItem::Static(def_id) => {
crate::constant::codegen_static(&mut cx.constants_cx, def_id)
}
MonoItem::GlobalAsm(hir_id) => {
let item = cx.tcx.hir().expect_item(hir_id);
if let rustc_hir::ItemKind::GlobalAsm(rustc_hir::GlobalAsm { asm }) = item.kind {
cx.global_asm.push_str(&*asm.as_str());
cx.global_asm.push_str("\n\n");
} else {
bug!("Expected GlobalAsm found {:?}", item);
}
}
}
} }
let (mut module, global_asm, debug, mut unwind_context) = let (mut module, global_asm, debug, mut unwind_context) =
tcx.sess.time("finalize CodegenCx", || cx.finalize()); tcx.sess.time("finalize CodegenCx", || cx.finalize());
@ -234,7 +259,7 @@ pub(super) fn run_aot(
tcx.sess.abort_if_errors(); tcx.sess.abort_if_errors();
let mut allocator_module = new_module(tcx, "allocator_shim".to_string()); let mut allocator_module = new_module(tcx, "allocator_shim".to_string());
let mut allocator_unwind_context = UnwindContext::new(tcx, allocator_module.isa()); let mut allocator_unwind_context = UnwindContext::new(tcx, allocator_module.isa(), true);
let created_alloc_shim = let created_alloc_shim =
crate::allocator::codegen(tcx, &mut allocator_module, &mut allocator_unwind_context); crate::allocator::codegen(tcx, &mut allocator_module, &mut allocator_unwind_context);
@ -290,6 +315,7 @@ pub(super) fn run_aot(
name: metadata_cgu_name, name: metadata_cgu_name,
kind: ModuleKind::Metadata, kind: ModuleKind::Metadata,
object: Some(tmp_file), object: Some(tmp_file),
dwarf_object: None,
bytecode: None, bytecode: None,
}) })
} else { } else {

View file

@ -1,16 +1,23 @@
//! The JIT driver uses [`cranelift_simplejit`] to JIT execute programs without writing any object //! The JIT driver uses [`cranelift_simplejit`] to JIT execute programs without writing any object
//! files. //! files.
use std::cell::RefCell;
use std::ffi::CString; use std::ffi::CString;
use std::os::raw::{c_char, c_int}; use std::os::raw::{c_char, c_int};
use rustc_codegen_ssa::CrateInfo; use rustc_codegen_ssa::CrateInfo;
use rustc_middle::mir::mono::MonoItem;
use cranelift_simplejit::{SimpleJITBuilder, SimpleJITModule}; use cranelift_jit::{JITBuilder, JITModule};
use crate::prelude::*; use crate::prelude::*;
use crate::{CodegenCx, CodegenMode};
pub(super) fn run_jit(tcx: TyCtxt<'_>) -> ! { thread_local! {
pub static CURRENT_MODULE: RefCell<Option<JITModule>> = RefCell::new(None);
}
pub(super) fn run_jit(tcx: TyCtxt<'_>, codegen_mode: CodegenMode) -> ! {
if !tcx.sess.opts.output_types.should_codegen() { if !tcx.sess.opts.output_types.should_codegen() {
tcx.sess.fatal("JIT mode doesn't work with `cargo check`."); tcx.sess.fatal("JIT mode doesn't work with `cargo check`.");
} }
@ -35,12 +42,13 @@ pub(super) fn run_jit(tcx: TyCtxt<'_>) -> ! {
let imported_symbols = load_imported_symbols_for_jit(tcx); let imported_symbols = load_imported_symbols_for_jit(tcx);
let mut jit_builder = SimpleJITBuilder::with_isa( let mut jit_builder = JITBuilder::with_isa(
crate::build_isa(tcx.sess, false), crate::build_isa(tcx.sess),
cranelift_module::default_libcall_names(), cranelift_module::default_libcall_names(),
); );
jit_builder.hotswap(matches!(codegen_mode, CodegenMode::JitLazy));
jit_builder.symbols(imported_symbols); jit_builder.symbols(imported_symbols);
let mut jit_module = SimpleJITModule::new(jit_builder); let mut jit_module = JITModule::new(jit_builder);
assert_eq!(pointer_ty(tcx), jit_module.target_config().pointer_type()); assert_eq!(pointer_ty(tcx), jit_module.target_config().pointer_type());
let sig = Signature { let sig = Signature {
@ -66,20 +74,42 @@ pub(super) fn run_jit(tcx: TyCtxt<'_>) -> ! {
.into_iter() .into_iter()
.collect::<Vec<(_, (_, _))>>(); .collect::<Vec<(_, (_, _))>>();
let mut cx = crate::CodegenCx::new(tcx, jit_module, false); let mut cx = crate::CodegenCx::new(tcx, jit_module, false, false);
super::time(tcx, "codegen mono items", || {
super::predefine_mono_items(&mut cx, &mono_items);
for (mono_item, (linkage, visibility)) in mono_items {
let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
match mono_item {
MonoItem::Fn(inst) => match codegen_mode {
CodegenMode::Aot => unreachable!(),
CodegenMode::Jit => {
cx.tcx.sess.time("codegen fn", || {
crate::base::codegen_fn(&mut cx, inst, linkage)
});
}
CodegenMode::JitLazy => codegen_shim(&mut cx, inst),
},
MonoItem::Static(def_id) => {
crate::constant::codegen_static(&mut cx.constants_cx, def_id);
}
MonoItem::GlobalAsm(hir_id) => {
let item = cx.tcx.hir().expect_item(hir_id);
tcx.sess
.span_fatal(item.span, "Global asm is not supported in JIT mode");
}
}
}
});
let (mut jit_module, global_asm, _debug, mut unwind_context) = let (mut jit_module, global_asm, _debug, mut unwind_context) =
super::time(tcx, "codegen mono items", || { tcx.sess.time("finalize CodegenCx", || cx.finalize());
super::predefine_mono_items(&mut cx, &mono_items); jit_module.finalize_definitions();
for (mono_item, (linkage, visibility)) in mono_items {
let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
super::codegen_mono_item(&mut cx, mono_item, linkage);
}
tcx.sess.time("finalize CodegenCx", || cx.finalize())
});
if !global_asm.is_empty() { if !global_asm.is_empty() {
tcx.sess.fatal("Global asm is not supported in JIT mode"); tcx.sess.fatal("Inline asm is not supported in JIT mode");
} }
crate::main_shim::maybe_create_entry_wrapper(tcx, &mut jit_module, &mut unwind_context, true); crate::main_shim::maybe_create_entry_wrapper(tcx, &mut jit_module, &mut unwind_context, true);
crate::allocator::codegen(tcx, &mut jit_module, &mut unwind_context); crate::allocator::codegen(tcx, &mut jit_module, &mut unwind_context);
@ -91,7 +121,7 @@ pub(super) fn run_jit(tcx: TyCtxt<'_>) -> ! {
let finalized_main: *const u8 = jit_module.get_finalized_function(main_func_id); let finalized_main: *const u8 = jit_module.get_finalized_function(main_func_id);
println!("Rustc codegen cranelift will JIT run the executable, because --jit was passed"); println!("Rustc codegen cranelift will JIT run the executable, because -Cllvm-args=mode=jit was passed");
let f: extern "C" fn(c_int, *const *const c_char) -> c_int = let f: extern "C" fn(c_int, *const *const c_char) -> c_int =
unsafe { ::std::mem::transmute(finalized_main) }; unsafe { ::std::mem::transmute(finalized_main) };
@ -107,11 +137,50 @@ pub(super) fn run_jit(tcx: TyCtxt<'_>) -> ! {
// useful as some dynamic linkers use it as a marker to jump over. // useful as some dynamic linkers use it as a marker to jump over.
argv.push(std::ptr::null()); argv.push(std::ptr::null());
CURRENT_MODULE
.with(|current_module| assert!(current_module.borrow_mut().replace(jit_module).is_none()));
let ret = f(args.len() as c_int, argv.as_ptr()); let ret = f(args.len() as c_int, argv.as_ptr());
std::process::exit(ret); std::process::exit(ret);
} }
#[no_mangle]
extern "C" fn __clif_jit_fn(instance_ptr: *const Instance<'static>) -> *const u8 {
rustc_middle::ty::tls::with(|tcx| {
// lift is used to ensure the correct lifetime for instance.
let instance = tcx.lift(unsafe { *instance_ptr }).unwrap();
CURRENT_MODULE.with(|jit_module| {
let mut jit_module = jit_module.borrow_mut();
let jit_module = jit_module.as_mut().unwrap();
let mut cx = crate::CodegenCx::new(tcx, jit_module, false, false);
let (name, sig) = crate::abi::get_function_name_and_sig(
tcx,
cx.module.isa().triple(),
instance,
true,
);
let func_id = cx
.module
.declare_function(&name, Linkage::Export, &sig)
.unwrap();
cx.module.prepare_for_function_redefine(func_id).unwrap();
tcx.sess.time("codegen fn", || {
crate::base::codegen_fn(&mut cx, instance, Linkage::Export)
});
let (jit_module, global_asm, _debug_context, unwind_context) = cx.finalize();
assert!(global_asm.is_empty());
jit_module.finalize_definitions();
std::mem::forget(unsafe { unwind_context.register_jit(&jit_module) });
jit_module.get_finalized_function(func_id)
})
})
}
fn load_imported_symbols_for_jit(tcx: TyCtxt<'_>) -> Vec<(String, *const u8)> { fn load_imported_symbols_for_jit(tcx: TyCtxt<'_>) -> Vec<(String, *const u8)> {
use rustc_middle::middle::dependency_format::Linkage; use rustc_middle::middle::dependency_format::Linkage;
@ -171,3 +240,68 @@ fn load_imported_symbols_for_jit(tcx: TyCtxt<'_>) -> Vec<(String, *const u8)> {
imported_symbols imported_symbols
} }
pub(super) fn codegen_shim<'tcx>(cx: &mut CodegenCx<'tcx, impl Module>, inst: Instance<'tcx>) {
let tcx = cx.tcx;
let pointer_type = cx.module.target_config().pointer_type();
let (name, sig) =
crate::abi::get_function_name_and_sig(tcx, cx.module.isa().triple(), inst, true);
let func_id = cx
.module
.declare_function(&name, Linkage::Export, &sig)
.unwrap();
let instance_ptr = Box::into_raw(Box::new(inst));
let jit_fn = cx
.module
.declare_function(
"__clif_jit_fn",
Linkage::Import,
&Signature {
call_conv: cx.module.target_config().default_call_conv,
params: vec![AbiParam::new(pointer_type)],
returns: vec![AbiParam::new(pointer_type)],
},
)
.unwrap();
let mut trampoline = Function::with_name_signature(ExternalName::default(), sig.clone());
let mut builder_ctx = FunctionBuilderContext::new();
let mut trampoline_builder = FunctionBuilder::new(&mut trampoline, &mut builder_ctx);
let jit_fn = cx
.module
.declare_func_in_func(jit_fn, trampoline_builder.func);
let sig_ref = trampoline_builder.func.import_signature(sig);
let entry_block = trampoline_builder.create_block();
trampoline_builder.append_block_params_for_function_params(entry_block);
let fn_args = trampoline_builder
.func
.dfg
.block_params(entry_block)
.to_vec();
trampoline_builder.switch_to_block(entry_block);
let instance_ptr = trampoline_builder
.ins()
.iconst(pointer_type, instance_ptr as u64 as i64);
let jitted_fn = trampoline_builder.ins().call(jit_fn, &[instance_ptr]);
let jitted_fn = trampoline_builder.func.dfg.inst_results(jitted_fn)[0];
let call_inst = trampoline_builder
.ins()
.call_indirect(sig_ref, jitted_fn, &fn_args);
let ret_vals = trampoline_builder.func.dfg.inst_results(call_inst).to_vec();
trampoline_builder.ins().return_(&ret_vals);
cx.module
.define_function(
func_id,
&mut Context::for_function(trampoline),
&mut cranelift_codegen::binemit::NullTrapSink {},
)
.unwrap();
}

View file

@ -7,6 +7,7 @@ use rustc_middle::middle::cstore::EncodedMetadata;
use rustc_middle::mir::mono::{Linkage as RLinkage, MonoItem, Visibility}; use rustc_middle::mir::mono::{Linkage as RLinkage, MonoItem, Visibility};
use crate::prelude::*; use crate::prelude::*;
use crate::CodegenMode;
mod aot; mod aot;
#[cfg(feature = "jit")] #[cfg(feature = "jit")]
@ -20,24 +21,25 @@ pub(crate) fn codegen_crate(
) -> Box<dyn Any> { ) -> Box<dyn Any> {
tcx.sess.abort_if_errors(); tcx.sess.abort_if_errors();
if config.use_jit { match config.codegen_mode {
let is_executable = tcx CodegenMode::Aot => aot::run_aot(tcx, metadata, need_metadata_module),
.sess CodegenMode::Jit | CodegenMode::JitLazy => {
.crate_types() let is_executable = tcx
.contains(&rustc_session::config::CrateType::Executable); .sess
if !is_executable { .crate_types()
tcx.sess.fatal("can't jit non-executable crate"); .contains(&rustc_session::config::CrateType::Executable);
if !is_executable {
tcx.sess.fatal("can't jit non-executable crate");
}
#[cfg(feature = "jit")]
let _: ! = jit::run_jit(tcx, config.codegen_mode);
#[cfg(not(feature = "jit"))]
tcx.sess
.fatal("jit support was disabled when compiling rustc_codegen_cranelift");
} }
#[cfg(feature = "jit")]
let _: ! = jit::run_jit(tcx);
#[cfg(not(feature = "jit"))]
tcx.sess
.fatal("jit support was disabled when compiling rustc_codegen_cranelift");
} }
aot::run_aot(tcx, metadata, need_metadata_module)
} }
fn predefine_mono_items<'tcx>( fn predefine_mono_items<'tcx>(
@ -63,30 +65,6 @@ fn predefine_mono_items<'tcx>(
}); });
} }
fn codegen_mono_item<'tcx, M: Module>(
cx: &mut crate::CodegenCx<'tcx, M>,
mono_item: MonoItem<'tcx>,
linkage: Linkage,
) {
match mono_item {
MonoItem::Fn(inst) => {
cx.tcx
.sess
.time("codegen fn", || crate::base::codegen_fn(cx, inst, linkage));
}
MonoItem::Static(def_id) => crate::constant::codegen_static(&mut cx.constants_cx, def_id),
MonoItem::GlobalAsm(hir_id) => {
let item = cx.tcx.hir().expect_item(hir_id);
if let rustc_hir::ItemKind::GlobalAsm(rustc_hir::GlobalAsm { asm }) = item.kind {
cx.global_asm.push_str(&*asm.as_str());
cx.global_asm.push_str("\n\n");
} else {
bug!("Expected GlobalAsm found {:?}", item);
}
}
}
}
fn time<R>(tcx: TyCtxt<'_>, name: &'static str, f: impl FnOnce() -> R) -> R { fn time<R>(tcx: TyCtxt<'_>, name: &'static str, f: impl FnOnce() -> R) -> R {
if std::env::var("CG_CLIF_DISPLAY_CG_TIME") if std::env::var("CG_CLIF_DISPLAY_CG_TIME")
.as_ref() .as_ref()

View file

@ -23,8 +23,8 @@ pub(crate) fn codegen_llvm_intrinsic_call<'tcx>(
// Used by `_mm_movemask_epi8` and `_mm256_movemask_epi8` // Used by `_mm_movemask_epi8` and `_mm256_movemask_epi8`
llvm.x86.sse2.pmovmskb.128 | llvm.x86.avx2.pmovmskb | llvm.x86.sse2.movmsk.pd, (c a) { llvm.x86.sse2.pmovmskb.128 | llvm.x86.avx2.pmovmskb | llvm.x86.sse2.movmsk.pd, (c a) {
let (lane_layout, lane_count) = lane_type_and_count(fx.tcx, a.layout()); let (lane_count, lane_ty) = a.layout().ty.simd_size_and_type(fx.tcx);
let lane_ty = fx.clif_type(lane_layout.ty).unwrap(); let lane_ty = fx.clif_type(lane_ty).unwrap();
assert!(lane_count <= 32); assert!(lane_count <= 32);
let mut res = fx.bcx.ins().iconst(types::I32, 0); let mut res = fx.bcx.ins().iconst(types::I32, 0);

View file

@ -171,27 +171,6 @@ macro validate_simd_type($fx:ident, $intrinsic:ident, $span:ident, $ty:expr) {
} }
} }
fn lane_type_and_count<'tcx>(
tcx: TyCtxt<'tcx>,
layout: TyAndLayout<'tcx>,
) -> (TyAndLayout<'tcx>, u16) {
assert!(layout.ty.is_simd());
let lane_count = match layout.fields {
rustc_target::abi::FieldsShape::Array { stride: _, count } => u16::try_from(count).unwrap(),
_ => unreachable!("lane_type_and_count({:?})", layout),
};
let lane_layout = layout
.field(
&ty::layout::LayoutCx {
tcx,
param_env: ParamEnv::reveal_all(),
},
0,
)
.unwrap();
(lane_layout, lane_count)
}
pub(crate) fn clif_vector_type<'tcx>(tcx: TyCtxt<'tcx>, layout: TyAndLayout<'tcx>) -> Option<Type> { pub(crate) fn clif_vector_type<'tcx>(tcx: TyCtxt<'tcx>, layout: TyAndLayout<'tcx>) -> Option<Type> {
let (element, count) = match &layout.abi { let (element, count) = match &layout.abi {
Abi::Vector { element, count } => (element.clone(), *count), Abi::Vector { element, count } => (element.clone(), *count),
@ -218,8 +197,10 @@ fn simd_for_each_lane<'tcx, M: Module>(
) { ) {
let layout = val.layout(); let layout = val.layout();
let (lane_layout, lane_count) = lane_type_and_count(fx.tcx, layout); let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
let (ret_lane_layout, ret_lane_count) = lane_type_and_count(fx.tcx, ret.layout()); let lane_layout = fx.layout_of(lane_ty);
let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx);
let ret_lane_layout = fx.layout_of(ret_lane_ty);
assert_eq!(lane_count, ret_lane_count); assert_eq!(lane_count, ret_lane_count);
for lane_idx in 0..lane_count { for lane_idx in 0..lane_count {
@ -248,8 +229,10 @@ fn simd_pair_for_each_lane<'tcx, M: Module>(
assert_eq!(x.layout(), y.layout()); assert_eq!(x.layout(), y.layout());
let layout = x.layout(); let layout = x.layout();
let (lane_layout, lane_count) = lane_type_and_count(fx.tcx, layout); let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
let (ret_lane_layout, ret_lane_count) = lane_type_and_count(fx.tcx, ret.layout()); let lane_layout = fx.layout_of(lane_ty);
let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx);
let ret_lane_layout = fx.layout_of(ret_lane_ty);
assert_eq!(lane_count, ret_lane_count); assert_eq!(lane_count, ret_lane_count);
for lane in 0..lane_count { for lane in 0..lane_count {
@ -269,13 +252,14 @@ fn simd_reduce<'tcx, M: Module>(
ret: CPlace<'tcx>, ret: CPlace<'tcx>,
f: impl Fn(&mut FunctionCx<'_, 'tcx, M>, TyAndLayout<'tcx>, Value, Value) -> Value, f: impl Fn(&mut FunctionCx<'_, 'tcx, M>, TyAndLayout<'tcx>, Value, Value) -> Value,
) { ) {
let (lane_layout, lane_count) = lane_type_and_count(fx.tcx, val.layout()); let (lane_count, lane_ty) = val.layout().ty.simd_size_and_type(fx.tcx);
let lane_layout = fx.layout_of(lane_ty);
assert_eq!(lane_layout, ret.layout()); assert_eq!(lane_layout, ret.layout());
let mut res_val = val.value_field(fx, mir::Field::new(0)).load_scalar(fx); let mut res_val = val.value_field(fx, mir::Field::new(0)).load_scalar(fx);
for lane_idx in 1..lane_count { for lane_idx in 1..lane_count {
let lane = val let lane = val
.value_field(fx, mir::Field::new(lane_idx.into())) .value_field(fx, mir::Field::new(lane_idx.try_into().unwrap()))
.load_scalar(fx); .load_scalar(fx);
res_val = f(fx, lane_layout, res_val, lane); res_val = f(fx, lane_layout, res_val, lane);
} }
@ -289,14 +273,14 @@ fn simd_reduce_bool<'tcx, M: Module>(
ret: CPlace<'tcx>, ret: CPlace<'tcx>,
f: impl Fn(&mut FunctionCx<'_, 'tcx, M>, Value, Value) -> Value, f: impl Fn(&mut FunctionCx<'_, 'tcx, M>, Value, Value) -> Value,
) { ) {
let (_lane_layout, lane_count) = lane_type_and_count(fx.tcx, val.layout()); let (lane_count, _lane_ty) = val.layout().ty.simd_size_and_type(fx.tcx);
assert!(ret.layout().ty.is_bool()); assert!(ret.layout().ty.is_bool());
let res_val = val.value_field(fx, mir::Field::new(0)).load_scalar(fx); let res_val = val.value_field(fx, mir::Field::new(0)).load_scalar(fx);
let mut res_val = fx.bcx.ins().band_imm(res_val, 1); // mask to boolean let mut res_val = fx.bcx.ins().band_imm(res_val, 1); // mask to boolean
for lane_idx in 1..lane_count { for lane_idx in 1..lane_count {
let lane = val let lane = val
.value_field(fx, mir::Field::new(lane_idx.into())) .value_field(fx, mir::Field::new(lane_idx.try_into().unwrap()))
.load_scalar(fx); .load_scalar(fx);
let lane = fx.bcx.ins().band_imm(lane, 1); // mask to boolean let lane = fx.bcx.ins().band_imm(lane, 1); // mask to boolean
res_val = f(fx, res_val, lane); res_val = f(fx, res_val, lane);
@ -460,9 +444,6 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
"abort" => { "abort" => {
trap_abort(fx, "Called intrinsic::abort."); trap_abort(fx, "Called intrinsic::abort.");
} }
"unreachable" => {
trap_unreachable(fx, "[corruption] Called intrinsic::unreachable.");
}
"transmute" => { "transmute" => {
crate::base::codegen_panic(fx, "Transmuting to uninhabited type.", span); crate::base::codegen_panic(fx, "Transmuting to uninhabited type.", span);
} }
@ -575,12 +556,6 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
fx.bcx.call_memmove(fx.cx.module.target_config(), dst, src, byte_amount); fx.bcx.call_memmove(fx.cx.module.target_config(), dst, src, byte_amount);
} }
}; };
discriminant_value, (c ptr) {
let pointee_layout = fx.layout_of(ptr.layout().ty.builtin_deref(true).unwrap().ty);
let val = CValue::by_ref(Pointer::new(ptr.load_scalar(fx)), pointee_layout);
let discr = crate::discriminant::codegen_get_discriminant(fx, val, ret.layout());
ret.write_cvalue(fx, discr);
};
size_of_val, <T> (c ptr) { size_of_val, <T> (c ptr) {
let layout = fx.layout_of(T); let layout = fx.layout_of(T);
let size = if layout.is_unsized() { let size = if layout.is_unsized() {
@ -641,22 +616,6 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
); );
ret.write_cvalue(fx, res); ret.write_cvalue(fx, res);
}; };
_ if intrinsic.starts_with("wrapping_"), (c x, c y) {
assert_eq!(x.layout().ty, y.layout().ty);
let bin_op = match intrinsic {
"wrapping_add" => BinOp::Add,
"wrapping_sub" => BinOp::Sub,
"wrapping_mul" => BinOp::Mul,
_ => unreachable!("intrinsic {}", intrinsic),
};
let res = crate::num::codegen_int_binop(
fx,
bin_op,
x,
y,
);
ret.write_cvalue(fx, res);
};
_ if intrinsic.starts_with("saturating_"), <T> (c lhs, c rhs) { _ if intrinsic.starts_with("saturating_"), <T> (c lhs, c rhs) {
assert_eq!(lhs.layout().ty, rhs.layout().ty); assert_eq!(lhs.layout().ty, rhs.layout().ty);
let bin_op = match intrinsic { let bin_op = match intrinsic {
@ -916,7 +875,7 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
dest.write_cvalue(fx, val); dest.write_cvalue(fx, val);
}; };
size_of | pref_align_of | min_align_of | needs_drop | type_id | type_name | variant_count, () { pref_align_of | min_align_of | needs_drop | type_id | type_name | variant_count, () {
let const_val = let const_val =
fx.tcx.const_eval_instance(ParamEnv::reveal_all(), instance, None).unwrap(); fx.tcx.const_eval_instance(ParamEnv::reveal_all(), instance, None).unwrap();
let val = crate::constant::codegen_const_value( let val = crate::constant::codegen_const_value(

View file

@ -73,11 +73,11 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
assert_eq!(x.layout(), y.layout()); assert_eq!(x.layout(), y.layout());
let layout = x.layout(); let layout = x.layout();
let (lane_type, lane_count) = lane_type_and_count(fx.tcx, layout); let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
let (ret_lane_type, ret_lane_count) = lane_type_and_count(fx.tcx, ret.layout()); let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx);
assert_eq!(lane_type, ret_lane_type); assert_eq!(lane_ty, ret_lane_ty);
assert_eq!(n, ret_lane_count); assert_eq!(u64::from(n), ret_lane_count);
let total_len = lane_count * 2; let total_len = lane_count * 2;
@ -105,14 +105,14 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
}; };
for &idx in &indexes { for &idx in &indexes {
assert!(idx < total_len, "idx {} out of range 0..{}", idx, total_len); assert!(u64::from(idx) < total_len, "idx {} out of range 0..{}", idx, total_len);
} }
for (out_idx, in_idx) in indexes.into_iter().enumerate() { for (out_idx, in_idx) in indexes.into_iter().enumerate() {
let in_lane = if in_idx < lane_count { let in_lane = if u64::from(in_idx) < lane_count {
x.value_field(fx, mir::Field::new(in_idx.into())) x.value_field(fx, mir::Field::new(in_idx.into()))
} else { } else {
y.value_field(fx, mir::Field::new((in_idx - lane_count).into())) y.value_field(fx, mir::Field::new(usize::from(in_idx) - usize::try_from(lane_count).unwrap()))
}; };
let out_lane = ret.place_field(fx, mir::Field::new(out_idx)); let out_lane = ret.place_field(fx, mir::Field::new(out_idx));
out_lane.write_cvalue(fx, in_lane); out_lane.write_cvalue(fx, in_lane);
@ -131,7 +131,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
}; };
let idx = idx_const.val.try_to_bits(Size::from_bytes(4 /* u32*/)).unwrap_or_else(|| panic!("kind not scalar: {:?}", idx_const)); let idx = idx_const.val.try_to_bits(Size::from_bytes(4 /* u32*/)).unwrap_or_else(|| panic!("kind not scalar: {:?}", idx_const));
let (_lane_type, lane_count) = lane_type_and_count(fx.tcx, base.layout()); let (lane_count, _lane_ty) = base.layout().ty.simd_size_and_type(fx.tcx);
if idx >= lane_count.into() { if idx >= lane_count.into() {
fx.tcx.sess.span_fatal(fx.mir.span, &format!("[simd_insert] idx {} >= lane_count {}", idx, lane_count)); fx.tcx.sess.span_fatal(fx.mir.span, &format!("[simd_insert] idx {} >= lane_count {}", idx, lane_count));
} }
@ -160,7 +160,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
}; };
let idx = idx_const.val.try_to_bits(Size::from_bytes(4 /* u32*/)).unwrap_or_else(|| panic!("kind not scalar: {:?}", idx_const)); let idx = idx_const.val.try_to_bits(Size::from_bytes(4 /* u32*/)).unwrap_or_else(|| panic!("kind not scalar: {:?}", idx_const));
let (_lane_type, lane_count) = lane_type_and_count(fx.tcx, v.layout()); let (lane_count, _lane_ty) = v.layout().ty.simd_size_and_type(fx.tcx);
if idx >= lane_count.into() { if idx >= lane_count.into() {
fx.tcx.sess.span_fatal(fx.mir.span, &format!("[simd_extract] idx {} >= lane_count {}", idx, lane_count)); fx.tcx.sess.span_fatal(fx.mir.span, &format!("[simd_extract] idx {} >= lane_count {}", idx, lane_count));
} }
@ -212,12 +212,13 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
assert_eq!(a.layout(), c.layout()); assert_eq!(a.layout(), c.layout());
let layout = a.layout(); let layout = a.layout();
let (_lane_layout, lane_count) = lane_type_and_count(fx.tcx, layout); let (lane_count, _lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
let (ret_lane_layout, ret_lane_count) = lane_type_and_count(fx.tcx, ret.layout()); let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx);
assert_eq!(lane_count, ret_lane_count); assert_eq!(lane_count, ret_lane_count);
let ret_lane_layout = fx.layout_of(ret_lane_ty);
for lane in 0..lane_count { for lane in 0..lane_count {
let lane = mir::Field::new(lane.into()); let lane = mir::Field::new(lane.try_into().unwrap());
let a_lane = a.value_field(fx, lane).load_scalar(fx); let a_lane = a.value_field(fx, lane).load_scalar(fx);
let b_lane = b.value_field(fx, lane).load_scalar(fx); let b_lane = b.value_field(fx, lane).load_scalar(fx);
let c_lane = c.value_field(fx, lane).load_scalar(fx); let c_lane = c.value_field(fx, lane).load_scalar(fx);

View file

@ -5,7 +5,8 @@
associated_type_bounds, associated_type_bounds,
never_type, never_type,
try_blocks, try_blocks,
hash_drain_filter hash_drain_filter,
str_split_once
)] )]
#![warn(rust_2018_idioms)] #![warn(rust_2018_idioms)]
#![warn(unused_lifetimes)] #![warn(unused_lifetimes)]
@ -34,6 +35,7 @@ extern crate rustc_target;
extern crate rustc_driver; extern crate rustc_driver;
use std::any::Any; use std::any::Any;
use std::str::FromStr;
use rustc_codegen_ssa::traits::CodegenBackend; use rustc_codegen_ssa::traits::CodegenBackend;
use rustc_codegen_ssa::CodegenResults; use rustc_codegen_ssa::CodegenResults;
@ -141,8 +143,8 @@ struct CodegenCx<'tcx, M: Module> {
} }
impl<'tcx, M: Module> CodegenCx<'tcx, M> { impl<'tcx, M: Module> CodegenCx<'tcx, M> {
fn new(tcx: TyCtxt<'tcx>, module: M, debug_info: bool) -> Self { fn new(tcx: TyCtxt<'tcx>, module: M, debug_info: bool, pic_eh_frame: bool) -> Self {
let unwind_context = UnwindContext::new(tcx, module.isa()); let unwind_context = UnwindContext::new(tcx, module.isa(), pic_eh_frame);
let debug_context = if debug_info { let debug_context = if debug_info {
Some(DebugContext::new(tcx, module.isa())) Some(DebugContext::new(tcx, module.isa()))
} else { } else {
@ -172,12 +174,55 @@ impl<'tcx, M: Module> CodegenCx<'tcx, M> {
} }
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
pub enum CodegenMode {
Aot,
Jit,
JitLazy,
}
impl Default for CodegenMode {
fn default() -> Self {
CodegenMode::Aot
}
}
impl FromStr for CodegenMode {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"aot" => Ok(CodegenMode::Aot),
"jit" => Ok(CodegenMode::Jit),
"jit-lazy" => Ok(CodegenMode::JitLazy),
_ => Err(format!("Unknown codegen mode `{}`", s)),
}
}
}
#[derive(Copy, Clone, Debug, Default)]
pub struct BackendConfig { pub struct BackendConfig {
pub use_jit: bool, pub codegen_mode: CodegenMode,
}
impl BackendConfig {
fn from_opts(opts: &[String]) -> Result<Self, String> {
let mut config = BackendConfig::default();
for opt in opts {
if let Some((name, value)) = opt.split_once('=') {
match name {
"mode" => config.codegen_mode = value.parse()?,
_ => return Err(format!("Unknown option `{}`", name)),
}
} else {
return Err(format!("Invalid option `{}`", opt));
}
}
Ok(config)
}
} }
pub struct CraneliftCodegenBackend { pub struct CraneliftCodegenBackend {
pub config: BackendConfig, pub config: Option<BackendConfig>,
} }
impl CodegenBackend for CraneliftCodegenBackend { impl CodegenBackend for CraneliftCodegenBackend {
@ -204,7 +249,13 @@ impl CodegenBackend for CraneliftCodegenBackend {
metadata: EncodedMetadata, metadata: EncodedMetadata,
need_metadata_module: bool, need_metadata_module: bool,
) -> Box<dyn Any> { ) -> Box<dyn Any> {
let res = driver::codegen_crate(tcx, metadata, need_metadata_module, self.config); let config = if let Some(config) = self.config {
config
} else {
BackendConfig::from_opts(&tcx.sess.opts.cg.llvm_args)
.unwrap_or_else(|err| tcx.sess.fatal(&err))
};
let res = driver::codegen_crate(tcx, metadata, need_metadata_module, config);
rustc_symbol_mangling::test::report_symbol_names(tcx); rustc_symbol_mangling::test::report_symbol_names(tcx);
@ -250,17 +301,13 @@ fn target_triple(sess: &Session) -> target_lexicon::Triple {
sess.target.llvm_target.parse().unwrap() sess.target.llvm_target.parse().unwrap()
} }
fn build_isa(sess: &Session, enable_pic: bool) -> Box<dyn isa::TargetIsa + 'static> { fn build_isa(sess: &Session) -> Box<dyn isa::TargetIsa + 'static> {
use target_lexicon::BinaryFormat; use target_lexicon::BinaryFormat;
let target_triple = crate::target_triple(sess); let target_triple = crate::target_triple(sess);
let mut flags_builder = settings::builder(); let mut flags_builder = settings::builder();
if enable_pic { flags_builder.enable("is_pic").unwrap();
flags_builder.enable("is_pic").unwrap();
} else {
flags_builder.set("is_pic", "false").unwrap();
}
flags_builder.set("enable_probestack", "false").unwrap(); // __cranelift_probestack is not provided flags_builder.set("enable_probestack", "false").unwrap(); // __cranelift_probestack is not provided
flags_builder flags_builder
.set( .set(
@ -283,8 +330,6 @@ fn build_isa(sess: &Session, enable_pic: bool) -> Box<dyn isa::TargetIsa + 'stat
flags_builder.set("enable_simd", "true").unwrap(); flags_builder.set("enable_simd", "true").unwrap();
// FIXME(CraneStation/cranelift#732) fix LICM in presence of jump tables
/*
use rustc_session::config::OptLevel; use rustc_session::config::OptLevel;
match sess.opts.optimize { match sess.opts.optimize {
OptLevel::No => { OptLevel::No => {
@ -297,7 +342,7 @@ fn build_isa(sess: &Session, enable_pic: bool) -> Box<dyn isa::TargetIsa + 'stat
OptLevel::Size | OptLevel::SizeMin => { OptLevel::Size | OptLevel::SizeMin => {
sess.warn("Optimizing for size is not supported. Just ignoring the request"); sess.warn("Optimizing for size is not supported. Just ignoring the request");
} }
}*/ }
let flags = settings::Flags::new(flags_builder); let flags = settings::Flags::new(flags_builder);
@ -311,7 +356,5 @@ fn build_isa(sess: &Session, enable_pic: bool) -> Box<dyn isa::TargetIsa + 'stat
/// This is the entrypoint for a hot plugged rustc_codegen_cranelift /// This is the entrypoint for a hot plugged rustc_codegen_cranelift
#[no_mangle] #[no_mangle]
pub fn __rustc_codegen_backend() -> Box<dyn CodegenBackend> { pub fn __rustc_codegen_backend() -> Box<dyn CodegenBackend> {
Box::new(CraneliftCodegenBackend { Box::new(CraneliftCodegenBackend { config: None })
config: BackendConfig { use_jit: false },
})
} }

View file

@ -73,7 +73,7 @@ pub(crate) fn make_branchable_value(bcx: &mut FunctionBuilder<'_>, arg: Value) -
})() })()
.unwrap_or_else(|| { .unwrap_or_else(|| {
match bcx.func.dfg.value_type(arg) { match bcx.func.dfg.value_type(arg) {
types::I8 | types::I32 => { types::I8 | types::I16 => {
// WORKAROUND for brz.i8 and brnz.i8 not yet being implemented // WORKAROUND for brz.i8 and brnz.i8 not yet being implemented
bcx.ins().uextend(types::I32, arg) bcx.ins().uextend(types::I32, arg)
} }
@ -81,3 +81,40 @@ pub(crate) fn make_branchable_value(bcx: &mut FunctionBuilder<'_>, arg: Value) -
} }
}) })
} }
/// Returns whether the branch is statically known to be taken or `None` if it isn't statically known.
pub(crate) fn maybe_known_branch_taken(
bcx: &FunctionBuilder<'_>,
arg: Value,
test_zero: bool,
) -> Option<bool> {
let arg_inst = if let ValueDef::Result(arg_inst, 0) = bcx.func.dfg.value_def(arg) {
arg_inst
} else {
return None;
};
match bcx.func.dfg[arg_inst] {
InstructionData::UnaryBool {
opcode: Opcode::Bconst,
imm,
} => {
if test_zero {
Some(!imm)
} else {
Some(imm)
}
}
InstructionData::UnaryImm {
opcode: Opcode::Iconst,
imm,
} => {
if test_zero {
Some(imm.bits() == 0)
} else {
Some(imm.bits() != 0)
}
}
_ => None,
}
}

View file

@ -53,6 +53,7 @@
//! ``` //! ```
use std::fmt; use std::fmt;
use std::io::Write;
use cranelift_codegen::{ use cranelift_codegen::{
entity::SecondaryMap, entity::SecondaryMap,
@ -200,32 +201,24 @@ impl<M: Module> FunctionCx<'_, '_, M> {
} }
} }
pub(crate) fn write_clif_file<'tcx>( pub(crate) fn should_write_ir(tcx: TyCtxt<'_>) -> bool {
tcx: TyCtxt<'tcx>, cfg!(debug_assertions)
postfix: &str, || tcx
isa: Option<&dyn cranelift_codegen::isa::TargetIsa>,
instance: Instance<'tcx>,
context: &cranelift_codegen::Context,
mut clif_comments: &CommentWriter,
) {
use std::io::Write;
if !cfg!(debug_assertions)
&& !tcx
.sess .sess
.opts .opts
.output_types .output_types
.contains_key(&OutputType::LlvmAssembly) .contains_key(&OutputType::LlvmAssembly)
{ }
pub(crate) fn write_ir_file<'tcx>(
tcx: TyCtxt<'tcx>,
name: &str,
write: impl FnOnce(&mut dyn Write) -> std::io::Result<()>,
) {
if !should_write_ir(tcx) {
return; return;
} }
let value_ranges = isa.map(|isa| {
context
.build_value_labels_ranges(isa)
.expect("value location ranges")
});
let clif_output_dir = tcx.output_filenames(LOCAL_CRATE).with_extension("clif"); let clif_output_dir = tcx.output_filenames(LOCAL_CRATE).with_extension("clif");
match std::fs::create_dir(&clif_output_dir) { match std::fs::create_dir(&clif_output_dir) {
@ -234,41 +227,58 @@ pub(crate) fn write_clif_file<'tcx>(
res @ Err(_) => res.unwrap(), res @ Err(_) => res.unwrap(),
} }
let clif_file_name = clif_output_dir.join(format!( let clif_file_name = clif_output_dir.join(name);
"{}.{}.clif",
tcx.symbol_name(instance).name,
postfix
));
let mut clif = String::new();
cranelift_codegen::write::decorate_function(
&mut clif_comments,
&mut clif,
&context.func,
&DisplayFunctionAnnotations {
isa: Some(&*crate::build_isa(
tcx.sess, true, /* PIC doesn't matter here */
)),
value_ranges: value_ranges.as_ref(),
},
)
.unwrap();
let res: std::io::Result<()> = try { let res: std::io::Result<()> = try {
let mut file = std::fs::File::create(clif_file_name)?; let mut file = std::fs::File::create(clif_file_name)?;
let target_triple = crate::target_triple(tcx.sess); write(&mut file)?;
writeln!(file, "test compile")?;
writeln!(file, "set is_pic")?;
writeln!(file, "set enable_simd")?;
writeln!(file, "target {} haswell", target_triple)?;
writeln!(file)?;
file.write_all(clif.as_bytes())?;
}; };
if let Err(err) = res { if let Err(err) = res {
tcx.sess.warn(&format!("err writing clif file: {}", err)); tcx.sess.warn(&format!("error writing ir file: {}", err));
} }
} }
pub(crate) fn write_clif_file<'tcx>(
tcx: TyCtxt<'tcx>,
postfix: &str,
isa: Option<&dyn cranelift_codegen::isa::TargetIsa>,
instance: Instance<'tcx>,
context: &cranelift_codegen::Context,
mut clif_comments: &CommentWriter,
) {
write_ir_file(
tcx,
&format!("{}.{}.clif", tcx.symbol_name(instance).name, postfix),
|file| {
let value_ranges = isa.map(|isa| {
context
.build_value_labels_ranges(isa)
.expect("value location ranges")
});
let mut clif = String::new();
cranelift_codegen::write::decorate_function(
&mut clif_comments,
&mut clif,
&context.func,
&DisplayFunctionAnnotations {
isa: Some(&*crate::build_isa(tcx.sess)),
value_ranges: value_ranges.as_ref(),
},
)
.unwrap();
writeln!(file, "test compile")?;
writeln!(file, "set is_pic")?;
writeln!(file, "set enable_simd")?;
writeln!(file, "target {} haswell", crate::target_triple(tcx.sess))?;
writeln!(file)?;
file.write_all(clif.as_bytes())?;
Ok(())
},
);
}
impl<M: Module> fmt::Debug for FunctionCx<'_, '_, M> { impl<M: Module> fmt::Debug for FunctionCx<'_, '_, M> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "{:?}", self.instance.substs)?; writeln!(f, "{:?}", self.instance.substs)?;

View file

@ -480,17 +480,19 @@ impl<'tcx> CPlace<'tcx> {
// fn(&T) -> for<'l> fn(&'l T) is allowed // fn(&T) -> for<'l> fn(&'l T) is allowed
} }
(&ty::Dynamic(from_traits, _), &ty::Dynamic(to_traits, _)) => { (&ty::Dynamic(from_traits, _), &ty::Dynamic(to_traits, _)) => {
let from_traits = fx for (from, to) in from_traits.iter().zip(to_traits) {
.tcx let from = fx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from_traits); .tcx
let to_traits = fx .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from);
.tcx let to = fx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_traits); .tcx
assert_eq!( .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to);
from_traits, to_traits, assert_eq!(
"Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}", from, to,
from_traits, to_traits, fx, "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
); from_traits, to_traits, fx,
);
}
// dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
} }
_ => { _ => {

View file

@ -158,7 +158,8 @@ fn build_vtable<'tcx>(
) )
.unwrap(); .unwrap();
fx.cx.module.define_data(data_id, &data_ctx).unwrap(); // FIXME don't duplicate definitions in lazy jit mode
let _ = fx.cx.module.define_data(data_id, &data_ctx);
data_id data_id
} }

View file

@ -6,7 +6,9 @@ use crate::llvm::{self, build_string, False, True};
use crate::{LlvmCodegenBackend, ModuleLlvm}; use crate::{LlvmCodegenBackend, ModuleLlvm};
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule, ThinShared}; use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule, ThinShared};
use rustc_codegen_ssa::back::symbol_export; use rustc_codegen_ssa::back::symbol_export;
use rustc_codegen_ssa::back::write::{CodegenContext, FatLTOInput, ModuleConfig}; use rustc_codegen_ssa::back::write::{
CodegenContext, FatLTOInput, ModuleConfig, TargetMachineFactoryConfig,
};
use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{looks_like_rust_object_file, ModuleCodegen, ModuleKind}; use rustc_codegen_ssa::{looks_like_rust_object_file, ModuleCodegen, ModuleKind};
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
@ -728,7 +730,14 @@ pub unsafe fn optimize_thin_module(
cgcx: &CodegenContext<LlvmCodegenBackend>, cgcx: &CodegenContext<LlvmCodegenBackend>,
) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> { ) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> {
let diag_handler = cgcx.create_diag_handler(); let diag_handler = cgcx.create_diag_handler();
let tm = (cgcx.tm_factory.0)().map_err(|e| write::llvm_err(&diag_handler, &e))?;
let module_name = &thin_module.shared.module_names[thin_module.idx];
let split_dwarf_file = cgcx
.output_filenames
.split_dwarf_filename(cgcx.split_dwarf_kind, Some(module_name.to_str().unwrap()));
let tm_factory_config = TargetMachineFactoryConfig { split_dwarf_file };
let tm =
(cgcx.tm_factory)(tm_factory_config).map_err(|e| write::llvm_err(&diag_handler, &e))?;
// Right now the implementation we've got only works over serialized // Right now the implementation we've got only works over serialized
// modules, so we create a fresh new LLVM context and parse the module // modules, so we create a fresh new LLVM context and parse the module
@ -736,12 +745,8 @@ pub unsafe fn optimize_thin_module(
// crates but for locally codegened modules we may be able to reuse // crates but for locally codegened modules we may be able to reuse
// that LLVM Context and Module. // that LLVM Context and Module.
let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names); let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names);
let llmod_raw = parse_module( let llmod_raw =
llcx, parse_module(llcx, &module_name, thin_module.data(), &diag_handler)? as *const _;
&thin_module.shared.module_names[thin_module.idx],
thin_module.data(),
&diag_handler,
)? as *const _;
let module = ModuleCodegen { let module = ModuleCodegen {
module_llvm: ModuleLlvm { llmod_raw, llcx, tm }, module_llvm: ModuleLlvm { llmod_raw, llcx, tm },
name: thin_module.name().to_string(), name: thin_module.name().to_string(),

View file

@ -11,7 +11,10 @@ use crate::llvm_util;
use crate::type_::Type; use crate::type_::Type;
use crate::LlvmCodegenBackend; use crate::LlvmCodegenBackend;
use crate::ModuleLlvm; use crate::ModuleLlvm;
use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, ModuleConfig}; use rustc_codegen_ssa::back::write::{
BitcodeSection, CodegenContext, EmitObj, ModuleConfig, TargetMachineFactoryConfig,
TargetMachineFactoryFn,
};
use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen}; use rustc_codegen_ssa::{CompiledModule, ModuleCodegen};
use rustc_data_structures::small_c_str::SmallCStr; use rustc_data_structures::small_c_str::SmallCStr;
@ -20,7 +23,9 @@ use rustc_fs_util::{link_or_copy, path_to_c_string};
use rustc_hir::def_id::LOCAL_CRATE; use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::bug; use rustc_middle::bug;
use rustc_middle::ty::TyCtxt; use rustc_middle::ty::TyCtxt;
use rustc_session::config::{self, Lto, OutputType, Passes, SanitizerSet, SwitchWithOptPath}; use rustc_session::config::{
self, Lto, OutputType, Passes, SanitizerSet, SplitDwarfKind, SwitchWithOptPath,
};
use rustc_session::Session; use rustc_session::Session;
use rustc_span::symbol::sym; use rustc_span::symbol::sym;
use rustc_span::InnerSpan; use rustc_span::InnerSpan;
@ -49,11 +54,31 @@ pub fn write_output_file(
pm: &llvm::PassManager<'ll>, pm: &llvm::PassManager<'ll>,
m: &'ll llvm::Module, m: &'ll llvm::Module,
output: &Path, output: &Path,
dwo_output: Option<&Path>,
file_type: llvm::FileType, file_type: llvm::FileType,
) -> Result<(), FatalError> { ) -> Result<(), FatalError> {
unsafe { unsafe {
let output_c = path_to_c_string(output); let output_c = path_to_c_string(output);
let result = llvm::LLVMRustWriteOutputFile(target, pm, m, output_c.as_ptr(), file_type); let result = if let Some(dwo_output) = dwo_output {
let dwo_output_c = path_to_c_string(dwo_output);
llvm::LLVMRustWriteOutputFile(
target,
pm,
m,
output_c.as_ptr(),
dwo_output_c.as_ptr(),
file_type,
)
} else {
llvm::LLVMRustWriteOutputFile(
target,
pm,
m,
output_c.as_ptr(),
std::ptr::null(),
file_type,
)
};
result.into_result().map_err(|()| { result.into_result().map_err(|()| {
let msg = format!("could not write output to {}", output.display()); let msg = format!("could not write output to {}", output.display());
llvm_err(handler, &msg) llvm_err(handler, &msg)
@ -62,12 +87,17 @@ pub fn write_output_file(
} }
pub fn create_informational_target_machine(sess: &Session) -> &'static mut llvm::TargetMachine { pub fn create_informational_target_machine(sess: &Session) -> &'static mut llvm::TargetMachine {
target_machine_factory(sess, config::OptLevel::No)() let config = TargetMachineFactoryConfig { split_dwarf_file: None };
target_machine_factory(sess, config::OptLevel::No)(config)
.unwrap_or_else(|err| llvm_err(sess.diagnostic(), &err).raise()) .unwrap_or_else(|err| llvm_err(sess.diagnostic(), &err).raise())
} }
pub fn create_target_machine(tcx: TyCtxt<'_>) -> &'static mut llvm::TargetMachine { pub fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> &'static mut llvm::TargetMachine {
target_machine_factory(&tcx.sess, tcx.backend_optimization_level(LOCAL_CRATE))() let split_dwarf_file = tcx
.output_filenames(LOCAL_CRATE)
.split_dwarf_filename(tcx.sess.opts.debugging_opts.split_dwarf, Some(mod_name));
let config = TargetMachineFactoryConfig { split_dwarf_file };
target_machine_factory(&tcx.sess, tcx.backend_optimization_level(LOCAL_CRATE))(config)
.unwrap_or_else(|err| llvm_err(tcx.sess.diagnostic(), &err).raise()) .unwrap_or_else(|err| llvm_err(tcx.sess.diagnostic(), &err).raise())
} }
@ -122,7 +152,7 @@ fn to_llvm_code_model(code_model: Option<CodeModel>) -> llvm::CodeModel {
pub fn target_machine_factory( pub fn target_machine_factory(
sess: &Session, sess: &Session,
optlvl: config::OptLevel, optlvl: config::OptLevel,
) -> Arc<dyn Fn() -> Result<&'static mut llvm::TargetMachine, String> + Send + Sync> { ) -> TargetMachineFactoryFn<LlvmCodegenBackend> {
let reloc_model = to_llvm_relocation_model(sess.relocation_model()); let reloc_model = to_llvm_relocation_model(sess.relocation_model());
let (opt_level, _) = to_llvm_opt_settings(optlvl); let (opt_level, _) = to_llvm_opt_settings(optlvl);
@ -163,7 +193,10 @@ pub fn target_machine_factory(
let use_init_array = let use_init_array =
!sess.opts.debugging_opts.use_ctors_section.unwrap_or(sess.target.use_ctors_section); !sess.opts.debugging_opts.use_ctors_section.unwrap_or(sess.target.use_ctors_section);
Arc::new(move || { Arc::new(move |config: TargetMachineFactoryConfig| {
let split_dwarf_file = config.split_dwarf_file.unwrap_or_default();
let split_dwarf_file = CString::new(split_dwarf_file.to_str().unwrap()).unwrap();
let tm = unsafe { let tm = unsafe {
llvm::LLVMRustCreateTargetMachine( llvm::LLVMRustCreateTargetMachine(
triple.as_ptr(), triple.as_ptr(),
@ -182,6 +215,7 @@ pub fn target_machine_factory(
emit_stack_size_section, emit_stack_size_section,
relax_elf_relocations, relax_elf_relocations,
use_init_array, use_init_array,
split_dwarf_file.as_ptr(),
) )
}; };
@ -451,7 +485,7 @@ pub(crate) unsafe fn optimize(
diag_handler: &Handler, diag_handler: &Handler,
module: &ModuleCodegen<ModuleLlvm>, module: &ModuleCodegen<ModuleLlvm>,
config: &ModuleConfig, config: &ModuleConfig,
) -> Result<(), FatalError> { ) {
let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &module.name[..]); let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &module.name[..]);
let llmod = module.module_llvm.llmod(); let llmod = module.module_llvm.llmod();
@ -477,7 +511,7 @@ pub(crate) unsafe fn optimize(
_ => llvm::OptStage::PreLinkNoLTO, _ => llvm::OptStage::PreLinkNoLTO,
}; };
optimize_with_new_llvm_pass_manager(cgcx, module, config, opt_level, opt_stage); optimize_with_new_llvm_pass_manager(cgcx, module, config, opt_level, opt_stage);
return Ok(()); return;
} }
if cgcx.prof.llvm_recording_enabled() { if cgcx.prof.llvm_recording_enabled() {
@ -600,7 +634,6 @@ pub(crate) unsafe fn optimize(
llvm::LLVMDisposePassManager(fpm); llvm::LLVMDisposePassManager(fpm);
llvm::LLVMDisposePassManager(mpm); llvm::LLVMDisposePassManager(mpm);
} }
Ok(())
} }
unsafe fn add_sanitizer_passes(config: &ModuleConfig, passes: &mut Vec<&'static mut llvm::Pass>) { unsafe fn add_sanitizer_passes(config: &ModuleConfig, passes: &mut Vec<&'static mut llvm::Pass>) {
@ -785,7 +818,15 @@ pub(crate) unsafe fn codegen(
llmod llmod
}; };
with_codegen(tm, llmod, config.no_builtins, |cpm| { with_codegen(tm, llmod, config.no_builtins, |cpm| {
write_output_file(diag_handler, tm, cpm, llmod, &path, llvm::FileType::AssemblyFile) write_output_file(
diag_handler,
tm,
cpm,
llmod,
&path,
None,
llvm::FileType::AssemblyFile,
)
})?; })?;
} }
@ -794,6 +835,15 @@ pub(crate) unsafe fn codegen(
let _timer = cgcx let _timer = cgcx
.prof .prof
.generic_activity_with_arg("LLVM_module_codegen_emit_obj", &module.name[..]); .generic_activity_with_arg("LLVM_module_codegen_emit_obj", &module.name[..]);
let dwo_out = cgcx.output_filenames.temp_path_dwo(module_name);
let dwo_out = match cgcx.split_dwarf_kind {
// Don't change how DWARF is emitted in single mode (or when disabled).
SplitDwarfKind::None | SplitDwarfKind::Single => None,
// Emit (a subset of the) DWARF into a separate file in split mode.
SplitDwarfKind::Split => Some(dwo_out.as_path()),
};
with_codegen(tm, llmod, config.no_builtins, |cpm| { with_codegen(tm, llmod, config.no_builtins, |cpm| {
write_output_file( write_output_file(
diag_handler, diag_handler,
@ -801,6 +851,7 @@ pub(crate) unsafe fn codegen(
cpm, cpm,
llmod, llmod,
&obj_out, &obj_out,
dwo_out,
llvm::FileType::ObjectFile, llvm::FileType::ObjectFile,
) )
})?; })?;
@ -828,6 +879,7 @@ pub(crate) unsafe fn codegen(
Ok(module.into_compiled_module( Ok(module.into_compiled_module(
config.emit_obj != EmitObj::None, config.emit_obj != EmitObj::None,
cgcx.split_dwarf_kind == SplitDwarfKind::Split,
config.emit_bc, config.emit_bc,
&cgcx.output_filenames, &cgcx.output_filenames,
)) ))

View file

@ -1,17 +1,15 @@
//! Codegen the completed AST to the LLVM IR. //! Codegen the MIR to the LLVM IR.
//!
//! Some functions here, such as codegen_block and codegen_expr, return a value --
//! the result of the codegen to LLVM -- while others, such as codegen_fn
//! and mono_item, are called only for the side effect of adding a
//! particular definition to the LLVM IR output we're producing.
//! //!
//! Hopefully useful general knowledge about codegen: //! Hopefully useful general knowledge about codegen:
//! //!
//! * There's no way to find out the `Ty` type of a Value. Doing so //! * There's no way to find out the [`Ty`] type of a [`Value`]. Doing so
//! would be "trying to get the eggs out of an omelette" (credit: //! would be "trying to get the eggs out of an omelette" (credit:
//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`, //! pcwalton). You can, instead, find out its [`llvm::Type`] by calling [`val_ty`],
//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int, //! but one [`llvm::Type`] corresponds to many [`Ty`]s; for instance, `tup(int, int,
//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`. //! int)` and `rec(x=int, y=int, z=int)` will have the same [`llvm::Type`].
//!
//! [`Ty`]: rustc_middle::ty::Ty
//! [`val_ty`]: common::val_ty
use super::ModuleLlvm; use super::ModuleLlvm;

View file

@ -314,6 +314,7 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
} }
} }
/// Get the [LLVM type][Type] of a [`Value`].
pub fn val_ty(v: &Value) -> &Type { pub fn val_ty(v: &Value) -> &Type {
unsafe { llvm::LLVMTypeOf(v) } unsafe { llvm::LLVMTypeOf(v) }
} }

View file

@ -993,9 +993,15 @@ pub fn compile_unit_metadata(
let producer = format!("clang LLVM ({})", rustc_producer); let producer = format!("clang LLVM ({})", rustc_producer);
let name_in_debuginfo = name_in_debuginfo.to_string_lossy(); let name_in_debuginfo = name_in_debuginfo.to_string_lossy();
let work_dir = tcx.sess.working_dir.0.to_string_lossy();
let flags = "\0"; let flags = "\0";
let split_name = "";
let out_dir = &tcx.output_filenames(LOCAL_CRATE).out_directory;
let split_name = tcx
.output_filenames(LOCAL_CRATE)
.split_dwarf_filename(tcx.sess.opts.debugging_opts.split_dwarf, Some(codegen_unit_name))
.unwrap_or_default();
let out_dir = out_dir.to_str().unwrap();
let split_name = split_name.to_str().unwrap();
// FIXME(#60020): // FIXME(#60020):
// //
@ -1020,8 +1026,8 @@ pub fn compile_unit_metadata(
debug_context.builder, debug_context.builder,
name_in_debuginfo.as_ptr().cast(), name_in_debuginfo.as_ptr().cast(),
name_in_debuginfo.len(), name_in_debuginfo.len(),
work_dir.as_ptr().cast(), out_dir.as_ptr().cast(),
work_dir.len(), out_dir.len(),
llvm::ChecksumKind::None, llvm::ChecksumKind::None,
ptr::null(), ptr::null(),
0, 0,
@ -1039,6 +1045,8 @@ pub fn compile_unit_metadata(
split_name.as_ptr().cast(), split_name.as_ptr().cast(),
split_name.len(), split_name.len(),
kind, kind,
0,
tcx.sess.opts.debugging_opts.split_dwarf_inlining,
); );
if tcx.sess.opts.debugging_opts.profile { if tcx.sess.opts.debugging_opts.profile {
@ -1409,10 +1417,11 @@ fn generator_layout_and_saved_local_names(
let state_arg = mir::Local::new(1); let state_arg = mir::Local::new(1);
for var in &body.var_debug_info { for var in &body.var_debug_info {
if var.place.local != state_arg { let place = if let mir::VarDebugInfoContents::Place(p) = var.value { p } else { continue };
if place.local != state_arg {
continue; continue;
} }
match var.place.projection[..] { match place.projection[..] {
[ [
// Deref of the `Pin<&mut Self>` state argument. // Deref of the `Pin<&mut Self>` state argument.
mir::ProjectionElem::Field(..), mir::ProjectionElem::Field(..),
@ -2313,13 +2322,13 @@ fn set_members_of_composite_type(
DIB(cx), DIB(cx),
composite_type_metadata, composite_type_metadata,
Some(type_array), Some(type_array),
type_params, Some(type_params),
); );
} }
} }
/// Computes the type parameters for a type, if any, for the given metadata. /// Computes the type parameters for a type, if any, for the given metadata.
fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&'ll DIArray> { fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> &'ll DIArray {
if let ty::Adt(def, substs) = *ty.kind() { if let ty::Adt(def, substs) = *ty.kind() {
if substs.types().next().is_some() { if substs.types().next().is_some() {
let generics = cx.tcx.generics_of(def.did); let generics = cx.tcx.generics_of(def.did);
@ -2349,10 +2358,10 @@ fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&'
}) })
.collect(); .collect();
return Some(create_DIArray(DIB(cx), &template_params[..])); return create_DIArray(DIB(cx), &template_params[..]);
} }
} }
return Some(create_DIArray(DIB(cx), &[])); return create_DIArray(DIB(cx), &[]);
fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> { fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> {
let mut names = generics let mut names = generics

View file

@ -854,8 +854,8 @@ fn generic_simd_intrinsic(
)); ));
} }
if name_str.starts_with("simd_shuffle") { if let Some(stripped) = name_str.strip_prefix("simd_shuffle") {
let n: u64 = name_str["simd_shuffle".len()..].parse().unwrap_or_else(|_| { let n: u64 = stripped.parse().unwrap_or_else(|_| {
span_bug!(span, "bad `simd_shuffle` instruction only caught in codegen?") span_bug!(span, "bad `simd_shuffle` instruction only caught in codegen?")
}); });

View file

@ -19,7 +19,9 @@ use back::write::{create_informational_target_machine, create_target_machine};
pub use llvm_util::target_features; pub use llvm_util::target_features;
use rustc_ast::expand::allocator::AllocatorKind; use rustc_ast::expand::allocator::AllocatorKind;
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule}; use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
use rustc_codegen_ssa::back::write::{CodegenContext, FatLTOInput, ModuleConfig}; use rustc_codegen_ssa::back::write::{
CodegenContext, FatLTOInput, ModuleConfig, TargetMachineFactoryConfig, TargetMachineFactoryFn,
};
use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::ModuleCodegen; use rustc_codegen_ssa::ModuleCodegen;
use rustc_codegen_ssa::{CodegenResults, CompiledModule}; use rustc_codegen_ssa::{CodegenResults, CompiledModule};
@ -34,7 +36,6 @@ use rustc_span::symbol::Symbol;
use std::any::Any; use std::any::Any;
use std::ffi::CStr; use std::ffi::CStr;
use std::sync::Arc;
mod back { mod back {
pub mod archive; pub mod archive;
@ -109,7 +110,7 @@ impl ExtraBackendMethods for LlvmCodegenBackend {
&self, &self,
sess: &Session, sess: &Session,
optlvl: OptLevel, optlvl: OptLevel,
) -> Arc<dyn Fn() -> Result<&'static mut llvm::TargetMachine, String> + Send + Sync> { ) -> TargetMachineFactoryFn<Self> {
back::write::target_machine_factory(sess, optlvl) back::write::target_machine_factory(sess, optlvl)
} }
fn target_cpu<'b>(&self, sess: &'b Session) -> &'b str { fn target_cpu<'b>(&self, sess: &'b Session) -> &'b str {
@ -159,7 +160,7 @@ impl WriteBackendMethods for LlvmCodegenBackend {
module: &ModuleCodegen<Self::Module>, module: &ModuleCodegen<Self::Module>,
config: &ModuleConfig, config: &ModuleConfig,
) -> Result<(), FatalError> { ) -> Result<(), FatalError> {
back::write::optimize(cgcx, diag_handler, module, config) Ok(back::write::optimize(cgcx, diag_handler, module, config))
} }
unsafe fn optimize_thin( unsafe fn optimize_thin(
cgcx: &CodegenContext<Self>, cgcx: &CodegenContext<Self>,
@ -297,21 +298,19 @@ impl CodegenBackend for LlvmCodegenBackend {
codegen_results: CodegenResults, codegen_results: CodegenResults,
outputs: &OutputFilenames, outputs: &OutputFilenames,
) -> Result<(), ErrorReported> { ) -> Result<(), ErrorReported> {
use crate::back::archive::LlvmArchiveBuilder;
use rustc_codegen_ssa::back::link::link_binary;
// Run the linker on any artifacts that resulted from the LLVM run. // Run the linker on any artifacts that resulted from the LLVM run.
// This should produce either a finished executable or library. // This should produce either a finished executable or library.
sess.time("link_crate", || { let target_cpu = crate::llvm_util::target_cpu(sess);
use crate::back::archive::LlvmArchiveBuilder; link_binary::<LlvmArchiveBuilder<'_>>(
use rustc_codegen_ssa::back::link::link_binary; sess,
&codegen_results,
let target_cpu = crate::llvm_util::target_cpu(sess); outputs,
link_binary::<LlvmArchiveBuilder<'_>>( &codegen_results.crate_name.as_str(),
sess, target_cpu,
&codegen_results, );
outputs,
&codegen_results.crate_name.as_str(),
target_cpu,
);
});
Ok(()) Ok(())
} }
@ -331,7 +330,7 @@ impl ModuleLlvm {
unsafe { unsafe {
let llcx = llvm::LLVMRustContextCreate(tcx.sess.fewer_names()); let llcx = llvm::LLVMRustContextCreate(tcx.sess.fewer_names());
let llmod_raw = context::create_module(tcx, llcx, mod_name) as *const _; let llmod_raw = context::create_module(tcx, llcx, mod_name) as *const _;
ModuleLlvm { llmod_raw, llcx, tm: create_target_machine(tcx) } ModuleLlvm { llmod_raw, llcx, tm: create_target_machine(tcx, mod_name) }
} }
} }
@ -352,7 +351,13 @@ impl ModuleLlvm {
unsafe { unsafe {
let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names); let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names);
let llmod_raw = back::lto::parse_module(llcx, name, buffer, handler)?; let llmod_raw = back::lto::parse_module(llcx, name, buffer, handler)?;
let tm = match (cgcx.tm_factory.0)() {
let split_dwarf_file = cgcx
.output_filenames
.split_dwarf_filename(cgcx.split_dwarf_kind, Some(name.to_str().unwrap()));
let tm_factory_config = TargetMachineFactoryConfig { split_dwarf_file };
let tm = match (cgcx.tm_factory)(tm_factory_config) {
Ok(m) => m, Ok(m) => m,
Err(e) => { Err(e) => {
handler.struct_err(&e).emit(); handler.struct_err(&e).emit();

View file

@ -1830,6 +1830,8 @@ extern "C" {
SplitName: *const c_char, SplitName: *const c_char,
SplitNameLen: size_t, SplitNameLen: size_t,
kind: DebugEmissionKind, kind: DebugEmissionKind,
DWOId: u64,
SplitDebugInlining: bool,
) -> &'a DIDescriptor; ) -> &'a DIDescriptor;
pub fn LLVMRustDIBuilderCreateFile( pub fn LLVMRustDIBuilderCreateFile(
@ -2151,6 +2153,7 @@ extern "C" {
EmitStackSizeSection: bool, EmitStackSizeSection: bool,
RelaxELFRelocations: bool, RelaxELFRelocations: bool,
UseInitArray: bool, UseInitArray: bool,
SplitDwarfFile: *const c_char,
) -> Option<&'static mut TargetMachine>; ) -> Option<&'static mut TargetMachine>;
pub fn LLVMRustDisposeTargetMachine(T: &'static mut TargetMachine); pub fn LLVMRustDisposeTargetMachine(T: &'static mut TargetMachine);
pub fn LLVMRustAddBuilderLibraryInfo( pub fn LLVMRustAddBuilderLibraryInfo(
@ -2179,6 +2182,7 @@ extern "C" {
PM: &PassManager<'a>, PM: &PassManager<'a>,
M: &'a Module, M: &'a Module,
Output: *const c_char, Output: *const c_char,
DwoOutput: *const c_char,
FileType: FileType, FileType: FileType,
) -> LLVMRustResult; ) -> LLVMRustResult;
pub fn LLVMRustOptimizeWithNewPassManager( pub fn LLVMRustOptimizeWithNewPassManager(

View file

@ -40,9 +40,7 @@ fn uncached_llvm_type<'a, 'tcx>(
// FIXME(eddyb) producing readable type names for trait objects can result // FIXME(eddyb) producing readable type names for trait objects can result
// in problematically distinct types due to HRTB and subtyping (see #47638). // in problematically distinct types due to HRTB and subtyping (see #47638).
// ty::Dynamic(..) | // ty::Dynamic(..) |
ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Generator(..) | ty::Str ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Generator(..) | ty::Str => {
if !cx.sess().fewer_names() =>
{
let mut name = with_no_trimmed_paths(|| layout.ty.to_string()); let mut name = with_no_trimmed_paths(|| layout.ty.to_string());
if let (&ty::Adt(def, _), &Variants::Single { index }) = if let (&ty::Adt(def, _), &Variants::Single { index }) =
(layout.ty.kind(), &layout.variants) (layout.ty.kind(), &layout.variants)
@ -58,12 +56,6 @@ fn uncached_llvm_type<'a, 'tcx>(
} }
Some(name) Some(name)
} }
ty::Adt(..) => {
// If `Some` is returned then a named struct is created in LLVM. Name collisions are
// avoided by LLVM (with increasing suffixes). If rustc doesn't generate names then that
// can improve perf.
Some(String::new())
}
_ => None, _ => None,
}; };

View file

@ -2,7 +2,7 @@ use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::temp_dir::MaybeTempDir; use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_fs_util::fix_windows_verbatim_for_gcc; use rustc_fs_util::fix_windows_verbatim_for_gcc;
use rustc_hir::def_id::CrateNum; use rustc_hir::def_id::CrateNum;
use rustc_middle::middle::cstore::{EncodedMetadata, LibSource, NativeLib}; use rustc_middle::middle::cstore::{EncodedMetadata, LibSource};
use rustc_middle::middle::dependency_format::Linkage; use rustc_middle::middle::dependency_format::Linkage;
use rustc_session::config::{self, CFGuard, CrateType, DebugInfo}; use rustc_session::config::{self, CFGuard, CrateType, DebugInfo};
use rustc_session::config::{OutputFilenames, OutputType, PrintRequest, SanitizerSet}; use rustc_session::config::{OutputFilenames, OutputType, PrintRequest, SanitizerSet};
@ -21,7 +21,10 @@ use super::archive::ArchiveBuilder;
use super::command::Command; use super::command::Command;
use super::linker::{self, Linker}; use super::linker::{self, Linker};
use super::rpath::{self, RPathConfig}; use super::rpath::{self, RPathConfig};
use crate::{looks_like_rust_object_file, CodegenResults, CrateInfo, METADATA_FILENAME}; use crate::{
looks_like_rust_object_file, CodegenResults, CompiledModule, CrateInfo, NativeLib,
METADATA_FILENAME,
};
use cc::windows_registry; use cc::windows_registry;
use tempfile::Builder as TempFileBuilder; use tempfile::Builder as TempFileBuilder;
@ -96,6 +99,9 @@ pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
path.as_ref(), path.as_ref(),
target_cpu, target_cpu,
); );
if sess.opts.debugging_opts.split_dwarf == config::SplitDwarfKind::Split {
link_dwarf_object(sess, &out_filename);
}
} }
} }
if sess.opts.json_artifact_notifications { if sess.opts.json_artifact_notifications {
@ -107,22 +113,30 @@ pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
// Remove the temporary object file and metadata if we aren't saving temps // Remove the temporary object file and metadata if we aren't saving temps
sess.time("link_binary_remove_temps", || { sess.time("link_binary_remove_temps", || {
if !sess.opts.cg.save_temps { if !sess.opts.cg.save_temps {
let remove_temps_from_module = |module: &CompiledModule| {
if let Some(ref obj) = module.object {
remove(sess, obj);
}
if let Some(ref obj) = module.dwarf_object {
remove(sess, obj);
}
};
if sess.opts.output_types.should_codegen() if sess.opts.output_types.should_codegen()
&& !preserve_objects_for_their_debuginfo(sess) && !preserve_objects_for_their_debuginfo(sess)
{ {
for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) { for module in &codegen_results.modules {
remove(sess, obj); remove_temps_from_module(module);
} }
} }
if let Some(ref metadata_module) = codegen_results.metadata_module { if let Some(ref metadata_module) = codegen_results.metadata_module {
if let Some(ref obj) = metadata_module.object { remove_temps_from_module(metadata_module);
remove(sess, obj);
}
} }
if let Some(ref allocator_module) = codegen_results.allocator_module { if let Some(ref allocator_module) = codegen_results.allocator_module {
if let Some(ref obj) = allocator_module.object { remove_temps_from_module(allocator_module);
remove(sess, obj);
}
} }
} }
}); });
@ -279,12 +293,12 @@ pub fn emit_metadata(sess: &Session, metadata: &EncodedMetadata, tmpdir: &MaybeT
out_filename out_filename
} }
// Create an 'rlib' /// Create an 'rlib'.
// ///
// An rlib in its current incarnation is essentially a renamed .a file. The /// An rlib in its current incarnation is essentially a renamed .a file. The rlib primarily contains
// rlib primarily contains the object file of the crate, but it also contains /// the object file of the crate, but it also contains all of the object files from native
// all of the object files from native libraries. This is done by unzipping /// libraries. This is done by unzipping native libraries and inserting all of the contents into
// native libraries and inserting all of the contents into this archive. /// this archive.
fn link_rlib<'a, B: ArchiveBuilder<'a>>( fn link_rlib<'a, B: ArchiveBuilder<'a>>(
sess: &'a Session, sess: &'a Session,
codegen_results: &CodegenResults, codegen_results: &CodegenResults,
@ -379,18 +393,17 @@ fn link_rlib<'a, B: ArchiveBuilder<'a>>(
ab ab
} }
// Create a static archive /// Create a static archive.
// ///
// This is essentially the same thing as an rlib, but it also involves adding /// This is essentially the same thing as an rlib, but it also involves adding all of the upstream
// all of the upstream crates' objects into the archive. This will slurp in /// crates' objects into the archive. This will slurp in all of the native libraries of upstream
// all of the native libraries of upstream dependencies as well. /// dependencies as well.
// ///
// Additionally, there's no way for us to link dynamic libraries, so we warn /// Additionally, there's no way for us to link dynamic libraries, so we warn about all dynamic
// about all dynamic library dependencies that they're not linked in. /// library dependencies that they're not linked in.
// ///
// There's no need to include metadata in a static archive, so ensure to not /// There's no need to include metadata in a static archive, so ensure to not link in the metadata
// link in the metadata object file (and also don't prepare the archive with a /// object file (and also don't prepare the archive with a metadata file).
// metadata file).
fn link_staticlib<'a, B: ArchiveBuilder<'a>>( fn link_staticlib<'a, B: ArchiveBuilder<'a>>(
sess: &'a Session, sess: &'a Session,
codegen_results: &CodegenResults, codegen_results: &CodegenResults,
@ -447,10 +460,73 @@ fn link_staticlib<'a, B: ArchiveBuilder<'a>>(
} }
} }
// Create a dynamic library or executable fn escape_stdout_stderr_string(s: &[u8]) -> String {
// str::from_utf8(s).map(|s| s.to_owned()).unwrap_or_else(|_| {
// This will invoke the system linker/cc to create the resulting file. This let mut x = "Non-UTF-8 output: ".to_string();
// links to all upstream files as well. x.extend(s.iter().flat_map(|&b| ascii::escape_default(b)).map(char::from));
x
})
}
const LLVM_DWP_EXECUTABLE: &'static str = "rust-llvm-dwp";
/// Invoke `llvm-dwp` (shipped alongside rustc) to link `dwo` files from Split DWARF into a `dwp`
/// file.
fn link_dwarf_object<'a>(sess: &'a Session, executable_out_filename: &Path) {
info!("preparing dwp to {}.dwp", executable_out_filename.to_str().unwrap());
let dwp_out_filename = executable_out_filename.with_extension("dwp");
let mut cmd = Command::new(LLVM_DWP_EXECUTABLE);
cmd.arg("-e");
cmd.arg(executable_out_filename);
cmd.arg("-o");
cmd.arg(&dwp_out_filename);
let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths(false);
if let Some(path) = env::var_os("PATH") {
new_path.extend(env::split_paths(&path));
}
let new_path = env::join_paths(new_path).unwrap();
cmd.env("PATH", new_path);
info!("{:?}", &cmd);
match sess.time("run_dwp", || cmd.output()) {
Ok(prog) if !prog.status.success() => {
sess.struct_err(&format!(
"linking dwarf objects with `{}` failed: {}",
LLVM_DWP_EXECUTABLE, prog.status
))
.note(&format!("{:?}", &cmd))
.note(&escape_stdout_stderr_string(&prog.stdout))
.note(&escape_stdout_stderr_string(&prog.stderr))
.emit();
info!("linker stderr:\n{}", escape_stdout_stderr_string(&prog.stderr));
info!("linker stdout:\n{}", escape_stdout_stderr_string(&prog.stdout));
}
Ok(_) => {}
Err(e) => {
let dwp_not_found = e.kind() == io::ErrorKind::NotFound;
let mut err = if dwp_not_found {
sess.struct_err(&format!("linker `{}` not found", LLVM_DWP_EXECUTABLE))
} else {
sess.struct_err(&format!("could not exec the linker `{}`", LLVM_DWP_EXECUTABLE))
};
err.note(&e.to_string());
if !dwp_not_found {
err.note(&format!("{:?}", &cmd));
}
err.emit();
}
}
}
/// Create a dynamic library or executable.
///
/// This will invoke the system linker/cc to create the resulting file. This links to all upstream
/// files as well.
fn link_natively<'a, B: ArchiveBuilder<'a>>( fn link_natively<'a, B: ArchiveBuilder<'a>>(
sess: &'a Session, sess: &'a Session,
crate_type: CrateType, crate_type: CrateType,
@ -662,7 +738,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
prog.status prog.status
)) ))
.note(&format!("{:?}", &cmd)) .note(&format!("{:?}", &cmd))
.note(&escape_string(&output)) .note(&escape_stdout_stderr_string(&output))
.emit(); .emit();
// If MSVC's `link.exe` was expected but the return code // If MSVC's `link.exe` was expected but the return code
@ -715,8 +791,8 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
sess.abort_if_errors(); sess.abort_if_errors();
} }
info!("linker stderr:\n{}", escape_string(&prog.stderr)); info!("linker stderr:\n{}", escape_stdout_stderr_string(&prog.stderr));
info!("linker stdout:\n{}", escape_string(&prog.stdout)); info!("linker stdout:\n{}", escape_stdout_stderr_string(&prog.stdout));
} }
Err(e) => { Err(e) => {
let linker_not_found = e.kind() == io::ErrorKind::NotFound; let linker_not_found = e.kind() == io::ErrorKind::NotFound;
@ -962,6 +1038,13 @@ fn preserve_objects_for_their_debuginfo(sess: &Session) -> bool {
return false; return false;
} }
// Single mode keeps debuginfo in the same object file, but in such a way that it it skipped
// by the linker - so it's expected that when codegen units are linked together that this
// debuginfo would be lost without keeping around the temps.
if sess.opts.debugging_opts.split_dwarf == config::SplitDwarfKind::Single {
return true;
}
// If we're on OSX then the equivalent of split dwarf is turned on by // If we're on OSX then the equivalent of split dwarf is turned on by
// default. The final executable won't actually have any debug information // default. The final executable won't actually have any debug information
// except it'll have pointers to elsewhere. Historically we've always run // except it'll have pointers to elsewhere. Historically we've always run
@ -1677,17 +1760,15 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
cmd.take_cmd() cmd.take_cmd()
} }
// # Native library linking /// # Native library linking
// ///
// User-supplied library search paths (-L on the command line). These are /// User-supplied library search paths (-L on the command line). These are the same paths used to
// the same paths used to find Rust crates, so some of them may have been /// find Rust crates, so some of them may have been added already by the previous crate linking
// added already by the previous crate linking code. This only allows them /// code. This only allows them to be found at compile time so it is still entirely up to outside
// to be found at compile time so it is still entirely up to outside /// forces to make sure that library can be found at runtime.
// forces to make sure that library can be found at runtime. ///
// /// Also note that the native libraries linked here are only the ones located in the current crate.
// Also note that the native libraries linked here are only the ones located /// Upstream crates with native library dependencies may have their native library pulled in above.
// in the current crate. Upstream crates with native library dependencies
// may have their native library pulled in above.
fn add_local_native_libraries( fn add_local_native_libraries(
cmd: &mut dyn Linker, cmd: &mut dyn Linker,
sess: &Session, sess: &Session,
@ -1727,11 +1808,10 @@ fn add_local_native_libraries(
} }
} }
// # Rust Crate linking /// # Rust Crate linking
// ///
// Rust crates are not considered at all when creating an rlib output. All /// Rust crates are not considered at all when creating an rlib output. All dependencies will be
// dependencies will be linked when producing the final output (instead of /// linked when producing the final output (instead of the intermediate rlib version).
// the intermediate rlib version)
fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>( fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
cmd: &mut dyn Linker, cmd: &mut dyn Linker,
sess: &'a Session, sess: &'a Session,
@ -1996,24 +2076,21 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
} }
} }
// Link in all of our upstream crates' native dependencies. Remember that /// Link in all of our upstream crates' native dependencies. Remember that all of these upstream
// all of these upstream native dependencies are all non-static /// native dependencies are all non-static dependencies. We've got two cases then:
// dependencies. We've got two cases then: ///
// /// 1. The upstream crate is an rlib. In this case we *must* link in the native dependency because
// 1. The upstream crate is an rlib. In this case we *must* link in the /// the rlib is just an archive.
// native dependency because the rlib is just an archive. ///
// /// 2. The upstream crate is a dylib. In order to use the dylib, we have to have the dependency
// 2. The upstream crate is a dylib. In order to use the dylib, we have to /// present on the system somewhere. Thus, we don't gain a whole lot from not linking in the
// have the dependency present on the system somewhere. Thus, we don't /// dynamic dependency to this crate as well.
// gain a whole lot from not linking in the dynamic dependency to this ///
// crate as well. /// The use case for this is a little subtle. In theory the native dependencies of a crate are
// /// purely an implementation detail of the crate itself, but the problem arises with generic and
// The use case for this is a little subtle. In theory the native /// inlined functions. If a generic function calls a native function, then the generic function
// dependencies of a crate are purely an implementation detail of the crate /// must be instantiated in the target crate, meaning that the native symbol must also be resolved
// itself, but the problem arises with generic and inlined functions. If a /// in the target crate.
// generic function calls a native function, then the generic function must
// be instantiated in the target crate, meaning that the native symbol must
// also be resolved in the target crate.
fn add_upstream_native_libraries( fn add_upstream_native_libraries(
cmd: &mut dyn Linker, cmd: &mut dyn Linker,
sess: &Session, sess: &Session,

View file

@ -274,17 +274,20 @@ impl ModuleConfig {
} }
} }
// HACK(eddyb) work around `#[derive]` producing wrong bounds for `Clone`. /// Configuration passed to the function returned by the `target_machine_factory`.
pub struct TargetMachineFactory<B: WriteBackendMethods>( pub struct TargetMachineFactoryConfig {
pub Arc<dyn Fn() -> Result<B::TargetMachine, String> + Send + Sync>, /// Split DWARF is enabled in LLVM by checking that `TM.MCOptions.SplitDwarfFile` isn't empty,
); /// so the path to the dwarf object has to be provided when we create the target machine.
/// This can be ignored by backends which do not need it for their Split DWARF support.
impl<B: WriteBackendMethods> Clone for TargetMachineFactory<B> { pub split_dwarf_file: Option<PathBuf>,
fn clone(&self) -> Self {
TargetMachineFactory(self.0.clone())
}
} }
pub type TargetMachineFactoryFn<B> = Arc<
dyn Fn(TargetMachineFactoryConfig) -> Result<<B as WriteBackendMethods>::TargetMachine, String>
+ Send
+ Sync,
>;
pub type ExportedSymbols = FxHashMap<CrateNum, Arc<Vec<(String, SymbolExportLevel)>>>; pub type ExportedSymbols = FxHashMap<CrateNum, Arc<Vec<(String, SymbolExportLevel)>>>;
/// Additional resources used by optimize_and_codegen (not module specific) /// Additional resources used by optimize_and_codegen (not module specific)
@ -305,12 +308,13 @@ pub struct CodegenContext<B: WriteBackendMethods> {
pub regular_module_config: Arc<ModuleConfig>, pub regular_module_config: Arc<ModuleConfig>,
pub metadata_module_config: Arc<ModuleConfig>, pub metadata_module_config: Arc<ModuleConfig>,
pub allocator_module_config: Arc<ModuleConfig>, pub allocator_module_config: Arc<ModuleConfig>,
pub tm_factory: TargetMachineFactory<B>, pub tm_factory: TargetMachineFactoryFn<B>,
pub msvc_imps_needed: bool, pub msvc_imps_needed: bool,
pub is_pe_coff: bool, pub is_pe_coff: bool,
pub target_pointer_width: u32, pub target_pointer_width: u32,
pub target_arch: String, pub target_arch: String,
pub debuginfo: config::DebugInfo, pub debuginfo: config::DebugInfo,
pub split_dwarf_kind: config::SplitDwarfKind,
// Number of cgus excluding the allocator/metadata modules // Number of cgus excluding the allocator/metadata modules
pub total_cgus: usize, pub total_cgus: usize,
@ -627,6 +631,12 @@ fn produce_final_output_artifacts(
} }
} }
if let Some(ref path) = module.dwarf_object {
if !keep_numbered_objects {
remove(sess, path);
}
}
if let Some(ref path) = module.bytecode { if let Some(ref path) = module.bytecode {
if !keep_numbered_bitcode { if !keep_numbered_bitcode {
remove(sess, path); remove(sess, path);
@ -849,6 +859,7 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
name: module.name, name: module.name,
kind: ModuleKind::Regular, kind: ModuleKind::Regular,
object, object,
dwarf_object: None,
bytecode: None, bytecode: None,
})) }))
} }
@ -1020,13 +1031,14 @@ fn start_executing_work<B: ExtraBackendMethods>(
regular_module_config: regular_config, regular_module_config: regular_config,
metadata_module_config: metadata_config, metadata_module_config: metadata_config,
allocator_module_config: allocator_config, allocator_module_config: allocator_config,
tm_factory: TargetMachineFactory(backend.target_machine_factory(tcx.sess, ol)), tm_factory: backend.target_machine_factory(tcx.sess, ol),
total_cgus, total_cgus,
msvc_imps_needed: msvc_imps_needed(tcx), msvc_imps_needed: msvc_imps_needed(tcx),
is_pe_coff: tcx.sess.target.is_like_windows, is_pe_coff: tcx.sess.target.is_like_windows,
target_pointer_width: tcx.sess.target.pointer_width, target_pointer_width: tcx.sess.target.pointer_width,
target_arch: tcx.sess.target.arch.clone(), target_arch: tcx.sess.target.arch.clone(),
debuginfo: tcx.sess.opts.debuginfo, debuginfo: tcx.sess.opts.debuginfo,
split_dwarf_kind: tcx.sess.opts.debugging_opts.split_dwarf,
}; };
// This is the "main loop" of parallel work happening for parallel codegen. // This is the "main loop" of parallel work happening for parallel codegen.

View file

@ -1,18 +1,3 @@
//! Codegen the completed AST to the LLVM IR.
//!
//! Some functions here, such as `codegen_block` and `codegen_expr`, return a value --
//! the result of the codegen to LLVM -- while others, such as `codegen_fn`
//! and `mono_item`, are called only for the side effect of adding a
//! particular definition to the LLVM IR output we're producing.
//!
//! Hopefully useful general knowledge about codegen:
//!
//! * There's no way to find out the `Ty` type of a `Value`. Doing so
//! would be "trying to get the eggs out of an omelette" (credit:
//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`,
//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int,
//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`.
use crate::back::write::{ use crate::back::write::{
compute_per_cgu_lto_type, start_async_codegen, submit_codegened_module_to_llvm, compute_per_cgu_lto_type, start_async_codegen, submit_codegened_module_to_llvm,
submit_post_lto_module_to_llvm, submit_pre_lto_module_to_llvm, ComputedLtoType, OngoingCodegen, submit_post_lto_module_to_llvm, submit_pre_lto_module_to_llvm, ComputedLtoType, OngoingCodegen,
@ -46,7 +31,6 @@ use rustc_session::cgu_reuse_tracker::CguReuse;
use rustc_session::config::{self, EntryFnType}; use rustc_session::config::{self, EntryFnType};
use rustc_session::utils::NativeLibKind; use rustc_session::utils::NativeLibKind;
use rustc_session::Session; use rustc_session::Session;
use rustc_symbol_mangling::test as symbol_names_test;
use rustc_target::abi::{Align, LayoutOf, VariantIdx}; use rustc_target::abi::{Align, LayoutOf, VariantIdx};
use std::cmp; use std::cmp;
@ -486,8 +470,6 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
ongoing_codegen.codegen_finished(tcx); ongoing_codegen.codegen_finished(tcx);
finalize_tcx(tcx);
ongoing_codegen.check_for_errors(tcx.sess); ongoing_codegen.check_for_errors(tcx.sess);
return ongoing_codegen; return ongoing_codegen;
@ -688,14 +670,8 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
total_codegen_time.into_inner(), total_codegen_time.into_inner(),
); );
rustc_incremental::assert_module_sources::assert_module_sources(tcx);
symbol_names_test::report_symbol_names(tcx);
ongoing_codegen.check_for_errors(tcx.sess); ongoing_codegen.check_for_errors(tcx.sess);
finalize_tcx(tcx);
ongoing_codegen.into_inner() ongoing_codegen.into_inner()
} }
@ -746,18 +722,6 @@ impl<B: ExtraBackendMethods> Drop for AbortCodegenOnDrop<B> {
} }
} }
fn finalize_tcx(tcx: TyCtxt<'_>) {
tcx.sess.time("assert_dep_graph", || rustc_incremental::assert_dep_graph(tcx));
tcx.sess.time("serialize_dep_graph", || rustc_incremental::save_dep_graph(tcx));
// We assume that no queries are run past here. If there are new queries
// after this point, they'll show up as "<unknown>" in self-profiling data.
{
let _prof_timer = tcx.prof.generic_activity("self_profile_alloc_query_strings");
tcx.alloc_self_profile_query_strings();
}
}
impl CrateInfo { impl CrateInfo {
pub fn new(tcx: TyCtxt<'_>) -> CrateInfo { pub fn new(tcx: TyCtxt<'_>) -> CrateInfo {
let mut info = CrateInfo { let mut info = CrateInfo {
@ -766,7 +730,7 @@ impl CrateInfo {
profiler_runtime: None, profiler_runtime: None,
is_no_builtins: Default::default(), is_no_builtins: Default::default(),
native_libraries: Default::default(), native_libraries: Default::default(),
used_libraries: tcx.native_libraries(LOCAL_CRATE), used_libraries: tcx.native_libraries(LOCAL_CRATE).iter().map(Into::into).collect(),
link_args: tcx.link_args(LOCAL_CRATE), link_args: tcx.link_args(LOCAL_CRATE),
crate_name: Default::default(), crate_name: Default::default(),
used_crates_dynamic: cstore::used_crates(tcx, LinkagePreference::RequireDynamic), used_crates_dynamic: cstore::used_crates(tcx, LinkagePreference::RequireDynamic),
@ -787,7 +751,8 @@ impl CrateInfo {
info.missing_lang_items.reserve(n_crates); info.missing_lang_items.reserve(n_crates);
for &cnum in crates.iter() { for &cnum in crates.iter() {
info.native_libraries.insert(cnum, tcx.native_libraries(cnum)); info.native_libraries
.insert(cnum, tcx.native_libraries(cnum).iter().map(Into::into).collect());
info.crate_name.insert(cnum, tcx.crate_name(cnum).to_string()); info.crate_name.insert(cnum, tcx.crate_name(cnum).to_string());
info.used_crate_source.insert(cnum, tcx.used_crate_source(cnum)); info.used_crate_source.insert(cnum, tcx.used_crate_source(cnum));
if tcx.is_panic_runtime(cnum) { if tcx.is_panic_runtime(cnum) {

View file

@ -21,15 +21,17 @@ extern crate tracing;
#[macro_use] #[macro_use]
extern crate rustc_middle; extern crate rustc_middle;
use rustc_ast as ast;
use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use rustc_hir::def_id::CrateNum; use rustc_hir::def_id::CrateNum;
use rustc_hir::LangItem; use rustc_hir::LangItem;
use rustc_middle::dep_graph::WorkProduct; use rustc_middle::dep_graph::WorkProduct;
use rustc_middle::middle::cstore::{CrateSource, LibSource, NativeLib}; use rustc_middle::middle::cstore::{self, CrateSource, LibSource};
use rustc_middle::middle::dependency_format::Dependencies; use rustc_middle::middle::dependency_format::Dependencies;
use rustc_middle::ty::query::Providers; use rustc_middle::ty::query::Providers;
use rustc_session::config::{OutputFilenames, OutputType, RUST_CGU_EXT}; use rustc_session::config::{OutputFilenames, OutputType, RUST_CGU_EXT};
use rustc_session::utils::NativeLibKind;
use rustc_span::symbol::Symbol; use rustc_span::symbol::Symbol;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -64,13 +66,15 @@ impl<M> ModuleCodegen<M> {
pub fn into_compiled_module( pub fn into_compiled_module(
self, self,
emit_obj: bool, emit_obj: bool,
emit_dwarf_obj: bool,
emit_bc: bool, emit_bc: bool,
outputs: &OutputFilenames, outputs: &OutputFilenames,
) -> CompiledModule { ) -> CompiledModule {
let object = emit_obj.then(|| outputs.temp_path(OutputType::Object, Some(&self.name))); let object = emit_obj.then(|| outputs.temp_path(OutputType::Object, Some(&self.name)));
let dwarf_object = emit_dwarf_obj.then(|| outputs.temp_path_dwo(Some(&self.name)));
let bytecode = emit_bc.then(|| outputs.temp_path(OutputType::Bitcode, Some(&self.name))); let bytecode = emit_bc.then(|| outputs.temp_path(OutputType::Bitcode, Some(&self.name)));
CompiledModule { name: self.name.clone(), kind: self.kind, object, bytecode } CompiledModule { name: self.name.clone(), kind: self.kind, object, dwarf_object, bytecode }
} }
} }
@ -79,6 +83,7 @@ pub struct CompiledModule {
pub name: String, pub name: String,
pub kind: ModuleKind, pub kind: ModuleKind,
pub object: Option<PathBuf>, pub object: Option<PathBuf>,
pub dwarf_object: Option<PathBuf>,
pub bytecode: Option<PathBuf>, pub bytecode: Option<PathBuf>,
} }
@ -102,6 +107,19 @@ bitflags::bitflags! {
} }
} }
#[derive(Clone, Debug, Encodable, Decodable, HashStable)]
pub struct NativeLib {
pub kind: NativeLibKind,
pub name: Option<Symbol>,
pub cfg: Option<ast::MetaItem>,
}
impl From<&cstore::NativeLib> for NativeLib {
fn from(lib: &cstore::NativeLib) -> Self {
NativeLib { kind: lib.kind, name: lib.name, cfg: lib.cfg.clone() }
}
}
/// Misc info we load from metadata to persist beyond the tcx. /// Misc info we load from metadata to persist beyond the tcx.
/// ///
/// Note: though `CrateNum` is only meaningful within the same tcx, information within `CrateInfo` /// Note: though `CrateNum` is only meaningful within the same tcx, information within `CrateInfo`
@ -116,9 +134,9 @@ pub struct CrateInfo {
pub compiler_builtins: Option<CrateNum>, pub compiler_builtins: Option<CrateNum>,
pub profiler_runtime: Option<CrateNum>, pub profiler_runtime: Option<CrateNum>,
pub is_no_builtins: FxHashSet<CrateNum>, pub is_no_builtins: FxHashSet<CrateNum>,
pub native_libraries: FxHashMap<CrateNum, Lrc<Vec<NativeLib>>>, pub native_libraries: FxHashMap<CrateNum, Vec<NativeLib>>,
pub crate_name: FxHashMap<CrateNum, String>, pub crate_name: FxHashMap<CrateNum, String>,
pub used_libraries: Lrc<Vec<NativeLib>>, pub used_libraries: Vec<NativeLib>,
pub link_args: Lrc<Vec<String>>, pub link_args: Lrc<Vec<String>>,
pub used_crate_source: FxHashMap<CrateNum, Lrc<CrateSource>>, pub used_crate_source: FxHashMap<CrateNum, Lrc<CrateSource>>,
pub used_crates_static: Vec<(CrateNum, LibSource)>, pub used_crates_static: Vec<(CrateNum, LibSource)>,

View file

@ -112,12 +112,12 @@ impl<Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
}; };
// Allow uses of projections that are ZSTs or from scalar fields. // Allow uses of projections that are ZSTs or from scalar fields.
let is_consume = match context { let is_consume = matches!(
context,
PlaceContext::NonMutatingUse( PlaceContext::NonMutatingUse(
NonMutatingUseContext::Copy | NonMutatingUseContext::Move, NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
) => true, )
_ => false, );
};
if is_consume { if is_consume {
let base_ty = let base_ty =
mir::Place::ty_from(place_ref.local, proj_base, self.fx.mir, cx.tcx()); mir::Place::ty_from(place_ref.local, proj_base, self.fx.mir, cx.tcx());

View file

@ -522,7 +522,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mut bx: Bx, mut bx: Bx,
terminator: &mir::Terminator<'tcx>, terminator: &mir::Terminator<'tcx>,
func: &mir::Operand<'tcx>, func: &mir::Operand<'tcx>,
args: &Vec<mir::Operand<'tcx>>, args: &[mir::Operand<'tcx>],
destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>, destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
cleanup: Option<mir::BasicBlock>, cleanup: Option<mir::BasicBlock>,
fn_span: Span, fn_span: Span,
@ -1395,6 +1395,25 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
dst: PlaceRef<'tcx, Bx::Value>, dst: PlaceRef<'tcx, Bx::Value>,
) { ) {
let src = self.codegen_operand(bx, src); let src = self.codegen_operand(bx, src);
// Special-case transmutes between scalars as simple bitcasts.
match (&src.layout.abi, &dst.layout.abi) {
(abi::Abi::Scalar(src_scalar), abi::Abi::Scalar(dst_scalar)) => {
// HACK(eddyb) LLVM doesn't like `bitcast`s between pointers and non-pointers.
if (src_scalar.value == abi::Pointer) == (dst_scalar.value == abi::Pointer) {
assert_eq!(src.layout.size, dst.layout.size);
// NOTE(eddyb) the `from_immediate` and `to_immediate_scalar`
// conversions allow handling `bool`s the same as `u8`s.
let src = bx.from_immediate(src.immediate());
let src_as_dst = bx.bitcast(src, bx.backend_type(dst.layout));
Immediate(bx.to_immediate_scalar(src_as_dst, dst_scalar)).store(bx, dst);
return;
}
}
_ => {}
}
let llty = bx.backend_type(src.layout); let llty = bx.backend_type(src.layout);
let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty)); let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
let align = src.layout.align.abi.min(dst.align); let align = src.layout.align.abi.min(dst.align);

View file

@ -11,7 +11,7 @@ use super::FunctionCx;
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
pub fn eval_mir_constant_to_operand( pub fn eval_mir_constant_to_operand(
&mut self, &self,
bx: &mut Bx, bx: &mut Bx,
constant: &mir::Constant<'tcx>, constant: &mir::Constant<'tcx>,
) -> Result<OperandRef<'tcx, Bx::Value>, ErrorHandled> { ) -> Result<OperandRef<'tcx, Bx::Value>, ErrorHandled> {
@ -21,7 +21,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
} }
pub fn eval_mir_constant( pub fn eval_mir_constant(
&mut self, &self,
constant: &mir::Constant<'tcx>, constant: &mir::Constant<'tcx>,
) -> Result<ConstValue<'tcx>, ErrorHandled> { ) -> Result<ConstValue<'tcx>, ErrorHandled> {
match self.monomorphize(constant.literal).val { match self.monomorphize(constant.literal).val {

View file

@ -8,7 +8,7 @@ use rustc_span::symbol::{kw, Symbol};
use rustc_span::{BytePos, Span}; use rustc_span::{BytePos, Span};
use rustc_target::abi::{LayoutOf, Size}; use rustc_target::abi::{LayoutOf, Size};
use super::operand::OperandValue; use super::operand::{OperandRef, OperandValue};
use super::place::PlaceRef; use super::place::PlaceRef;
use super::{FunctionCx, LocalRef}; use super::{FunctionCx, LocalRef};
@ -116,6 +116,24 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
span span
} }
fn spill_operand_to_stack(
operand: &OperandRef<'tcx, Bx::Value>,
name: Option<String>,
bx: &mut Bx,
) -> PlaceRef<'tcx, Bx::Value> {
// "Spill" the value onto the stack, for debuginfo,
// without forcing non-debuginfo uses of the local
// to also load from the stack every single time.
// FIXME(#68817) use `llvm.dbg.value` instead,
// at least for the cases which LLVM handles correctly.
let spill_slot = PlaceRef::alloca(bx, operand.layout);
if let Some(name) = name {
bx.set_var_name(spill_slot.llval, &(name + ".dbg.spill"));
}
operand.val.store(bx, spill_slot);
spill_slot
}
/// Apply debuginfo and/or name, after creating the `alloca` for a local, /// Apply debuginfo and/or name, after creating the `alloca` for a local,
/// or initializing the local with an operand (whichever applies). /// or initializing the local with an operand (whichever applies).
pub fn debug_introduce_local(&self, bx: &mut Bx, local: mir::Local) { pub fn debug_introduce_local(&self, bx: &mut Bx, local: mir::Local) {
@ -152,7 +170,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// (after #67586 gets fixed). // (after #67586 gets fixed).
None None
} else { } else {
let name = kw::Invalid; let name = kw::Empty;
let decl = &self.mir.local_decls[local]; let decl = &self.mir.local_decls[local];
let dbg_var = if full_debug_info { let dbg_var = if full_debug_info {
self.adjusted_span_and_dbg_scope(decl.source_info).map( self.adjusted_span_and_dbg_scope(decl.source_info).map(
@ -186,7 +204,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
None None
} else { } else {
Some(match whole_local_var.or(fallback_var) { Some(match whole_local_var.or(fallback_var) {
Some(var) if var.name != kw::Invalid => var.name.to_string(), Some(var) if var.name != kw::Empty => var.name.to_string(),
_ => format!("{:?}", local), _ => format!("{:?}", local),
}) })
}; };
@ -226,17 +244,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
return; return;
} }
// "Spill" the value onto the stack, for debuginfo, Self::spill_operand_to_stack(operand, name, bx)
// without forcing non-debuginfo uses of the local
// to also load from the stack every single time.
// FIXME(#68817) use `llvm.dbg.value` instead,
// at least for the cases which LLVM handles correctly.
let spill_slot = PlaceRef::alloca(bx, operand.layout);
if let Some(name) = name {
bx.set_var_name(spill_slot.llval, &(name + ".dbg.spill"));
}
operand.val.store(bx, spill_slot);
spill_slot
} }
LocalRef::Place(place) => *place, LocalRef::Place(place) => *place,
@ -308,6 +316,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
/// Partition all `VarDebugInfo` in `self.mir`, by their base `Local`. /// Partition all `VarDebugInfo` in `self.mir`, by their base `Local`.
pub fn compute_per_local_var_debug_info( pub fn compute_per_local_var_debug_info(
&self, &self,
bx: &mut Bx,
) -> Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>> { ) -> Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>> {
let full_debug_info = self.cx.sess().opts.debuginfo == DebugInfo::Full; let full_debug_info = self.cx.sess().opts.debuginfo == DebugInfo::Full;
@ -322,31 +331,63 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
} else { } else {
None None
}; };
let dbg_var = dbg_scope_and_span.map(|(dbg_scope, _, span)| {
let place = var.place;
let var_ty = self.monomorphized_place_ty(place.as_ref());
let var_kind = if self.mir.local_kind(place.local) == mir::LocalKind::Arg
&& place.projection.is_empty()
&& var.source_info.scope == mir::OUTERMOST_SOURCE_SCOPE
{
let arg_index = place.local.index() - 1;
// FIXME(eddyb) shouldn't `ArgumentVariable` indices be let dbg_var = dbg_scope_and_span.map(|(dbg_scope, _, span)| {
// offset in closures to account for the hidden environment? let (var_ty, var_kind) = match var.value {
// Also, is this `+ 1` needed at all? mir::VarDebugInfoContents::Place(place) => {
VariableKind::ArgumentVariable(arg_index + 1) let var_ty = self.monomorphized_place_ty(place.as_ref());
} else { let var_kind = if self.mir.local_kind(place.local) == mir::LocalKind::Arg
VariableKind::LocalVariable && place.projection.is_empty()
&& var.source_info.scope == mir::OUTERMOST_SOURCE_SCOPE
{
let arg_index = place.local.index() - 1;
// FIXME(eddyb) shouldn't `ArgumentVariable` indices be
// offset in closures to account for the hidden environment?
// Also, is this `+ 1` needed at all?
VariableKind::ArgumentVariable(arg_index + 1)
} else {
VariableKind::LocalVariable
};
(var_ty, var_kind)
}
mir::VarDebugInfoContents::Const(c) => {
let ty = self.monomorphize(c.literal.ty);
(ty, VariableKind::LocalVariable)
}
}; };
self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span) self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span)
}); });
per_local[var.place.local].push(PerLocalVarDebugInfo { match var.value {
name: var.name, mir::VarDebugInfoContents::Place(place) => {
source_info: var.source_info, per_local[place.local].push(PerLocalVarDebugInfo {
dbg_var, name: var.name,
projection: var.place.projection, source_info: var.source_info,
}); dbg_var,
projection: place.projection,
});
}
mir::VarDebugInfoContents::Const(c) => {
if let Some(dbg_var) = dbg_var {
let dbg_loc = match self.dbg_loc(var.source_info) {
Some(dbg_loc) => dbg_loc,
None => continue,
};
if let Ok(operand) = self.eval_mir_constant_to_operand(bx, &c) {
let base = Self::spill_operand_to_stack(
&operand,
Some(var.name.to_string()),
bx,
);
bx.dbg_var_addr(dbg_var, dbg_loc, base.llval, Size::ZERO, &[]);
}
}
}
}
} }
Some(per_local) Some(per_local)
} }

View file

@ -83,9 +83,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
return; return;
} }
sym::unreachable => {
return;
}
sym::va_start => bx.va_start(args[0].immediate()), sym::va_start => bx.va_start(args[0].immediate()),
sym::va_end => bx.va_end(args[0].immediate()), sym::va_end => bx.va_end(args[0].immediate()),
sym::size_of_val => { sym::size_of_val => {
@ -106,8 +103,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx.const_usize(bx.layout_of(tp_ty).align.abi.bytes()) bx.const_usize(bx.layout_of(tp_ty).align.abi.bytes())
} }
} }
sym::size_of sym::pref_align_of
| sym::pref_align_of
| sym::min_align_of | sym::min_align_of
| sym::needs_drop | sym::needs_drop
| sym::type_id | sym::type_id
@ -119,10 +115,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
.unwrap(); .unwrap();
OperandRef::from_const(bx, value, ret_ty).immediate_or_packed_pair(bx) OperandRef::from_const(bx, value, ret_ty).immediate_or_packed_pair(bx)
} }
// Effectively no-op
sym::forget => {
return;
}
sym::offset => { sym::offset => {
let ptr = args[0].immediate(); let ptr = args[0].immediate();
let offset = args[1].immediate(); let offset = args[1].immediate();
@ -218,9 +210,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
sym::add_with_overflow sym::add_with_overflow
| sym::sub_with_overflow | sym::sub_with_overflow
| sym::mul_with_overflow | sym::mul_with_overflow
| sym::wrapping_add
| sym::wrapping_sub
| sym::wrapping_mul
| sym::unchecked_div | sym::unchecked_div
| sym::unchecked_rem | sym::unchecked_rem
| sym::unchecked_shl | sym::unchecked_shl
@ -254,9 +243,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
return; return;
} }
sym::wrapping_add => bx.add(args[0].immediate(), args[1].immediate()),
sym::wrapping_sub => bx.sub(args[0].immediate(), args[1].immediate()),
sym::wrapping_mul => bx.mul(args[0].immediate(), args[1].immediate()),
sym::exact_div => { sym::exact_div => {
if signed { if signed {
bx.exactsdiv(args[0].immediate(), args[1].immediate()) bx.exactsdiv(args[0].immediate(), args[1].immediate())
@ -538,8 +524,19 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}; };
let ty = substs.type_at(0); let ty = substs.type_at(0);
if int_type_width_signed(ty, bx.tcx()).is_some() { if int_type_width_signed(ty, bx.tcx()).is_some()
bx.atomic_rmw(atom_op, args[0].immediate(), args[1].immediate(), order) || (ty.is_unsafe_ptr() && op == "xchg")
{
let mut ptr = args[0].immediate();
let mut val = args[1].immediate();
if ty.is_unsafe_ptr() {
// Some platforms do not support atomic operations on pointers,
// so we cast to integer first.
let ptr_llty = bx.type_ptr_to(bx.type_isize());
ptr = bx.pointercast(ptr, ptr_llty);
val = bx.ptrtoint(val, bx.type_isize());
}
bx.atomic_rmw(atom_op, ptr, val, order)
} else { } else {
return invalid_monomorphization(ty); return invalid_monomorphization(ty);
} }

View file

@ -186,7 +186,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
caller_location: None, caller_location: None,
}; };
fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(); fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut bx);
for const_ in &mir.required_consts { for const_ in &mir.required_consts {
if let Err(err) = fx.eval_mir_constant(const_) { if let Err(err) = fx.eval_mir_constant(const_) {

View file

@ -489,6 +489,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mir::Rvalue::Discriminant(ref place) => { mir::Rvalue::Discriminant(ref place) => {
let discr_ty = rvalue.ty(self.mir, bx.tcx()); let discr_ty = rvalue.ty(self.mir, bx.tcx());
let discr_ty = self.monomorphize(discr_ty);
let discr = self let discr = self
.codegen_place(&mut bx, place.as_ref()) .codegen_place(&mut bx, place.as_ref())
.codegen_get_discr(&mut bx, discr_ty); .codegen_get_discr(&mut bx, discr_ty);

View file

@ -1,5 +1,6 @@
use super::write::WriteBackendMethods; use super::write::WriteBackendMethods;
use super::CodegenObject; use super::CodegenObject;
use crate::back::write::TargetMachineFactoryFn;
use crate::{CodegenResults, ModuleCodegen}; use crate::{CodegenResults, ModuleCodegen};
use rustc_ast::expand::allocator::AllocatorKind; use rustc_ast::expand::allocator::AllocatorKind;
@ -21,7 +22,6 @@ use rustc_target::spec::Target;
pub use rustc_data_structures::sync::MetadataRef; pub use rustc_data_structures::sync::MetadataRef;
use std::any::Any; use std::any::Any;
use std::sync::Arc;
pub trait BackendTypes { pub trait BackendTypes {
type Value: CodegenObject; type Value: CodegenObject;
@ -123,7 +123,7 @@ pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Se
&self, &self,
sess: &Session, sess: &Session,
opt_level: config::OptLevel, opt_level: config::OptLevel,
) -> Arc<dyn Fn() -> Result<Self::TargetMachine, String> + Send + Sync>; ) -> TargetMachineFactoryFn<Self>;
fn target_cpu<'b>(&self, sess: &'b Session) -> &'b str; fn target_cpu<'b>(&self, sess: &'b Session) -> &'b str;
fn tune_cpu<'b>(&self, sess: &'b Session) -> Option<&'b str>; fn tune_cpu<'b>(&self, sess: &'b Session) -> Option<&'b str>;
} }

View file

@ -149,8 +149,6 @@ struct Event<N> {
/// those successors), we will pop off that node's `Settled` event. /// those successors), we will pop off that node's `Settled` event.
/// ///
/// [CLR]: https://en.wikipedia.org/wiki/Introduction_to_Algorithms /// [CLR]: https://en.wikipedia.org/wiki/Introduction_to_Algorithms
/// [`NodeStatus`]: ./enum.NodeStatus.html
/// [`TriColorVisitor::node_examined`]: ./trait.TriColorVisitor.html#method.node_examined
pub struct TriColorDepthFirstSearch<'graph, G> pub struct TriColorDepthFirstSearch<'graph, G>
where where
G: ?Sized + DirectedGraph + WithNumNodes + WithSuccessors, G: ?Sized + DirectedGraph + WithNumNodes + WithSuccessors,

View file

@ -523,7 +523,7 @@ where
successors_len: 0, successors_len: 0,
min_depth: depth, min_depth: depth,
min_cycle_root: successor_node, min_cycle_root: successor_node,
successor_node: successor_node, successor_node,
}); });
continue 'recurse; continue 'recurse;
} }

View file

@ -15,8 +15,7 @@
#![feature(fn_traits)] #![feature(fn_traits)]
#![feature(int_bits_const)] #![feature(int_bits_const)]
#![feature(min_specialization)] #![feature(min_specialization)]
#![cfg_attr(bootstrap, feature(optin_builtin_traits))] #![feature(auto_traits)]
#![cfg_attr(not(bootstrap), feature(auto_traits))]
#![feature(nll)] #![feature(nll)]
#![feature(allow_internal_unstable)] #![feature(allow_internal_unstable)]
#![feature(hash_raw_entry)] #![feature(hash_raw_entry)]
@ -27,7 +26,7 @@
#![feature(thread_id_value)] #![feature(thread_id_value)]
#![feature(extend_one)] #![feature(extend_one)]
#![feature(const_panic)] #![feature(const_panic)]
#![feature(min_const_generics)] #![cfg_attr(bootstrap, feature(min_const_generics))]
#![feature(new_uninit)] #![feature(new_uninit)]
#![feature(once_cell)] #![feature(once_cell)]
#![feature(maybe_uninit_uninit_array)] #![feature(maybe_uninit_uninit_array)]

View file

@ -24,8 +24,7 @@ use rustc_index::vec::{Idx, IndexVec};
/// to insert into the middle of the sorted array. Users should avoid mutating this data structure /// to insert into the middle of the sorted array. Users should avoid mutating this data structure
/// in-place. /// in-place.
/// ///
/// [`IndexVec`]: ../../rustc_index/vec/struct.IndexVec.html /// [`SortedMap`]: super::SortedMap
/// [`SortedMap`]: ../sorted_map/struct.SortedMap.html
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct SortedIndexMultiMap<I: Idx, K, V> { pub struct SortedIndexMultiMap<I: Idx, K, V> {
/// The elements of the map in insertion order. /// The elements of the map in insertion order.

View file

@ -40,7 +40,7 @@ const SSO_ARRAY_SIZE: usize = 8;
// into_keys/into_values (unstable) // into_keys/into_values (unstable)
// all raw_entry-related // all raw_entry-related
// PartialEq/Eq (requires sorting the array) // PartialEq/Eq (requires sorting the array)
// Entry::or_insert_with_key (unstable) // Entry::or_insert_with_key
// Vacant/Occupied entries and related // Vacant/Occupied entries and related
// //
// FIXME: In HashMap most methods accepting key reference // FIXME: In HashMap most methods accepting key reference

View file

@ -603,7 +603,7 @@ fn handle_explain(registry: Registry, code: &str, output: ErrorOutputType) {
} }
} }
fn show_content_with_pager(content: &String) { fn show_content_with_pager(content: &str) {
let pager_name = env::var_os("PAGER").unwrap_or_else(|| { let pager_name = env::var_os("PAGER").unwrap_or_else(|| {
if cfg!(windows) { OsString::from("more.com") } else { OsString::from("less") } if cfg!(windows) { OsString::from("more.com") } else { OsString::from("less") }
}); });

View file

@ -111,6 +111,7 @@ E0206: include_str!("./error_codes/E0206.md"),
E0207: include_str!("./error_codes/E0207.md"), E0207: include_str!("./error_codes/E0207.md"),
E0210: include_str!("./error_codes/E0210.md"), E0210: include_str!("./error_codes/E0210.md"),
E0211: include_str!("./error_codes/E0211.md"), E0211: include_str!("./error_codes/E0211.md"),
E0212: include_str!("./error_codes/E0212.md"),
E0214: include_str!("./error_codes/E0214.md"), E0214: include_str!("./error_codes/E0214.md"),
E0220: include_str!("./error_codes/E0220.md"), E0220: include_str!("./error_codes/E0220.md"),
E0221: include_str!("./error_codes/E0221.md"), E0221: include_str!("./error_codes/E0221.md"),
@ -463,6 +464,7 @@ E0776: include_str!("./error_codes/E0776.md"),
E0777: include_str!("./error_codes/E0777.md"), E0777: include_str!("./error_codes/E0777.md"),
E0778: include_str!("./error_codes/E0778.md"), E0778: include_str!("./error_codes/E0778.md"),
E0779: include_str!("./error_codes/E0779.md"), E0779: include_str!("./error_codes/E0779.md"),
E0780: include_str!("./error_codes/E0780.md"),
; ;
// E0006, // merged with E0005 // E0006, // merged with E0005
// E0008, // cannot bind by-move into a pattern guard // E0008, // cannot bind by-move into a pattern guard
@ -503,7 +505,6 @@ E0779: include_str!("./error_codes/E0779.md"),
// E0196, // cannot determine a type for this closure // E0196, // cannot determine a type for this closure
E0208, E0208,
// E0209, // builtin traits can only be implemented on structs or enums // E0209, // builtin traits can only be implemented on structs or enums
E0212, // cannot extract an associated type from a higher-ranked trait bound
// E0213, // associated types are not accepted in this context // E0213, // associated types are not accepted in this context
// E0215, // angle-bracket notation is not stable with `Fn` // E0215, // angle-bracket notation is not stable with `Fn`
// E0216, // parenthetical notation is only stable with `Fn` // E0216, // parenthetical notation is only stable with `Fn`

View file

@ -0,0 +1,35 @@
Cannot use the associated type of
a trait with uninferred generic parameters.
Erroneous code example:
```compile_fail,E0212
pub trait Foo<T> {
type A;
fn get(&self, t: T) -> Self::A;
}
fn foo2<I : for<'x> Foo<&'x isize>>(
field: I::A) {} // error!
```
In this example, we have to instantiate `'x`, and
we don't know what lifetime to instantiate it with.
To fix this, spell out the precise lifetimes involved.
Example:
```
pub trait Foo<T> {
type A;
fn get(&self, t: T) -> Self::A;
}
fn foo3<I : for<'x> Foo<&'x isize>>(
x: <I as Foo<&isize>>::A) {} // ok!
fn foo4<'a, I : for<'x> Foo<&'x isize>>(
x: <I as Foo<&'a isize>>::A) {} // ok!
```

View file

@ -3,8 +3,6 @@ An array without a fixed length was pattern-matched.
Erroneous code example: Erroneous code example:
```compile_fail,E0730 ```compile_fail,E0730
#![feature(const_generics)]
fn is_123<const N: usize>(x: [u32; N]) -> bool { fn is_123<const N: usize>(x: [u32; N]) -> bool {
match x { match x {
[1, 2, ..] => true, // error: cannot pattern-match on an [1, 2, ..] => true, // error: cannot pattern-match on an

View file

@ -10,6 +10,5 @@ fn foo<T, const N: T>() {} // error!
To fix this error, use a concrete type for the const parameter: To fix this error, use a concrete type for the const parameter:
``` ```
#![feature(const_generics)]
fn foo<T, const N: usize>() {} fn foo<T, const N: usize>() {}
``` ```

View file

@ -0,0 +1,19 @@
Cannot use `doc(inline)` with anonymous imports
Erroneous code example:
```ignore (cannot-doctest-multicrate-project)
#[doc(inline)] // error: invalid doc argument
pub use foo::Foo as _;
```
Anonymous imports are always rendered with `#[doc(no_inline)]`. To fix this
error, remove the `#[doc(inline)]` attribute.
Example:
```ignore (cannot-doctest-multicrate-project)
pub use foo::Foo as _;
```

View file

@ -30,7 +30,8 @@ pub enum DiagnosticId {
Lint { name: String, has_future_breakage: bool }, Lint { name: String, has_future_breakage: bool },
} }
/// For example a note attached to an error. /// A "sub"-diagnostic attached to a parent diagnostic.
/// For example, a note attached to an error.
#[derive(Clone, Debug, PartialEq, Hash, Encodable, Decodable)] #[derive(Clone, Debug, PartialEq, Hash, Encodable, Decodable)]
pub struct SubDiagnostic { pub struct SubDiagnostic {
pub level: Level, pub level: Level,
@ -124,6 +125,7 @@ impl Diagnostic {
self.level = Level::Cancelled; self.level = Level::Cancelled;
} }
/// Check if this diagnostic [was cancelled][Self::cancel()].
pub fn cancelled(&self) -> bool { pub fn cancelled(&self) -> bool {
self.level == Level::Cancelled self.level == Level::Cancelled
} }
@ -136,8 +138,6 @@ impl Diagnostic {
/// ///
/// This span is *not* considered a ["primary span"][`MultiSpan`]; only /// This span is *not* considered a ["primary span"][`MultiSpan`]; only
/// the `Span` supplied when creating the diagnostic is primary. /// the `Span` supplied when creating the diagnostic is primary.
///
/// [`MultiSpan`]: ../rustc_span/struct.MultiSpan.html
pub fn span_label<T: Into<String>>(&mut self, span: Span, label: T) -> &mut Self { pub fn span_label<T: Into<String>>(&mut self, span: Span, label: T) -> &mut Self {
self.span.push_span_label(span, label.into()); self.span.push_span_label(span, label.into());
self self
@ -164,7 +164,7 @@ impl Diagnostic {
self.note_expected_found_extra(expected_label, expected, found_label, found, &"", &"") self.note_expected_found_extra(expected_label, expected, found_label, found, &"", &"")
} }
pub fn note_unsuccessfull_coercion( pub fn note_unsuccessful_coercion(
&mut self, &mut self,
expected: DiagnosticStyledString, expected: DiagnosticStyledString,
found: DiagnosticStyledString, found: DiagnosticStyledString,
@ -241,6 +241,7 @@ impl Diagnostic {
self self
} }
/// Add a note attached to this diagnostic.
pub fn note(&mut self, msg: &str) -> &mut Self { pub fn note(&mut self, msg: &str) -> &mut Self {
self.sub(Level::Note, msg, MultiSpan::new(), None); self.sub(Level::Note, msg, MultiSpan::new(), None);
self self
@ -252,33 +253,40 @@ impl Diagnostic {
} }
/// Prints the span with a note above it. /// Prints the span with a note above it.
/// This is like [`Diagnostic::note()`], but it gets its own span.
pub fn span_note<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self { pub fn span_note<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
self.sub(Level::Note, msg, sp.into(), None); self.sub(Level::Note, msg, sp.into(), None);
self self
} }
/// Add a warning attached to this diagnostic.
pub fn warn(&mut self, msg: &str) -> &mut Self { pub fn warn(&mut self, msg: &str) -> &mut Self {
self.sub(Level::Warning, msg, MultiSpan::new(), None); self.sub(Level::Warning, msg, MultiSpan::new(), None);
self self
} }
/// Prints the span with a warn above it. /// Prints the span with a warning above it.
/// This is like [`Diagnostic::warn()`], but it gets its own span.
pub fn span_warn<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self { pub fn span_warn<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
self.sub(Level::Warning, msg, sp.into(), None); self.sub(Level::Warning, msg, sp.into(), None);
self self
} }
/// Add a help message attached to this diagnostic.
pub fn help(&mut self, msg: &str) -> &mut Self { pub fn help(&mut self, msg: &str) -> &mut Self {
self.sub(Level::Help, msg, MultiSpan::new(), None); self.sub(Level::Help, msg, MultiSpan::new(), None);
self self
} }
/// Prints the span with some help above it. /// Prints the span with some help above it.
/// This is like [`Diagnostic::help()`], but it gets its own span.
pub fn span_help<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self { pub fn span_help<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
self.sub(Level::Help, msg, sp.into(), None); self.sub(Level::Help, msg, sp.into(), None);
self self
} }
/// Show a suggestion that has multiple parts to it.
/// In other words, multiple changes need to be applied as part of this suggestion.
pub fn multipart_suggestion( pub fn multipart_suggestion(
&mut self, &mut self,
msg: &str, msg: &str,
@ -299,6 +307,8 @@ impl Diagnostic {
self self
} }
/// Show multiple suggestions that have multiple parts.
/// See also [`Diagnostic::multipart_suggestion()`].
pub fn multipart_suggestions( pub fn multipart_suggestions(
&mut self, &mut self,
msg: &str, msg: &str,
@ -382,6 +392,7 @@ impl Diagnostic {
self self
} }
/// [`Diagnostic::span_suggestion()`] but you can set the [`SuggestionStyle`].
pub fn span_suggestion_with_style( pub fn span_suggestion_with_style(
&mut self, &mut self,
sp: Span, sp: Span,
@ -401,6 +412,7 @@ impl Diagnostic {
self self
} }
/// Always show the suggested change.
pub fn span_suggestion_verbose( pub fn span_suggestion_verbose(
&mut self, &mut self,
sp: Span, sp: Span,
@ -419,6 +431,7 @@ impl Diagnostic {
} }
/// Prints out a message with multiple suggested edits of the code. /// Prints out a message with multiple suggested edits of the code.
/// See also [`Diagnostic::span_suggestion()`].
pub fn span_suggestions( pub fn span_suggestions(
&mut self, &mut self,
sp: Span, sp: Span,
@ -458,7 +471,7 @@ impl Diagnostic {
self self
} }
/// Prints out a message with for a suggestion without showing the suggested code. /// Prints out a message for a suggestion without showing the suggested code.
/// ///
/// This is intended to be used for suggestions that are obvious in what the changes need to /// This is intended to be used for suggestions that are obvious in what the changes need to
/// be from the message, showing the span label inline would be visually unpleasant /// be from the message, showing the span label inline would be visually unpleasant
@ -481,7 +494,7 @@ impl Diagnostic {
self self
} }
/// Adds a suggestion to the json output, but otherwise remains silent/undisplayed in the cli. /// Adds a suggestion to the JSON output that will not be shown in the CLI.
/// ///
/// This is intended to be used for suggestions that are *very* obvious in what the changes /// This is intended to be used for suggestions that are *very* obvious in what the changes
/// need to be from the message, but we still want other tools to be able to apply them. /// need to be from the message, but we still want other tools to be able to apply them.

View file

@ -30,6 +30,15 @@ struct DiagnosticBuilderInner<'a> {
allow_suggestions: bool, allow_suggestions: bool,
} }
/// This is a helper macro for [`forward!`] that allows automatically adding documentation
/// that uses tokens from [`forward!`]'s input.
macro_rules! forward_inner_docs {
($e:expr => $i:item) => {
#[doc = $e]
$i
}
}
/// In general, the `DiagnosticBuilder` uses deref to allow access to /// In general, the `DiagnosticBuilder` uses deref to allow access to
/// the fields and methods of the embedded `diagnostic` in a /// the fields and methods of the embedded `diagnostic` in a
/// transparent way. *However,* many of the methods are intended to /// transparent way. *However,* many of the methods are intended to
@ -45,10 +54,11 @@ macro_rules! forward {
pub fn $n:ident(&self, $($name:ident: $ty:ty),* $(,)?) -> &Self pub fn $n:ident(&self, $($name:ident: $ty:ty),* $(,)?) -> &Self
) => { ) => {
$(#[$attrs])* $(#[$attrs])*
forward_inner_docs!(concat!("See [`Diagnostic::", stringify!($n), "()`].") =>
pub fn $n(&self, $($name: $ty),*) -> &Self { pub fn $n(&self, $($name: $ty),*) -> &Self {
self.diagnostic.$n($($name),*); self.diagnostic.$n($($name),*);
self self
} });
}; };
// Forward pattern for &mut self -> &mut Self // Forward pattern for &mut self -> &mut Self
@ -57,10 +67,11 @@ macro_rules! forward {
pub fn $n:ident(&mut self, $($name:ident: $ty:ty),* $(,)?) -> &mut Self pub fn $n:ident(&mut self, $($name:ident: $ty:ty),* $(,)?) -> &mut Self
) => { ) => {
$(#[$attrs])* $(#[$attrs])*
forward_inner_docs!(concat!("See [`Diagnostic::", stringify!($n), "()`].") =>
pub fn $n(&mut self, $($name: $ty),*) -> &mut Self { pub fn $n(&mut self, $($name: $ty),*) -> &mut Self {
self.0.diagnostic.$n($($name),*); self.0.diagnostic.$n($($name),*);
self self
} });
}; };
// Forward pattern for &mut self -> &mut Self, with S: Into<MultiSpan> // Forward pattern for &mut self -> &mut Self, with S: Into<MultiSpan>
@ -74,10 +85,11 @@ macro_rules! forward {
) -> &mut Self ) -> &mut Self
) => { ) => {
$(#[$attrs])* $(#[$attrs])*
forward_inner_docs!(concat!("See [`Diagnostic::", stringify!($n), "()`].") =>
pub fn $n<S: Into<MultiSpan>>(&mut self, $($name: $ty),*) -> &mut Self { pub fn $n<S: Into<MultiSpan>>(&mut self, $($name: $ty),*) -> &mut Self {
self.0.diagnostic.$n($($name),*); self.0.diagnostic.$n($($name),*);
self self
} });
}; };
} }
@ -116,7 +128,7 @@ impl<'a> DiagnosticBuilder<'a> {
/// Stashes diagnostic for possible later improvement in a different, /// Stashes diagnostic for possible later improvement in a different,
/// later stage of the compiler. The diagnostic can be accessed with /// later stage of the compiler. The diagnostic can be accessed with
/// the provided `span` and `key` through `.steal_diagnostic` on `Handler`. /// the provided `span` and `key` through [`Handler::steal_diagnostic()`].
/// ///
/// As with `buffer`, this is unless the handler has disabled such buffering. /// As with `buffer`, this is unless the handler has disabled such buffering.
pub fn stash(self, span: Span, key: StashKey) { pub fn stash(self, span: Span, key: StashKey) {
@ -202,7 +214,7 @@ impl<'a> DiagnosticBuilder<'a> {
} }
/// Labels all the given spans with the provided label. /// Labels all the given spans with the provided label.
/// See `span_label` for more information. /// See [`Diagnostic::span_label()`] for more information.
pub fn span_labels( pub fn span_labels(
&mut self, &mut self,
spans: impl IntoIterator<Item = Span>, spans: impl IntoIterator<Item = Span>,
@ -233,7 +245,7 @@ impl<'a> DiagnosticBuilder<'a> {
found_extra: &dyn fmt::Display, found_extra: &dyn fmt::Display,
) -> &mut Self); ) -> &mut Self);
forward!(pub fn note_unsuccessfull_coercion( forward!(pub fn note_unsuccessful_coercion(
&mut self, &mut self,
expected: DiagnosticStyledString, expected: DiagnosticStyledString,
found: DiagnosticStyledString, found: DiagnosticStyledString,
@ -254,6 +266,7 @@ impl<'a> DiagnosticBuilder<'a> {
msg: &str, msg: &str,
) -> &mut Self); ) -> &mut Self);
/// See [`Diagnostic::multipart_suggestion()`].
pub fn multipart_suggestion( pub fn multipart_suggestion(
&mut self, &mut self,
msg: &str, msg: &str,
@ -267,6 +280,7 @@ impl<'a> DiagnosticBuilder<'a> {
self self
} }
/// See [`Diagnostic::multipart_suggestions()`].
pub fn multipart_suggestions( pub fn multipart_suggestions(
&mut self, &mut self,
msg: &str, msg: &str,
@ -280,6 +294,7 @@ impl<'a> DiagnosticBuilder<'a> {
self self
} }
/// See [`Diagnostic::tool_only_multipart_suggestion()`].
pub fn tool_only_multipart_suggestion( pub fn tool_only_multipart_suggestion(
&mut self, &mut self,
msg: &str, msg: &str,
@ -293,6 +308,7 @@ impl<'a> DiagnosticBuilder<'a> {
self self
} }
/// See [`Diagnostic::span_suggestion()`].
pub fn span_suggestion( pub fn span_suggestion(
&mut self, &mut self,
sp: Span, sp: Span,
@ -307,6 +323,7 @@ impl<'a> DiagnosticBuilder<'a> {
self self
} }
/// See [`Diagnostic::span_suggestions()`].
pub fn span_suggestions( pub fn span_suggestions(
&mut self, &mut self,
sp: Span, sp: Span,
@ -321,6 +338,7 @@ impl<'a> DiagnosticBuilder<'a> {
self self
} }
/// See [`Diagnostic::span_suggestion_short()`].
pub fn span_suggestion_short( pub fn span_suggestion_short(
&mut self, &mut self,
sp: Span, sp: Span,
@ -335,6 +353,7 @@ impl<'a> DiagnosticBuilder<'a> {
self self
} }
/// See [`Diagnostic::span_suggestion_verbose()`].
pub fn span_suggestion_verbose( pub fn span_suggestion_verbose(
&mut self, &mut self,
sp: Span, sp: Span,
@ -349,6 +368,7 @@ impl<'a> DiagnosticBuilder<'a> {
self self
} }
/// See [`Diagnostic::span_suggestion_hidden()`].
pub fn span_suggestion_hidden( pub fn span_suggestion_hidden(
&mut self, &mut self,
sp: Span, sp: Span,
@ -363,6 +383,7 @@ impl<'a> DiagnosticBuilder<'a> {
self self
} }
/// See [`Diagnostic::tool_only_span_suggestion()`] for more information.
pub fn tool_only_span_suggestion( pub fn tool_only_span_suggestion(
&mut self, &mut self,
sp: Span, sp: Span,
@ -380,19 +401,22 @@ impl<'a> DiagnosticBuilder<'a> {
forward!(pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self); forward!(pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self);
forward!(pub fn code(&mut self, s: DiagnosticId) -> &mut Self); forward!(pub fn code(&mut self, s: DiagnosticId) -> &mut Self);
/// Allow attaching suggestions this diagnostic.
/// If this is set to `false`, then any suggestions attached with the `span_suggestion_*`
/// methods after this is set to `false` will be ignored.
pub fn allow_suggestions(&mut self, allow: bool) -> &mut Self { pub fn allow_suggestions(&mut self, allow: bool) -> &mut Self {
self.0.allow_suggestions = allow; self.0.allow_suggestions = allow;
self self
} }
/// Convenience function for internal use, clients should use one of the /// Convenience function for internal use, clients should use one of the
/// struct_* methods on Handler. /// `struct_*` methods on [`Handler`].
crate fn new(handler: &'a Handler, level: Level, message: &str) -> DiagnosticBuilder<'a> { crate fn new(handler: &'a Handler, level: Level, message: &str) -> DiagnosticBuilder<'a> {
DiagnosticBuilder::new_with_code(handler, level, None, message) DiagnosticBuilder::new_with_code(handler, level, None, message)
} }
/// Convenience function for internal use, clients should use one of the /// Convenience function for internal use, clients should use one of the
/// struct_* methods on Handler. /// `struct_*` methods on [`Handler`].
crate fn new_with_code( crate fn new_with_code(
handler: &'a Handler, handler: &'a Handler,
level: Level, level: Level,

View file

@ -2,8 +2,8 @@ use crate::expand::{self, AstFragment, Invocation};
use crate::module::DirectoryOwnership; use crate::module::DirectoryOwnership;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token; use rustc_ast::token::{self, Nonterminal};
use rustc_ast::tokenstream::TokenStream; use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
use rustc_ast::visit::{AssocCtxt, Visitor}; use rustc_ast::visit::{AssocCtxt, Visitor};
use rustc_ast::{self as ast, Attribute, NodeId, PatKind}; use rustc_ast::{self as ast, Attribute, NodeId, PatKind};
use rustc_attr::{self as attr, Deprecation, HasAttrs, Stability}; use rustc_attr::{self as attr, Deprecation, HasAttrs, Stability};
@ -119,8 +119,8 @@ impl Annotatable {
} }
} }
crate fn into_tokens(self, sess: &ParseSess) -> TokenStream { crate fn into_nonterminal(self) -> Nonterminal {
let nt = match self { match self {
Annotatable::Item(item) => token::NtItem(item), Annotatable::Item(item) => token::NtItem(item),
Annotatable::TraitItem(item) | Annotatable::ImplItem(item) => { Annotatable::TraitItem(item) | Annotatable::ImplItem(item) => {
token::NtItem(P(item.and_then(ast::AssocItem::into_item))) token::NtItem(P(item.and_then(ast::AssocItem::into_item)))
@ -137,8 +137,11 @@ impl Annotatable {
| Annotatable::Param(..) | Annotatable::Param(..)
| Annotatable::StructField(..) | Annotatable::StructField(..)
| Annotatable::Variant(..) => panic!("unexpected annotatable"), | Annotatable::Variant(..) => panic!("unexpected annotatable"),
}; }
nt_to_tokenstream(&nt, sess, DUMMY_SP) }
crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
nt_to_tokenstream(&self.into_nonterminal(), sess, DUMMY_SP, CanSynthesizeMissingTokens::No)
} }
pub fn expect_item(self) -> P<ast::Item> { pub fn expect_item(self) -> P<ast::Item> {

View file

@ -29,6 +29,7 @@ use smallvec::SmallVec;
pub struct StripUnconfigured<'a> { pub struct StripUnconfigured<'a> {
pub sess: &'a Session, pub sess: &'a Session,
pub features: Option<&'a Features>, pub features: Option<&'a Features>,
pub modified: bool,
} }
fn get_features( fn get_features(
@ -199,7 +200,7 @@ fn get_features(
// `cfg_attr`-process the crate's attributes and compute the crate's features. // `cfg_attr`-process the crate's attributes and compute the crate's features.
pub fn features(sess: &Session, mut krate: ast::Crate) -> (ast::Crate, Features) { pub fn features(sess: &Session, mut krate: ast::Crate) -> (ast::Crate, Features) {
let mut strip_unconfigured = StripUnconfigured { sess, features: None }; let mut strip_unconfigured = StripUnconfigured { sess, features: None, modified: false };
let unconfigured_attrs = krate.attrs.clone(); let unconfigured_attrs = krate.attrs.clone();
let diag = &sess.parse_sess.span_diagnostic; let diag = &sess.parse_sess.span_diagnostic;
@ -243,7 +244,12 @@ const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
impl<'a> StripUnconfigured<'a> { impl<'a> StripUnconfigured<'a> {
pub fn configure<T: HasAttrs>(&mut self, mut node: T) -> Option<T> { pub fn configure<T: HasAttrs>(&mut self, mut node: T) -> Option<T> {
self.process_cfg_attrs(&mut node); self.process_cfg_attrs(&mut node);
self.in_cfg(node.attrs()).then_some(node) if self.in_cfg(node.attrs()) {
Some(node)
} else {
self.modified = true;
None
}
} }
/// Parse and expand all `cfg_attr` attributes into a list of attributes /// Parse and expand all `cfg_attr` attributes into a list of attributes
@ -270,6 +276,9 @@ impl<'a> StripUnconfigured<'a> {
return vec![attr]; return vec![attr];
} }
// A `#[cfg_attr]` either gets removed, or replaced with a new attribute
self.modified = true;
let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) { let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) {
None => return vec![], None => return vec![],
Some(r) => r, Some(r) => r,

View file

@ -12,7 +12,7 @@ use rustc_ast::ptr::P;
use rustc_ast::token; use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream; use rustc_ast::tokenstream::TokenStream;
use rustc_ast::visit::{self, AssocCtxt, Visitor}; use rustc_ast::visit::{self, AssocCtxt, Visitor};
use rustc_ast::{self as ast, AttrItem, Block, LitKind, NodeId, PatKind, Path}; use rustc_ast::{self as ast, AttrItem, AttrStyle, Block, LitKind, NodeId, PatKind, Path};
use rustc_ast::{ItemKind, MacArgs, MacCallStmt, MacStmtStyle, StmtKind, Unsafe}; use rustc_ast::{ItemKind, MacArgs, MacCallStmt, MacStmtStyle, StmtKind, Unsafe};
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_attr::{self as attr, is_builtin_attr, HasAttrs}; use rustc_attr::{self as attr, is_builtin_attr, HasAttrs};
@ -522,12 +522,29 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
item.visit_attrs(|attrs| attrs.retain(|a| !a.has_name(sym::derive))); item.visit_attrs(|attrs| attrs.retain(|a| !a.has_name(sym::derive)));
(item, Vec::new()) (item, Vec::new())
} else { } else {
let mut item = StripUnconfigured { let mut visitor = StripUnconfigured {
sess: self.cx.sess, sess: self.cx.sess,
features: self.cx.ecfg.features, features: self.cx.ecfg.features,
} modified: false,
.fully_configure(item); };
let mut item = visitor.fully_configure(item);
item.visit_attrs(|attrs| attrs.retain(|a| !a.has_name(sym::derive))); item.visit_attrs(|attrs| attrs.retain(|a| !a.has_name(sym::derive)));
if visitor.modified && !derives.is_empty() {
// Erase the tokens if cfg-stripping modified the item
// This will cause us to synthesize fake tokens
// when `nt_to_tokenstream` is called on this item.
match &mut item {
Annotatable::Item(item) => item.tokens = None,
Annotatable::Stmt(stmt) => {
if let StmtKind::Item(item) = &mut stmt.kind {
item.tokens = None
} else {
panic!("Unexpected stmt {:?}", stmt);
}
}
_ => panic!("Unexpected annotatable {:?}", item),
}
}
invocations.reserve(derives.len()); invocations.reserve(derives.len());
let derive_placeholders = derives let derive_placeholders = derives
@ -622,7 +639,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
let invocations = { let invocations = {
let mut collector = InvocationCollector { let mut collector = InvocationCollector {
cfg: StripUnconfigured { sess: &self.cx.sess, features: self.cx.ecfg.features }, cfg: StripUnconfigured {
sess: &self.cx.sess,
features: self.cx.ecfg.features,
modified: false,
},
cx: self.cx, cx: self.cx,
invocations: Vec::new(), invocations: Vec::new(),
monotonic: self.monotonic, monotonic: self.monotonic,
@ -716,7 +737,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
SyntaxExtensionKind::Attr(expander) => { SyntaxExtensionKind::Attr(expander) => {
self.gate_proc_macro_input(&item); self.gate_proc_macro_input(&item);
self.gate_proc_macro_attr_item(span, &item); self.gate_proc_macro_attr_item(span, &item);
let tokens = item.into_tokens(&self.cx.sess.parse_sess); let tokens = match attr.style {
AttrStyle::Outer => item.into_tokens(&self.cx.sess.parse_sess),
// FIXME: Properly collect tokens for inner attributes
AttrStyle::Inner => rustc_parse::fake_token_stream(
&self.cx.sess.parse_sess,
&item.into_nonterminal(),
span,
),
};
let attr_item = attr.unwrap_normal_item(); let attr_item = attr.unwrap_normal_item();
if let MacArgs::Eq(..) = attr_item.args { if let MacArgs::Eq(..) = attr_item.args {
self.cx.span_err(span, "key-value macro attributes are not supported"); self.cx.span_err(span, "key-value macro attributes are not supported");

View file

@ -36,16 +36,13 @@ crate mod mbe;
mod tests; mod tests;
#[cfg(test)] #[cfg(test)]
mod parse { mod parse {
#[cfg(test)]
mod tests; mod tests;
} }
#[cfg(test)] #[cfg(test)]
mod tokenstream { mod tokenstream {
#[cfg(test)]
mod tests; mod tests;
} }
#[cfg(test)] #[cfg(test)]
mod mut_visit { mod mut_visit {
#[cfg(test)]
mod tests; mod tests;
} }

View file

@ -84,7 +84,7 @@ enum TokenTree {
/// e.g., `$var` /// e.g., `$var`
MetaVar(Span, Ident), MetaVar(Span, Ident),
/// e.g., `$var:expr`. This is only used in the left hand side of MBE macros. /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
MetaVarDecl(Span, Ident /* name to bind */, NonterminalKind), MetaVarDecl(Span, Ident /* name to bind */, Option<NonterminalKind>),
} }
impl TokenTree { impl TokenTree {

View file

@ -1,4 +1,4 @@
//! This is an NFA-based parser, which calls out to the main rust parser for named non-terminals //! This is an NFA-based parser, which calls out to the main Rust parser for named non-terminals
//! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads //! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
//! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in //! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
//! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier //! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
@ -378,6 +378,11 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
n_rec(sess, next_m, res.by_ref(), ret_val)?; n_rec(sess, next_m, res.by_ref(), ret_val)?;
} }
} }
TokenTree::MetaVarDecl(span, _, None) => {
if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
return Err((span, "missing fragment specifier".to_string()));
}
}
TokenTree::MetaVarDecl(sp, bind_name, _) => match ret_val TokenTree::MetaVarDecl(sp, bind_name, _) => match ret_val
.entry(MacroRulesNormalizedIdent::new(bind_name)) .entry(MacroRulesNormalizedIdent::new(bind_name))
{ {
@ -422,7 +427,6 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
/// ///
/// # Parameters /// # Parameters
/// ///
/// - `sess`: the parsing session into which errors are emitted.
/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a /// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
/// successful execution of this function. /// successful execution of this function.
/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in /// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
@ -430,13 +434,12 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
/// - `eof_items`: the set of items that would be valid if this was the EOF. /// - `eof_items`: the set of items that would be valid if this was the EOF.
/// - `bb_items`: the set of items that are waiting for the black-box parser. /// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `token`: the current token of the parser. /// - `token`: the current token of the parser.
/// - `span`: the `Span` in the source code corresponding to the token trees we are trying to match
/// against the matcher positions in `cur_items`.
/// ///
/// # Returns /// # Returns
/// ///
/// A `ParseResult`. Note that matches are kept track of through the items generated. /// A `ParseResult`. Note that matches are kept track of through the items generated.
fn inner_parse_loop<'root, 'tt>( fn inner_parse_loop<'root, 'tt>(
sess: &ParseSess,
cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>, next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
@ -554,11 +557,21 @@ fn inner_parse_loop<'root, 'tt>(
}))); })));
} }
// We need to match a metavar (but the identifier is invalid)... this is an error
TokenTree::MetaVarDecl(span, _, None) => {
if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
return Error(span, "missing fragment specifier".to_string());
}
}
// We need to match a metavar with a valid ident... call out to the black-box // We need to match a metavar with a valid ident... call out to the black-box
// parser by adding an item to `bb_items`. // parser by adding an item to `bb_items`.
TokenTree::MetaVarDecl(_, _, kind) => { TokenTree::MetaVarDecl(_, _, Some(kind)) => {
// Built-in nonterminals never start with these tokens, // Built-in nonterminals never start with these tokens, so we can eliminate
// so we can eliminate them from consideration. // them from consideration.
//
// We use the span of the metavariable declaration to determine any
// edition-specific matching behavior for non-terminals.
if Parser::nonterminal_may_begin_with(kind, token) { if Parser::nonterminal_may_begin_with(kind, token) {
bb_items.push(item); bb_items.push(item);
} }
@ -627,6 +640,7 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na
// parsing from the black-box parser done. The result is that `next_items` will contain a // parsing from the black-box parser done. The result is that `next_items` will contain a
// bunch of possible next matcher positions in `next_items`. // bunch of possible next matcher positions in `next_items`.
match inner_parse_loop( match inner_parse_loop(
parser.sess,
&mut cur_items, &mut cur_items,
&mut next_items, &mut next_items,
&mut eof_items, &mut eof_items,
@ -688,7 +702,7 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na
let nts = bb_items let nts = bb_items
.iter() .iter()
.map(|item| match item.top_elts.get_tt(item.idx) { .map(|item| match item.top_elts.get_tt(item.idx) {
TokenTree::MetaVarDecl(_, bind, kind) => format!("{} ('{}')", kind, bind), TokenTree::MetaVarDecl(_, bind, Some(kind)) => format!("{} ('{}')", kind, bind),
_ => panic!(), _ => panic!(),
}) })
.collect::<Vec<String>>() .collect::<Vec<String>>()
@ -718,8 +732,10 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na
assert_eq!(bb_items.len(), 1); assert_eq!(bb_items.len(), 1);
let mut item = bb_items.pop().unwrap(); let mut item = bb_items.pop().unwrap();
if let TokenTree::MetaVarDecl(span, _, kind) = item.top_elts.get_tt(item.idx) { if let TokenTree::MetaVarDecl(span, _, Some(kind)) = item.top_elts.get_tt(item.idx) {
let match_cur = item.match_cur; let match_cur = item.match_cur;
// We use the span of the metavariable declaration to determine any
// edition-specific matching behavior for non-terminals.
let nt = match parser.to_mut().parse_nonterminal(kind) { let nt = match parser.to_mut().parse_nonterminal(kind) {
Err(mut err) => { Err(mut err) => {
err.span_label( err.span_label(

View file

@ -401,7 +401,7 @@ pub fn compile_declarative_macro(
let diag = &sess.parse_sess.span_diagnostic; let diag = &sess.parse_sess.span_diagnostic;
let lhs_nm = Ident::new(sym::lhs, def.span); let lhs_nm = Ident::new(sym::lhs, def.span);
let rhs_nm = Ident::new(sym::rhs, def.span); let rhs_nm = Ident::new(sym::rhs, def.span);
let tt_spec = NonterminalKind::TT; let tt_spec = Some(NonterminalKind::TT);
// Parse the macro_rules! invocation // Parse the macro_rules! invocation
let (macro_rules, body) = match &def.kind { let (macro_rules, body) = match &def.kind {
@ -476,10 +476,15 @@ pub fn compile_declarative_macro(
.map(|m| { .map(|m| {
if let MatchedNonterminal(ref nt) = *m { if let MatchedNonterminal(ref nt) = *m {
if let NtTT(ref tt) = **nt { if let NtTT(ref tt) = **nt {
let tt = let tt = mbe::quoted::parse(
mbe::quoted::parse(tt.clone().into(), true, &sess.parse_sess, def.id) tt.clone().into(),
.pop() true,
.unwrap(); &sess.parse_sess,
def.id,
features,
)
.pop()
.unwrap();
valid &= check_lhs_nt_follows(&sess.parse_sess, features, &def.attrs, &tt); valid &= check_lhs_nt_follows(&sess.parse_sess, features, &def.attrs, &tt);
return tt; return tt;
} }
@ -501,6 +506,7 @@ pub fn compile_declarative_macro(
false, false,
&sess.parse_sess, &sess.parse_sess,
def.id, def.id,
features,
) )
.pop() .pop()
.unwrap(); .unwrap();
@ -578,7 +584,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
TokenTree::Sequence(span, ref seq) => { TokenTree::Sequence(span, ref seq) => {
if seq.separator.is_none() if seq.separator.is_none()
&& seq.tts.iter().all(|seq_tt| match *seq_tt { && seq.tts.iter().all(|seq_tt| match *seq_tt {
TokenTree::MetaVarDecl(_, _, NonterminalKind::Vis) => true, TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => true,
TokenTree::Sequence(_, ref sub_seq) => { TokenTree::Sequence(_, ref sub_seq) => {
sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
|| sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
@ -961,7 +967,7 @@ fn check_matcher_core(
// Now `last` holds the complete set of NT tokens that could // Now `last` holds the complete set of NT tokens that could
// end the sequence before SUFFIX. Check that every one works with `suffix`. // end the sequence before SUFFIX. Check that every one works with `suffix`.
for token in &last.tokens { for token in &last.tokens {
if let TokenTree::MetaVarDecl(_, name, kind) = *token { if let TokenTree::MetaVarDecl(_, name, Some(kind)) = *token {
for next_token in &suffix_first.tokens { for next_token in &suffix_first.tokens {
match is_in_follow(next_token, kind) { match is_in_follow(next_token, kind) {
IsInFollow::Yes => {} IsInFollow::Yes => {}
@ -1019,7 +1025,7 @@ fn check_matcher_core(
} }
fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool { fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
if let mbe::TokenTree::MetaVarDecl(_, _, kind) = *tok { if let mbe::TokenTree::MetaVarDecl(_, _, Some(kind)) = *tok {
frag_can_be_followed_by_any(kind) frag_can_be_followed_by_any(kind)
} else { } else {
// (Non NT's can always be followed by anything in matchers.) // (Non NT's can always be followed by anything in matchers.)
@ -1090,7 +1096,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
_ => IsInFollow::No(TOKENS), _ => IsInFollow::No(TOKENS),
} }
} }
NonterminalKind::Pat => { NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"]; const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
match tok { match tok {
TokenTree::Token(token) => match token.kind { TokenTree::Token(token) => match token.kind {
@ -1123,7 +1129,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
} }
_ => IsInFollow::No(TOKENS), _ => IsInFollow::No(TOKENS),
}, },
TokenTree::MetaVarDecl(_, _, NonterminalKind::Block) => IsInFollow::Yes, TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Block)) => IsInFollow::Yes,
_ => IsInFollow::No(TOKENS), _ => IsInFollow::No(TOKENS),
} }
} }
@ -1158,7 +1164,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
TokenTree::MetaVarDecl( TokenTree::MetaVarDecl(
_, _,
_, _,
NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path, Some(NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path),
) => IsInFollow::Yes, ) => IsInFollow::Yes,
_ => IsInFollow::No(TOKENS), _ => IsInFollow::No(TOKENS),
} }
@ -1171,7 +1177,8 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
match *tt { match *tt {
mbe::TokenTree::Token(ref token) => pprust::token_to_string(&token), mbe::TokenTree::Token(ref token) => pprust::token_to_string(&token),
mbe::TokenTree::MetaVar(_, name) => format!("${}", name), mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind),
mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name),
_ => panic!( _ => panic!(
"{}", "{}",
"unexpected mbe::TokenTree::{Sequence or Delimited} \ "unexpected mbe::TokenTree::{Sequence or Delimited} \

View file

@ -3,10 +3,11 @@ use crate::mbe::{Delimited, KleeneOp, KleeneToken, SequenceRepetition, TokenTree
use rustc_ast::token::{self, Token}; use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream; use rustc_ast::tokenstream;
use rustc_ast::NodeId; use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_session::parse::ParseSess; use rustc_feature::Features;
use rustc_span::symbol::{kw, Ident}; use rustc_session::parse::{feature_err, ParseSess};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::Span; use rustc_span::Span;
@ -29,10 +30,8 @@ const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
/// `ident` are "matchers". They are not present in the body of a macro rule -- just in the /// `ident` are "matchers". They are not present in the body of a macro rule -- just in the
/// pattern, so we pass a parameter to indicate whether to expect them or not. /// pattern, so we pass a parameter to indicate whether to expect them or not.
/// - `sess`: the parsing session. Any errors will be emitted to this session. /// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `features`, `attrs`: language feature flags and attributes so that we know whether to use /// - `node_id`: the NodeId of the macro we are parsing.
/// unstable features or not. /// - `features`: language features so we can do feature gating.
/// - `edition`: which edition are we in.
/// - `macro_node_id`: the NodeId of the macro we are parsing.
/// ///
/// # Returns /// # Returns
/// ///
@ -42,6 +41,7 @@ pub(super) fn parse(
expect_matchers: bool, expect_matchers: bool,
sess: &ParseSess, sess: &ParseSess,
node_id: NodeId, node_id: NodeId,
features: &Features,
) -> Vec<TokenTree> { ) -> Vec<TokenTree> {
// Will contain the final collection of `self::TokenTree` // Will contain the final collection of `self::TokenTree`
let mut result = Vec::new(); let mut result = Vec::new();
@ -52,7 +52,7 @@ pub(super) fn parse(
while let Some(tree) = trees.next() { while let Some(tree) = trees.next() {
// Given the parsed tree, if there is a metavar and we are expecting matchers, actually // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
// parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`). // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id); let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id, features);
match tree { match tree {
TokenTree::MetaVar(start_sp, ident) if expect_matchers => { TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
let span = match trees.next() { let span = match trees.next() {
@ -61,19 +61,40 @@ pub(super) fn parse(
Some(tokenstream::TokenTree::Token(token)) => match token.ident() { Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
Some((frag, _)) => { Some((frag, _)) => {
let span = token.span.with_lo(start_sp.lo()); let span = token.span.with_lo(start_sp.lo());
let kind = token::NonterminalKind::from_symbol(frag.name)
.unwrap_or_else(|| { match frag.name {
let msg = format!( sym::pat2018 | sym::pat2021 => {
"invalid fragment specifier `{}`", if !features.edition_macro_pats {
frag.name feature_err(
); sess,
sess.span_diagnostic sym::edition_macro_pats,
.struct_span_err(span, &msg) frag.span,
.help(VALID_FRAGMENT_NAMES_MSG) "`pat2018` and `pat2021` are unstable.",
)
.emit(); .emit();
token::NonterminalKind::Ident }
}); }
result.push(TokenTree::MetaVarDecl(span, ident, kind)); _ => {}
}
let kind =
token::NonterminalKind::from_symbol(frag.name, || {
span.edition()
})
.unwrap_or_else(
|| {
let msg = format!(
"invalid fragment specifier `{}`",
frag.name
);
sess.span_diagnostic
.struct_span_err(span, &msg)
.help(VALID_FRAGMENT_NAMES_MSG)
.emit();
token::NonterminalKind::Ident
},
);
result.push(TokenTree::MetaVarDecl(span, ident, Some(kind)));
continue; continue;
} }
_ => token.span, _ => token.span,
@ -83,8 +104,11 @@ pub(super) fn parse(
} }
tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
}; };
sess.span_diagnostic.struct_span_err(span, "missing fragment specifier").emit(); if node_id != DUMMY_NODE_ID {
continue; // Macros loaded from other crates have dummy node ids.
sess.missing_fragment_specifiers.borrow_mut().insert(span, node_id);
}
result.push(TokenTree::MetaVarDecl(span, ident, None));
} }
// Not a metavar or no matchers allowed, so just return the tree // Not a metavar or no matchers allowed, so just return the tree
@ -107,14 +131,14 @@ pub(super) fn parse(
/// converting `tree` /// converting `tree`
/// - `expect_matchers`: same as for `parse` (see above). /// - `expect_matchers`: same as for `parse` (see above).
/// - `sess`: the parsing session. Any errors will be emitted to this session. /// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `features`, `attrs`: language feature flags and attributes so that we know whether to use /// - `features`: language features so we can do feature gating.
/// unstable features or not.
fn parse_tree( fn parse_tree(
tree: tokenstream::TokenTree, tree: tokenstream::TokenTree,
outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>, outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
expect_matchers: bool, expect_matchers: bool,
sess: &ParseSess, sess: &ParseSess,
node_id: NodeId, node_id: NodeId,
features: &Features,
) -> TokenTree { ) -> TokenTree {
// Depending on what `tree` is, we could be parsing different parts of a macro // Depending on what `tree` is, we could be parsing different parts of a macro
match tree { match tree {
@ -142,7 +166,7 @@ fn parse_tree(
sess.span_diagnostic.span_err(span.entire(), &msg); sess.span_diagnostic.span_err(span.entire(), &msg);
} }
// Parse the contents of the sequence itself // Parse the contents of the sequence itself
let sequence = parse(tts, expect_matchers, sess, node_id); let sequence = parse(tts, expect_matchers, sess, node_id, features);
// Get the Kleene operator and optional separator // Get the Kleene operator and optional separator
let (separator, kleene) = let (separator, kleene) =
parse_sep_and_kleene_op(&mut trees, span.entire(), sess); parse_sep_and_kleene_op(&mut trees, span.entire(), sess);
@ -193,7 +217,10 @@ fn parse_tree(
// descend into the delimited set and further parse it. // descend into the delimited set and further parse it.
tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited( tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span, span,
Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess, node_id) }), Lrc::new(Delimited {
delim,
tts: parse(tts, expect_matchers, sess, node_id, features),
}),
), ),
} }
} }

View file

@ -3,7 +3,7 @@ use crate::proc_macro_server;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token; use rustc_ast::token;
use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
use rustc_ast::{self as ast, *}; use rustc_ast::{self as ast, *};
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use rustc_errors::{struct_span_err, Applicability, ErrorReported}; use rustc_errors::{struct_span_err, Applicability, ErrorReported};
@ -94,7 +94,12 @@ impl MultiItemModifier for ProcMacroDerive {
let input = if item.pretty_printing_compatibility_hack() { let input = if item.pretty_printing_compatibility_hack() {
TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into() TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
} else { } else {
nt_to_tokenstream(&item, &ecx.sess.parse_sess, DUMMY_SP) nt_to_tokenstream(
&item,
&ecx.sess.parse_sess,
DUMMY_SP,
CanSynthesizeMissingTokens::Yes,
)
}; };
let server = proc_macro_server::Rustc::new(ecx); let server = proc_macro_server::Rustc::new(ecx);

View file

@ -2,7 +2,8 @@ use crate::base::ExtCtxt;
use rustc_ast as ast; use rustc_ast as ast;
use rustc_ast::token; use rustc_ast::token;
use rustc_ast::tokenstream::{self, DelimSpan, Spacing::*, TokenStream, TreeAndSpacing}; use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens};
use rustc_ast::tokenstream::{DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use rustc_errors::Diagnostic; use rustc_errors::Diagnostic;
@ -178,7 +179,7 @@ impl FromInternal<(TreeAndSpacing, &'_ ParseSess, &'_ mut Vec<Self>)>
{ {
TokenTree::Ident(Ident::new(sess, name.name, is_raw, name.span)) TokenTree::Ident(Ident::new(sess, name.name, is_raw, name.span))
} else { } else {
let stream = nt_to_tokenstream(&nt, sess, span); let stream = nt_to_tokenstream(&nt, sess, span, CanSynthesizeMissingTokens::No);
TokenTree::Group(Group { TokenTree::Group(Group {
delimiter: Delimiter::None, delimiter: Delimiter::None,
stream, stream,

View file

@ -273,6 +273,8 @@ declare_features! (
/// Allows patterns with concurrent by-move and by-ref bindings. /// Allows patterns with concurrent by-move and by-ref bindings.
/// For example, you can write `Foo(a, ref b)` where `a` is by-move and `b` is by-ref. /// For example, you can write `Foo(a, ref b)` where `a` is by-move and `b` is by-ref.
(accepted, move_ref_pattern, "1.48.0", Some(68354), None), (accepted, move_ref_pattern, "1.48.0", Some(68354), None),
/// The smallest useful subset of `const_generics`.
(accepted, min_const_generics, "1.51.0", Some(74878), None),
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// feature-group-end: accepted features // feature-group-end: accepted features

Some files were not shown because too many files have changed in this diff Show more