
Merge from rustc

The Miri Conjob Bot 2024-02-26 05:40:12 +00:00
commit 779d079677
299 changed files with 4781 additions and 1952 deletions


@ -65,9 +65,20 @@ jobs:
- name: x86_64-gnu-tools
os: ubuntu-20.04-16core-64gb
env: {}
defaults:
run:
shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}"
timeout-minutes: 600
runs-on: "${{ matrix.os }}"
steps:
- if: "contains(matrix.os, 'windows')"
uses: msys2/setup-msys2@v2.22.0
with:
msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}"
update: false
release: true
path-type: inherit
install: "make dos2unix diffutils\n"
- name: disable git crlf conversion
run: git config --global core.autocrlf false
- name: checkout the source code
@ -461,9 +472,20 @@ jobs:
RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --enable-extended --enable-profiler"
SCRIPT: python x.py dist bootstrap --include-default-paths
os: windows-2019-8core-32gb
defaults:
run:
shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}"
timeout-minutes: 600
runs-on: "${{ matrix.os }}"
steps:
- if: "contains(matrix.os, 'windows')"
uses: msys2/setup-msys2@v2.22.0
with:
msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}"
update: false
release: true
path-type: inherit
install: "make dos2unix diffutils\n"
- name: disable git crlf conversion
run: git config --global core.autocrlf false
- name: checkout the source code
@ -589,9 +611,20 @@ jobs:
env:
CODEGEN_BACKENDS: "llvm,cranelift"
os: ubuntu-20.04-16core-64gb
defaults:
run:
shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}"
timeout-minutes: 600
runs-on: "${{ matrix.os }}"
steps:
- if: "contains(matrix.os, 'windows')"
uses: msys2/setup-msys2@v2.22.0
with:
msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}"
update: false
release: true
path-type: inherit
install: "make dos2unix diffutils\n"
- name: disable git crlf conversion
run: git config --global core.autocrlf false
- name: checkout the source code


@ -4101,6 +4101,7 @@ dependencies = [
"rustc_target",
"rustc_trait_selection",
"rustc_type_ir",
"smallvec",
"tracing",
"unicode-security",
]


@ -145,10 +145,9 @@ toolchain.
1. Download the latest [MSYS2 installer][msys2] and go through the installer.
2. Run `mingw32_shell.bat` or `mingw64_shell.bat` from the MSYS2 installation
directory (e.g. `C:\msys64`), depending on whether you want 32-bit or 64-bit
Rust. (As of the latest version of MSYS2 you have to run `msys2_shell.cmd
-mingw32` or `msys2_shell.cmd -mingw64` from the command line instead.)
2. Start a MINGW64 or MINGW32 shell (depending on whether you want 32-bit
or 64-bit Rust) either from your start menu, or by running `mingw64.exe`
or `mingw32.exe` from your MSYS2 installation directory (e.g. `C:\msys64`).
3. From this terminal, install the required tools:
@ -157,8 +156,7 @@ toolchain.
pacman -Sy pacman-mirrors
# Install build tools needed for Rust. If you're building a 32-bit compiler,
# then replace "x86_64" below with "i686". If you've already got Git, Python,
# or CMake installed and in PATH you can remove them from this list.
# then replace "x86_64" below with "i686".
# Note that it is important that you do **not** use the 'python2', 'cmake',
# and 'ninja' packages from the 'msys2' subsystem.
# The build has historically been known to fail with these packages.
@ -175,9 +173,21 @@ toolchain.
4. Navigate to Rust's source code (or clone it), then build it:
```sh
python x.py setup user && python x.py build && python x.py install
python x.py setup dist && python x.py build && python x.py install
```
If you want to use the native versions of Git, Python, or CMake you can remove
them from the above pacman command and install them from another source. Make
sure that they're in your Windows PATH, and edit the relevant `mingw[32|64].ini`
file in your MSYS2 installation directory by uncommenting the line
`MSYS2_PATH_TYPE=inherit` to include them in your MSYS2 PATH.
Using Windows native Python can be helpful if you get errors when building LLVM.
You may also want to use Git for Windows, as it is often *much* faster. Turning
off real-time protection in the Windows Virus & Threat protections settings can
also help with long run times (although note that it will automatically turn
itself back on after some time).
### MSVC
MSVC builds of Rust additionally require an installation of Visual Studio 2017


@ -107,7 +107,7 @@ impl Lit {
/// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
pub fn from_token(token: &Token) -> Option<Lit> {
match token.uninterpolate().kind {
Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
Literal(token_lit) => Some(token_lit),
Interpolated(ref nt)
if let NtExpr(expr) | NtLiteral(expr) = &nt.0
@ -183,7 +183,7 @@ impl LitKind {
}
}
pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
let ident_token = Token::new(Ident(name, is_raw), span);
!ident_token.is_reserved_ident()
@ -214,7 +214,7 @@ pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
.contains(&name)
}
fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
let ident_token = Token::new(Ident(name, is_raw), span);
!ident_token.is_reserved_ident()
@ -223,6 +223,24 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
.contains(&name)
}
#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)]
pub enum IdentIsRaw {
No,
Yes,
}
impl From<bool> for IdentIsRaw {
fn from(b: bool) -> Self {
if b { Self::Yes } else { Self::No }
}
}
impl From<IdentIsRaw> for bool {
fn from(is_raw: IdentIsRaw) -> bool {
matches!(is_raw, IdentIsRaw::Yes)
}
}
// SAFETY: due to the `Clone` impl below, all fields of all variants other than
// `Interpolated` must impl `Copy`.
#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
@ -298,7 +316,7 @@ pub enum TokenKind {
/// Do not forget about `NtIdent` when you want to match on identifiers.
/// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to
/// treat regular and interpolated identifiers in the same way.
Ident(Symbol, /* is_raw */ bool),
Ident(Symbol, IdentIsRaw),
/// Lifetime identifier token.
/// Do not forget about `NtLifetime` when you want to match on lifetime identifiers.
/// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to
@ -411,7 +429,7 @@ impl Token {
/// Recovers a `Token` from an `Ident`. This creates a raw identifier if necessary.
pub fn from_ast_ident(ident: Ident) -> Self {
Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span)
Token::new(Ident(ident.name, ident.is_raw_guess().into()), ident.span)
}
/// For interpolated tokens, returns a span of the fragment to which the interpolated
@ -567,7 +585,7 @@ impl Token {
pub fn can_begin_literal_maybe_minus(&self) -> bool {
match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true,
Ident(name, false) if name.is_bool_lit() => true,
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
Interpolated(ref nt) => match &nt.0 {
NtLiteral(_) => true,
NtExpr(e) => match &e.kind {
@ -602,7 +620,7 @@ impl Token {
/// Returns an identifier if this token is an identifier.
#[inline]
pub fn ident(&self) -> Option<(Ident, /* is_raw */ bool)> {
pub fn ident(&self) -> Option<(Ident, IdentIsRaw)> {
// We avoid using `Token::uninterpolate` here because it's slow.
match &self.kind {
&Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)),
@ -755,7 +773,7 @@ impl Token {
/// Returns `true` if the token is a non-raw identifier for which `pred` holds.
pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool {
match self.ident() {
Some((id, false)) => pred(id),
Some((id, IdentIsRaw::No)) => pred(id),
_ => false,
}
}
@ -806,7 +824,7 @@ impl Token {
_ => return None,
},
SingleQuote => match joint.kind {
Ident(name, false) => Lifetime(Symbol::intern(&format!("'{name}"))),
Ident(name, IdentIsRaw::No) => Lifetime(Symbol::intern(&format!("'{name}"))),
_ => return None,
},
@ -836,7 +854,7 @@ pub enum Nonterminal {
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>),
NtIdent(Ident, /* is_raw */ bool),
NtIdent(Ident, IdentIsRaw),
NtLifetime(Ident),
NtLiteral(P<ast::Expr>),
/// Stuff inside brackets for attributes
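The hunks above replace the bare `/* is_raw */ bool` on identifier tokens with a dedicated `IdentIsRaw` enum plus `From` conversions in both directions. As a reading aid, here is a minimal standalone sketch of that pattern (the real type lives in `rustc_ast::token`; `describe` is a made-up helper used only for illustration):

```rust
#[derive(Copy, Clone, PartialEq, Debug)]
enum IdentIsRaw {
    No,
    Yes,
}

impl From<bool> for IdentIsRaw {
    fn from(b: bool) -> Self {
        if b { Self::Yes } else { Self::No }
    }
}

impl From<IdentIsRaw> for bool {
    fn from(is_raw: IdentIsRaw) -> bool {
        matches!(is_raw, IdentIsRaw::Yes)
    }
}

// Call sites match on named variants instead of `true`/`false`,
// which is what most of the mechanical changes below amount to.
fn describe(name: &str, is_raw: IdentIsRaw) -> String {
    match is_raw {
        IdentIsRaw::Yes => format!("r#{name}"),
        IdentIsRaw::No => name.to_string(),
    }
}

fn main() {
    assert_eq!(describe("match", IdentIsRaw::from(true)), "r#match");
    assert!(!bool::from(IdentIsRaw::No));
}
```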


@ -656,7 +656,7 @@ impl TokenStream {
DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Bracket,
[
TokenTree::token_alone(token::Ident(sym::doc, false), span),
TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span),
TokenTree::token_alone(token::Eq, span),
TokenTree::token_alone(
TokenKind::lit(token::StrRaw(num_of_hashes), data, None),


@ -185,7 +185,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
// IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if`
(Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. }, _))
if !Ident::new(*sym, *span).is_reserved() || *is_raw =>
if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) =>
{
false
}
@ -197,7 +197,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
|| *sym == kw::Fn
|| *sym == kw::SelfUpper
|| *sym == kw::Pub
|| *is_raw =>
|| matches!(is_raw, IdentIsRaw::Yes) =>
{
false
}
@ -731,7 +731,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
token::NtBlock(e) => self.block_to_string(e),
token::NtStmt(e) => self.stmt_to_string(e),
token::NtPat(e) => self.pat_to_string(e),
token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(),
&token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw.into()).to_string(),
token::NtLifetime(e) => e.to_string(),
token::NtLiteral(e) => self.expr_to_string(e),
token::NtVis(e) => self.vis_to_string(e),
@ -795,7 +795,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
/* Name components */
token::Ident(s, is_raw) => {
IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string().into()
IdentPrinter::new(s, is_raw.into(), convert_dollar_crate).to_string().into()
}
token::Lifetime(s) => s.to_string().into(),


@ -1,3 +1,4 @@
use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter};
@ -416,7 +417,7 @@ fn parse_reg<'a>(
) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let result = match p.token.uninterpolate().kind {
token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
*explicit_reg = true;
ast::InlineAsmRegOrRegClass::Reg(symbol)


@ -1,7 +1,6 @@
use rustc_ast::{
ptr::P,
token,
token::Delimiter,
token::{self, Delimiter, IdentIsRaw},
tokenstream::{DelimSpan, TokenStream, TokenTree},
BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MethodCall, Mutability,
Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID,
@ -170,7 +169,10 @@ impl<'cx, 'a> Context<'cx, 'a> {
];
let captures = self.capture_decls.iter().flat_map(|cap| {
[
TokenTree::token_joint_hidden(token::Ident(cap.ident.name, false), cap.ident.span),
TokenTree::token_joint_hidden(
token::Ident(cap.ident.name, IdentIsRaw::No),
cap.ident.span,
),
TokenTree::token_alone(token::Comma, self.span),
]
});


@ -198,7 +198,7 @@ where
match fields {
Unnamed(fields, is_tuple) => {
let path_expr = cx.expr_path(outer_pat_path);
if !*is_tuple {
if matches!(is_tuple, IsTuple::No) {
path_expr
} else {
let fields = fields


@ -62,8 +62,8 @@ fn default_struct_substructure(
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), ThinVec::new());
let expr = match summary {
Unnamed(_, false) => cx.expr_ident(trait_span, substr.type_ident),
Unnamed(fields, true) => {
Unnamed(_, IsTuple::No) => cx.expr_ident(trait_span, substr.type_ident),
Unnamed(fields, IsTuple::Yes) => {
let exprs = fields.iter().map(|sp| default_call(*sp)).collect();
cx.expr_call_ident(trait_span, substr.type_ident, exprs)
}


@ -286,10 +286,16 @@ pub struct FieldInfo {
pub other_selflike_exprs: Vec<P<Expr>>,
}
#[derive(Copy, Clone)]
pub enum IsTuple {
No,
Yes,
}
/// Fields for a static method
pub enum StaticFields {
/// Tuple and unit structs/enum variants like this.
Unnamed(Vec<Span>, bool /*is tuple*/),
Unnamed(Vec<Span>, IsTuple),
/// Normal structs/struct variants.
Named(Vec<(Ident, Span)>),
}
@ -1439,7 +1445,10 @@ impl<'a> TraitDef<'a> {
}
}
let is_tuple = matches!(struct_def, ast::VariantData::Tuple(..));
let is_tuple = match struct_def {
ast::VariantData::Tuple(..) => IsTuple::Yes,
_ => IsTuple::No,
};
match (just_spans.is_empty(), named_idents.is_empty()) {
(false, false) => cx
.dcx()


@ -10,6 +10,7 @@ use rustc_ast::{
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{Applicability, DiagnosticBuilder, MultiSpan, PResult, SingleLabelManySpans};
use rustc_expand::base::{self, *};
use rustc_parse::parser::Recovered;
use rustc_parse_format as parse;
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{BytePos, InnerSpan, Span};
@ -111,9 +112,8 @@ fn parse_args<'a>(ecx: &mut ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<
_ => return Err(err),
}
}
Ok(recovered) => {
assert!(recovered);
}
Ok(Recovered::Yes) => (),
Ok(Recovered::No) => unreachable!(),
}
}
first = false;


@ -0,0 +1,47 @@
From 9f65e742ba3e41474e6126c6c4469c48eaa6ca7e Mon Sep 17 00:00:00 2001
From: Chris Denton <chris@chrisdenton.dev>
Date: Tue, 20 Feb 2024 16:01:40 -0300
Subject: [PATCH] Don't use raw-dylib in std
---
library/std/src/sys/pal/windows/c.rs | 2 +-
library/std/src/sys/pal/windows/rand.rs | 3 +--
2 files changed, 2 insertions(+), 3 deletions(-)
diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs
index ad8e01bfa9b..9ca8e4c16ce 100644
--- a/library/std/src/sys/pal/windows/c.rs
+++ b/library/std/src/sys/pal/windows/c.rs
@@ -323,7 +323,7 @@ pub unsafe fn NtWriteFile(
// Use raw-dylib to import ProcessPrng as we can't rely on there being an import library.
cfg_if::cfg_if! {
-if #[cfg(not(target_vendor = "win7"))] {
+if #[cfg(any())] {
#[cfg(target_arch = "x86")]
#[link(name = "bcryptprimitives", kind = "raw-dylib", import_name_type = "undecorated")]
extern "system" {
diff --git a/library/std/src/sys/pal/windows/rand.rs b/library/std/src/sys/pal/windows/rand.rs
index e427546222a..f2fe42a4d51 100644
--- a/library/std/src/sys/pal/windows/rand.rs
+++ b/library/std/src/sys/pal/windows/rand.rs
@@ -2,7 +2,7 @@
use core::mem;
use core::ptr;
-#[cfg(not(target_vendor = "win7"))]
+#[cfg(any())]
#[inline]
pub fn hashmap_random_keys() -> (u64, u64) {
let mut v = (0, 0);
@@ -13,7 +13,6 @@ pub fn hashmap_random_keys() -> (u64, u64) {
v
}
-#[cfg(target_vendor = "win7")]
pub fn hashmap_random_keys() -> (u64, u64) {
use crate::ffi::c_void;
use crate::io;
--
2.42.0.windows.2


@ -2106,9 +2106,16 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
return Ok(args[0].immediate());
}
#[derive(Copy, Clone)]
enum Sign {
Unsigned,
Signed,
}
use Sign::*;
enum Style {
Float,
Int(/* is signed? */ bool),
Int(Sign),
Unsupported,
}
@ -2116,11 +2123,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
// vectors of pointer-sized integers should've been
// disallowed before here, so this unwrap is safe.
ty::Int(i) => (
Style::Int(true),
Style::Int(Signed),
i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Uint(u) => (
Style::Int(false),
Style::Int(Unsigned),
u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Float(f) => (Style::Float, f.bit_width()),
@ -2128,11 +2135,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
};
let (out_style, out_width) = match out_elem.kind() {
ty::Int(i) => (
Style::Int(true),
Style::Int(Signed),
i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Uint(u) => (
Style::Int(false),
Style::Int(Unsigned),
u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Float(f) => (Style::Float, f.bit_width()),
@ -2140,31 +2147,31 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
};
match (in_style, out_style) {
(Style::Int(in_is_signed), Style::Int(_)) => {
(Style::Int(sign), Style::Int(_)) => {
return Ok(match in_width.cmp(&out_width) {
Ordering::Greater => bx.trunc(args[0].immediate(), llret_ty),
Ordering::Equal => args[0].immediate(),
Ordering::Less => {
if in_is_signed {
bx.sext(args[0].immediate(), llret_ty)
} else {
bx.zext(args[0].immediate(), llret_ty)
}
}
Ordering::Less => match sign {
Sign::Signed => bx.sext(args[0].immediate(), llret_ty),
Sign::Unsigned => bx.zext(args[0].immediate(), llret_ty),
},
});
}
(Style::Int(in_is_signed), Style::Float) => {
return Ok(if in_is_signed {
bx.sitofp(args[0].immediate(), llret_ty)
} else {
bx.uitofp(args[0].immediate(), llret_ty)
});
(Style::Int(Sign::Signed), Style::Float) => {
return Ok(bx.sitofp(args[0].immediate(), llret_ty));
}
(Style::Float, Style::Int(out_is_signed)) => {
return Ok(match (out_is_signed, name == sym::simd_as) {
(false, false) => bx.fptoui(args[0].immediate(), llret_ty),
(true, false) => bx.fptosi(args[0].immediate(), llret_ty),
(_, true) => bx.cast_float_to_int(out_is_signed, args[0].immediate(), llret_ty),
(Style::Int(Sign::Unsigned), Style::Float) => {
return Ok(bx.uitofp(args[0].immediate(), llret_ty));
}
(Style::Float, Style::Int(sign)) => {
return Ok(match (sign, name == sym::simd_as) {
(Sign::Unsigned, false) => bx.fptoui(args[0].immediate(), llret_ty),
(Sign::Signed, false) => bx.fptosi(args[0].immediate(), llret_ty),
(_, true) => bx.cast_float_to_int(
matches!(sign, Sign::Signed),
args[0].immediate(),
llret_ty,
),
});
}
(Style::Float, Style::Float) => {
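The `Sign` enum above replaces the `/* is signed? */ bool` carried by `Style::Int`, so the vector-cast logic can match on named variants. A small self-contained sketch of the int-to-int dispatch, detached from the LLVM builder (the returned strings merely name the cast that `bx.trunc`/`bx.sext`/`bx.zext` would emit):

```rust
use std::cmp::Ordering;

#[derive(Copy, Clone)]
enum Sign {
    Unsigned,
    Signed,
}

// Decide how an integer element of `in_width` bits becomes one of
// `out_width` bits, mirroring the (Int, Int) arm of the match above.
fn int_to_int_cast(in_width: u64, out_width: u64, sign: Sign) -> &'static str {
    match in_width.cmp(&out_width) {
        Ordering::Greater => "trunc",
        Ordering::Equal => "no-op",
        Ordering::Less => match sign {
            Sign::Signed => "sext",
            Sign::Unsigned => "zext",
        },
    }
}

fn main() {
    assert_eq!(int_to_int_cast(32, 16, Sign::Signed), "trunc");
    assert_eq!(int_to_int_cast(16, 32, Sign::Signed), "sext");
    assert_eq!(int_to_int_cast(16, 32, Sign::Unsigned), "zext");
}
```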


@ -107,7 +107,7 @@
use crate::errors;
use crate::mbe::{KleeneToken, TokenTree};
use rustc_ast::token::{Delimiter, Token, TokenKind};
use rustc_ast::token::{Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{DiagnosticMessage, MultiSpan};
@ -409,7 +409,7 @@ fn check_nested_occurrences(
match (state, tt) {
(
NestedMacroState::Empty,
&TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }),
&TokenTree::Token(Token { kind: TokenKind::Ident(name, IdentIsRaw::No), .. }),
) => {
if name == kw::MacroRules {
state = NestedMacroState::MacroRules;


@ -8,6 +8,7 @@ use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser}
use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
use crate::mbe::transcribe::transcribe;
use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*};
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
@ -1302,7 +1303,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
},
_ => IsInFollow::No(TOKENS),
@ -1313,7 +1316,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
FatArrow | Comma | Eq => IsInFollow::Yes,
Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
},
_ => IsInFollow::No(TOKENS),
@ -1336,7 +1341,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
| BinOp(token::Shr)
| Semi
| BinOp(token::Or) => IsInFollow::Yes,
Ident(name, false) if name == kw::As || name == kw::Where => {
Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
@ -1364,7 +1369,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
Comma => IsInFollow::Yes,
Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
_ => {
if token.can_begin_type() {
IsInFollow::Yes


@ -1,4 +1,4 @@
use rustc_ast::token::{self, Delimiter};
use rustc_ast::token::{self, Delimiter, IdentIsRaw};
use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
use rustc_ast::{LitIntType, LitKind};
use rustc_ast_pretty::pprust;
@ -142,7 +142,7 @@ fn parse_ident<'sess>(
if let Some(tt) = iter.next()
&& let TokenTree::Token(token, _) = tt
{
if let Some((elem, false)) = token.ident() {
if let Some((elem, IdentIsRaw::No)) = token.ident() {
return Ok(elem);
}
let token_str = pprust::token_to_string(token);


@ -2,7 +2,7 @@ use crate::errors;
use crate::mbe::macro_parser::count_metavar_decls;
use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree};
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::token::{self, Delimiter, IdentIsRaw, Token};
use rustc_ast::{tokenstream, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::Features;
@ -222,7 +222,7 @@ fn parse_tree<'a>(
Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => {
let (ident, is_raw) = token.ident().unwrap();
let span = ident.span.with_lo(span.lo());
if ident.name == kw::Crate && !is_raw {
if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) {
TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
} else {
TokenTree::MetaVar(span, ident)


@ -2,6 +2,7 @@ use crate::tests::{
matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error,
};
use ast::token::IdentIsRaw;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
@ -74,9 +75,12 @@ fn string_to_tts_macro() {
match tts {
[
TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
_,
),
TokenTree::Token(Token { kind: token::Not, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _),
TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
TokenTree::Delimited(.., macro_delim, macro_tts),
] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
let tts = &macro_tts.trees().collect::<Vec<_>>();
@ -90,7 +94,10 @@ fn string_to_tts_macro() {
match &tts[..] {
[
TokenTree::Token(Token { kind: token::Dollar, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name, IdentIsRaw::No), .. },
_,
),
] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
}
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@ -99,7 +106,10 @@ fn string_to_tts_macro() {
match &tts[..] {
[
TokenTree::Token(Token { kind: token::Dollar, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name, IdentIsRaw::No), .. },
_,
),
] if second_delim == &Delimiter::Parenthesis
&& name.as_str() == "a" => {}
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
@ -119,8 +129,11 @@ fn string_to_tts_1() {
let tts = string_to_stream("fn a(b: i32) { b; }".to_string());
let expected = TokenStream::new(vec![
TokenTree::token_alone(token::Ident(kw::Fn, false), sp(0, 2)),
TokenTree::token_joint_hidden(token::Ident(Symbol::intern("a"), false), sp(3, 4)),
TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)),
TokenTree::token_joint_hidden(
token::Ident(Symbol::intern("a"), IdentIsRaw::No),
sp(3, 4),
),
TokenTree::Delimited(
DelimSpan::from_pair(sp(4, 5), sp(11, 12)),
// `JointHidden` because the `(` is followed immediately by
@ -128,10 +141,16 @@ fn string_to_tts_1() {
DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Parenthesis,
TokenStream::new(vec![
TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(5, 6)),
TokenTree::token_joint(
token::Ident(Symbol::intern("b"), IdentIsRaw::No),
sp(5, 6),
),
TokenTree::token_alone(token::Colon, sp(6, 7)),
// `JointHidden` because the `i32` is immediately followed by the `)`.
TokenTree::token_joint_hidden(token::Ident(sym::i32, false), sp(8, 11)),
TokenTree::token_joint_hidden(
token::Ident(sym::i32, IdentIsRaw::No),
sp(8, 11),
),
])
.into(),
),
@ -143,7 +162,10 @@ fn string_to_tts_1() {
DelimSpacing::new(Spacing::Alone, Spacing::Alone),
Delimiter::Brace,
TokenStream::new(vec![
TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(15, 16)),
TokenTree::token_joint(
token::Ident(Symbol::intern("b"), IdentIsRaw::No),
sp(15, 16),
),
// `Alone` because the `;` is followed by whitespace.
TokenTree::token_alone(token::Semi, sp(16, 17)),
])


@ -1,4 +1,5 @@
use crate::base::ExtCtxt;
use ast::token::IdentIsRaw;
use pm::bridge::{
server, DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree,
};
@ -216,7 +217,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
Question => op("?"),
SingleQuote => op("'"),
Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { sym, is_raw, span })),
Ident(sym, is_raw) => {
trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span }))
}
Lifetime(name) => {
let ident = symbol::Ident::new(name, span).without_first_quote();
trees.extend([
@ -238,7 +241,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
escaped.extend(ch.escape_debug());
}
let stream = [
Ident(sym::doc, false),
Ident(sym::doc, IdentIsRaw::No),
Eq,
TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
]
@ -259,7 +262,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
Interpolated(ref nt) if let NtIdent(ident, is_raw) = &nt.0 => {
trees.push(TokenTree::Ident(Ident {
sym: ident.name,
is_raw: *is_raw,
is_raw: matches!(is_raw, IdentIsRaw::Yes),
span: ident.span,
}))
}
@ -352,7 +355,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
}
TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
rustc.sess().symbol_gallery.insert(sym, span);
smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw), span)]
smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw.into()), span)]
}
TokenTree::Literal(self::Literal {
kind: self::LitKind::Integer,
@ -570,7 +573,7 @@ impl server::TokenStream for Rustc<'_, '_> {
match &expr.kind {
ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => {
Ok(tokenstream::TokenStream::token_alone(
token::Ident(token_lit.symbol, false),
token::Ident(token_lit.symbol, IdentIsRaw::No),
expr.span,
))
}


@ -1,6 +1,6 @@
use crate::tests::string_to_stream;
use rustc_ast::token;
use rustc_ast::token::{self, IdentIsRaw};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_span::create_default_session_globals_then;
use rustc_span::{BytePos, Span, Symbol};
@ -86,7 +86,8 @@ fn test_diseq_1() {
fn test_is_empty() {
create_default_session_globals_then(|| {
let test0 = TokenStream::default();
let test1 = TokenStream::token_alone(token::Ident(Symbol::intern("a"), false), sp(0, 1));
let test1 =
TokenStream::token_alone(token::Ident(Symbol::intern("a"), IdentIsRaw::No), sp(0, 1));
let test2 = string_to_ts("foo(bar::baz)");
assert_eq!(test0.is_empty(), true);


@ -84,6 +84,8 @@ declare_features! (
(accepted, cfg_doctest, "1.40.0", Some(62210)),
/// Enables `#[cfg(panic = "...")]` config key.
(accepted, cfg_panic, "1.60.0", Some(77443)),
/// Allows `cfg(target_abi = "...")`.
(accepted, cfg_target_abi, "CURRENT_RUSTC_VERSION", Some(80970)),
/// Allows `cfg(target_feature = "...")`.
(accepted, cfg_target_feature, "1.27.0", Some(29717)),
/// Allows `cfg(target_vendor = "...")`.


@ -25,7 +25,6 @@ pub type GatedCfg = (Symbol, Symbol, GateFn);
const GATED_CFGS: &[GatedCfg] = &[
// (name in cfg, feature, function to check if the feature is enabled)
(sym::overflow_checks, sym::cfg_overflow_checks, cfg_fn!(cfg_overflow_checks)),
(sym::target_abi, sym::cfg_target_abi, cfg_fn!(cfg_target_abi)),
(sym::target_thread_local, sym::cfg_target_thread_local, cfg_fn!(cfg_target_thread_local)),
(
sym::target_has_atomic_equal_alignment,


@ -373,8 +373,6 @@ declare_features! (
(unstable, cfg_sanitize, "1.41.0", Some(39699)),
/// Allows `cfg(sanitizer_cfi_generalize_pointers)` and `cfg(sanitizer_cfi_normalize_integers)`.
(unstable, cfg_sanitizer_cfi, "1.77.0", Some(89653)),
/// Allows `cfg(target_abi = "...")`.
(unstable, cfg_target_abi, "1.55.0", Some(80970)),
/// Allows `cfg(target(abi = "..."))`.
(unstable, cfg_target_compact, "1.63.0", Some(96901)),
/// Allows `cfg(target_has_atomic_load_store = "...")`.


@ -24,6 +24,11 @@ struct InherentOverlapChecker<'tcx> {
tcx: TyCtxt<'tcx>,
}
rustc_index::newtype_index! {
#[orderable]
pub struct RegionId {}
}
impl<'tcx> InherentOverlapChecker<'tcx> {
/// Checks whether any associated items in impls 1 and 2 share the same identifier and
/// namespace.
@ -205,11 +210,6 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
// This is advantageous to running the algorithm over the
// entire graph when there are many connected regions.
rustc_index::newtype_index! {
#[orderable]
pub struct RegionId {}
}
struct ConnectedRegion {
idents: SmallVec<[Symbol; 8]>,
impl_blocks: FxHashSet<usize>,


@ -1,14 +1,13 @@
//! Orphan checker: every impl either implements a trait defined in this
//! crate or pertains to a type defined in this crate.
use crate::errors;
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::ty::{self, AliasKind, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
use rustc_trait_selection::traits;
use crate::errors;
use rustc_trait_selection::traits::{self, IsFirstInputType};
#[instrument(skip(tcx), level = "debug")]
pub(crate) fn orphan_check_impl(
@ -288,7 +287,7 @@ fn emit_orphan_check_error<'tcx>(
(Vec::new(), Vec::new(), Vec::new(), Vec::new(), Vec::new());
let mut sugg = None;
for &(mut ty, is_target_ty) in &tys {
let span = if is_target_ty {
let span = if matches!(is_target_ty, IsFirstInputType::Yes) {
// Point at `D<A>` in `impl<A, B> for C<B> in D<A>`
self_ty_span
} else {
@ -321,7 +320,8 @@ fn emit_orphan_check_error<'tcx>(
}
}
let is_foreign = !trait_ref.def_id.is_local() && !is_target_ty;
let is_foreign =
!trait_ref.def_id.is_local() && matches!(is_target_ty, IsFirstInputType::No);
match &ty.kind() {
ty::Slice(_) => {


@ -640,16 +640,30 @@ pub(super) fn implied_predicates_with_filter(
// Now require that immediate supertraits are converted, which will, in
// turn, reach indirect supertraits, so we detect cycles now instead of
// overflowing during elaboration.
if matches!(filter, PredicateFilter::SelfOnly) {
for &(pred, span) in implied_bounds {
debug!("superbound: {:?}", pred);
if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder()
&& bound.polarity == ty::ImplPolarity::Positive
{
tcx.at(span).super_predicates_of(bound.def_id());
// overflowing during elaboration. Same for implied predicates, which
// make sure we walk into associated type bounds.
match filter {
PredicateFilter::SelfOnly => {
for &(pred, span) in implied_bounds {
debug!("superbound: {:?}", pred);
if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder()
&& bound.polarity == ty::ImplPolarity::Positive
{
tcx.at(span).super_predicates_of(bound.def_id());
}
}
}
PredicateFilter::SelfAndAssociatedTypeBounds => {
for &(pred, span) in implied_bounds {
debug!("superbound: {:?}", pred);
if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder()
&& bound.polarity == ty::ImplPolarity::Positive
{
tcx.at(span).implied_predicates_of(bound.def_id());
}
}
}
_ => {}
}
ty::GenericPredicates { parent: None, predicates: implied_bounds }


@ -23,6 +23,7 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
rustc_type_ir = { path = "../rustc_type_ir" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing = "0.1"
unicode-security = "0.1.0"
# tidy-alphabetical-end


@ -414,6 +414,29 @@ lint_non_fmt_panic_unused =
}
.add_fmt_suggestion = or add a "{"{"}{"}"}" format string to use the message literally
lint_non_local_definitions_cargo_update = the {$macro_kind} `{$macro_name}` may come from an old version of the `{$crate_name}` crate, try updating your dependency with `cargo update -p {$crate_name}`
lint_non_local_definitions_deprecation = this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue <https://github.com/rust-lang/rust/issues/120363>
lint_non_local_definitions_impl = non-local `impl` definition, they should be avoided as they go against expectation
.help =
move this `impl` block outside the of the current {$body_kind_descr} {$depth ->
[one] `{$body_name}`
*[other] `{$body_name}` and up {$depth} bodies
}
.non_local = an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block
.exception = one exception to the rule are anon-const (`const _: () = {"{"} ... {"}"}`) at top-level module and anon-const at the same nesting as the trait or type
.const_anon = use a const-anon item to suppress this lint
lint_non_local_definitions_macro_rules = non-local `macro_rules!` definition, they should be avoided as they go against expectation
.help =
remove the `#[macro_export]` or move this `macro_rules!` outside the of the current {$body_kind_descr} {$depth ->
[one] `{$body_name}`
*[other] `{$body_name}` and up {$depth} bodies
}
.non_local = a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute
.exception = one exception to the rule are anon-const (`const _: () = {"{"} ... {"}"}`) at top-level module
lint_non_snake_case = {$sort} `{$name}` should have a snake case name
.rename_or_convert_suggestion = rename the identifier or convert it to a snake case raw identifier
.cannot_convert_note = `{$sc}` cannot be used as a raw identifier


@ -70,11 +70,15 @@ impl<'tcx> LateLintPass<'tcx> for ArrayIntoIter {
// Check if the method call actually calls the libcore
// `IntoIterator::into_iter`.
let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
match cx.tcx.trait_of_item(def_id) {
Some(trait_id) if cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id) => {}
_ => return,
};
let trait_id = cx
.typeck_results()
.type_dependent_def_id(expr.hir_id)
.and_then(|did| cx.tcx.trait_of_item(did));
if trait_id.is_none()
|| !cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id.unwrap())
{
return;
}
// As this is a method call expression, we have at least one argument.
let receiver_ty = cx.typeck_results().expr_ty(receiver_arg);


@ -1825,7 +1825,7 @@ impl KeywordIdents {
match tt {
// Only report non-raw idents.
TokenTree::Token(token, _) => {
if let Some((ident, false)) = token.ident() {
if let Some((ident, token::IdentIsRaw::No)) = token.ident() {
self.check_ident_token(cx, UnderMacro(true), ident);
}
}


@ -70,6 +70,7 @@ mod methods;
mod multiple_supertrait_upcastable;
mod non_ascii_idents;
mod non_fmt_panic;
mod non_local_def;
mod nonstandard_style;
mod noop_method_call;
mod opaque_hidden_inferred_bound;
@ -105,6 +106,7 @@ use methods::*;
use multiple_supertrait_upcastable::*;
use non_ascii_idents::*;
use non_fmt_panic::NonPanicFmt;
use non_local_def::*;
use nonstandard_style::*;
use noop_method_call::*;
use opaque_hidden_inferred_bound::*;
@ -229,6 +231,7 @@ late_lint_methods!(
MissingDebugImplementations: MissingDebugImplementations,
MissingDoc: MissingDoc,
AsyncFnInTrait: AsyncFnInTrait,
NonLocalDefinitions: NonLocalDefinitions::default(),
]
]
);
@ -527,6 +530,11 @@ fn register_builtins(store: &mut LintStore) {
"no longer needed, see #93367 \
<https://github.com/rust-lang/rust/issues/93367> for more information",
);
store.register_removed(
"const_patterns_without_partial_eq",
"converted into hard error, see RFC #3535 \
<https://rust-lang.github.io/rfcs/3535-constants-in-patterns.html> for more information",
);
}
fn register_internals(store: &mut LintStore) {


@ -1334,6 +1334,45 @@ pub struct SuspiciousDoubleRefCloneDiag<'a> {
pub ty: Ty<'a>,
}
// non_local_defs.rs
#[derive(LintDiagnostic)]
pub enum NonLocalDefinitionsDiag {
#[diag(lint_non_local_definitions_impl)]
#[help]
#[note(lint_non_local)]
#[note(lint_exception)]
#[note(lint_non_local_definitions_deprecation)]
Impl {
depth: u32,
body_kind_descr: &'static str,
body_name: String,
#[subdiagnostic]
cargo_update: Option<NonLocalDefinitionsCargoUpdateNote>,
#[suggestion(lint_const_anon, code = "_", applicability = "machine-applicable")]
const_anon: Option<Span>,
},
#[diag(lint_non_local_definitions_macro_rules)]
#[help]
#[note(lint_non_local)]
#[note(lint_exception)]
#[note(lint_non_local_definitions_deprecation)]
MacroRules {
depth: u32,
body_kind_descr: &'static str,
body_name: String,
#[subdiagnostic]
cargo_update: Option<NonLocalDefinitionsCargoUpdateNote>,
},
}
#[derive(Subdiagnostic)]
#[note(lint_non_local_definitions_cargo_update)]
pub struct NonLocalDefinitionsCargoUpdateNote {
pub macro_kind: &'static str,
pub macro_name: Symbol,
pub crate_name: Symbol,
}
// pass_by_value.rs
#[derive(LintDiagnostic)]
#[diag(lint_pass_by_value)]


@ -0,0 +1,222 @@
use rustc_hir::{def::DefKind, Body, Item, ItemKind, Node, Path, QPath, TyKind};
use rustc_span::def_id::{DefId, LOCAL_CRATE};
use rustc_span::{sym, symbol::kw, ExpnKind, MacroKind};
use smallvec::{smallvec, SmallVec};
use crate::lints::{NonLocalDefinitionsCargoUpdateNote, NonLocalDefinitionsDiag};
use crate::{LateContext, LateLintPass, LintContext};
declare_lint! {
/// The `non_local_definitions` lint checks for `impl` blocks and `#[macro_export]`
/// macro inside bodies (functions, enum discriminant, ...).
///
/// ### Example
///
/// ```rust
/// trait MyTrait {}
/// struct MyStruct;
///
/// fn foo() {
/// impl MyTrait for MyStruct {}
/// }
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Creating non-local definitions go against expectation and can create discrepancies
/// in tooling. It should be avoided. It may become deny-by-default in edition 2024
/// and higher, see see the tracking issue <https://github.com/rust-lang/rust/issues/120363>.
///
/// An `impl` definition is non-local if it is nested inside an item and neither
/// the type nor the trait are at the same nesting level as the `impl` block.
///
/// All nested bodies (functions, enum discriminant, array length, consts) (expect for
/// `const _: Ty = { ... }` in top-level module, which is still undecided) are checked.
pub NON_LOCAL_DEFINITIONS,
Warn,
"checks for non-local definitions",
report_in_external_macro
}
#[derive(Default)]
pub struct NonLocalDefinitions {
body_depth: u32,
}
impl_lint_pass!(NonLocalDefinitions => [NON_LOCAL_DEFINITIONS]);
// FIXME(Urgau): Figure out how to handle modules nested in bodies.
// It's currently not handled by the current logic because modules are not bodies.
// They don't even follow the correct order (check_body -> check_mod -> check_body_post)
// instead check_mod is called after every body has been handled.
impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions {
fn check_body(&mut self, _cx: &LateContext<'tcx>, _body: &'tcx Body<'tcx>) {
self.body_depth += 1;
}
fn check_body_post(&mut self, _cx: &LateContext<'tcx>, _body: &'tcx Body<'tcx>) {
self.body_depth -= 1;
}
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
if self.body_depth == 0 {
return;
}
let parent = cx.tcx.parent(item.owner_id.def_id.into());
let parent_def_kind = cx.tcx.def_kind(parent);
let parent_opt_item_name = cx.tcx.opt_item_name(parent);
// Per RFC we (currently) ignore anon-const (`const _: Ty = ...`) in top-level module.
if self.body_depth == 1
&& parent_def_kind == DefKind::Const
&& parent_opt_item_name == Some(kw::Underscore)
{
return;
}
let cargo_update = || {
let oexpn = item.span.ctxt().outer_expn_data();
if let Some(def_id) = oexpn.macro_def_id
&& let ExpnKind::Macro(macro_kind, macro_name) = oexpn.kind
&& def_id.krate != LOCAL_CRATE
&& std::env::var_os("CARGO").is_some()
{
Some(NonLocalDefinitionsCargoUpdateNote {
macro_kind: macro_kind.descr(),
macro_name,
crate_name: cx.tcx.crate_name(def_id.krate),
})
} else {
None
}
};
match item.kind {
ItemKind::Impl(impl_) => {
// The RFC states:
//
// > An item nested inside an expression-containing item (through any
// > level of nesting) may not define an impl Trait for Type unless
// > either the **Trait** or the **Type** is also nested inside the
// > same expression-containing item.
//
// To achieve this we get try to get the paths of the _Trait_ and
// _Type_, and we look inside thoses paths to try a find in one
// of them a type whose parent is the same as the impl definition.
//
// If that's the case this means that this impl block declaration
// is using local items and so we don't lint on it.
// We also ignore anon-const in item by including the anon-const
// parent as well; and since it's quite uncommon, we use smallvec
// to avoid unnecessary heap allocations.
let local_parents: SmallVec<[DefId; 1]> = if parent_def_kind == DefKind::Const
&& parent_opt_item_name == Some(kw::Underscore)
{
smallvec![parent, cx.tcx.parent(parent)]
} else {
smallvec![parent]
};
let self_ty_has_local_parent = match impl_.self_ty.kind {
TyKind::Path(QPath::Resolved(_, ty_path)) => {
path_has_local_parent(ty_path, cx, &*local_parents)
}
TyKind::TraitObject([principle_poly_trait_ref, ..], _, _) => {
path_has_local_parent(
principle_poly_trait_ref.trait_ref.path,
cx,
&*local_parents,
)
}
TyKind::TraitObject([], _, _)
| TyKind::InferDelegation(_, _)
| TyKind::Slice(_)
| TyKind::Array(_, _)
| TyKind::Ptr(_)
| TyKind::Ref(_, _)
| TyKind::BareFn(_)
| TyKind::Never
| TyKind::Tup(_)
| TyKind::Path(_)
| TyKind::AnonAdt(_)
| TyKind::OpaqueDef(_, _, _)
| TyKind::Typeof(_)
| TyKind::Infer
| TyKind::Err(_) => false,
};
let of_trait_has_local_parent = impl_
.of_trait
.map(|of_trait| path_has_local_parent(of_trait.path, cx, &*local_parents))
.unwrap_or(false);
// If none of them have a local parent (LOGICAL NOR) this means that
// this impl definition is a non-local definition and so we lint on it.
if !(self_ty_has_local_parent || of_trait_has_local_parent) {
let const_anon = if self.body_depth == 1
&& parent_def_kind == DefKind::Const
&& parent_opt_item_name != Some(kw::Underscore)
&& let Some(parent) = parent.as_local()
&& let Node::Item(item) = cx.tcx.hir_node_by_def_id(parent)
&& let ItemKind::Const(ty, _, _) = item.kind
&& let TyKind::Tup(&[]) = ty.kind
{
Some(item.ident.span)
} else {
None
};
cx.emit_span_lint(
NON_LOCAL_DEFINITIONS,
item.span,
NonLocalDefinitionsDiag::Impl {
depth: self.body_depth,
body_kind_descr: cx.tcx.def_kind_descr(parent_def_kind, parent),
body_name: parent_opt_item_name
.map(|s| s.to_ident_string())
.unwrap_or_else(|| "<unnameable>".to_string()),
cargo_update: cargo_update(),
const_anon,
},
)
}
}
ItemKind::Macro(_macro, MacroKind::Bang)
if cx.tcx.has_attr(item.owner_id.def_id, sym::macro_export) =>
{
cx.emit_span_lint(
NON_LOCAL_DEFINITIONS,
item.span,
NonLocalDefinitionsDiag::MacroRules {
depth: self.body_depth,
body_kind_descr: cx.tcx.def_kind_descr(parent_def_kind, parent),
body_name: parent_opt_item_name
.map(|s| s.to_ident_string())
.unwrap_or_else(|| "<unnameable>".to_string()),
cargo_update: cargo_update(),
},
)
}
_ => {}
}
}
}
/// Given a path and a parent impl def id, this checks if the if parent resolution
/// def id correspond to the def id of the parent impl definition.
///
/// Given this path, we will look at the path (and ignore any generic args):
///
/// ```text
/// std::convert::PartialEq<Foo<Bar>>
/// ^^^^^^^^^^^^^^^^^^^^^^^
/// ```
fn path_has_local_parent(path: &Path<'_>, cx: &LateContext<'_>, local_parents: &[DefId]) -> bool {
path.res.opt_def_id().is_some_and(|did| local_parents.contains(&cx.tcx.parent(did)))
}
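To make the new `non_local_definitions` lint concrete, here is a hedged illustration based only on the lint documentation and RFC comments above; treat the exact trigger/exception behavior as an assumption rather than a specification:

```rust
#![allow(dead_code)]

trait MyTrait {}
struct MyStruct;
struct OtherStruct;

fn warns() {
    // Neither `MyTrait` nor `MyStruct` is nested at this level,
    // so this impl is a non-local definition and the lint fires.
    impl MyTrait for MyStruct {}
}

fn fine() {
    // `Local` is defined at the same nesting level as the impl,
    // so this impl counts as local.
    struct Local;
    impl MyTrait for Local {}
}

// The documented exception: an anon-const at top-level module scope.
const _: () = {
    impl MyTrait for OtherStruct {}
};

fn main() {}
```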


@ -32,7 +32,6 @@ declare_lint_pass! {
CONFLICTING_REPR_HINTS,
CONST_EVALUATABLE_UNCHECKED,
CONST_ITEM_MUTATION,
CONST_PATTERNS_WITHOUT_PARTIAL_EQ,
DEAD_CODE,
DEPRECATED,
DEPRECATED_CFG_ATTR_CRATE_TYPE_NAME,
@ -2342,57 +2341,6 @@ declare_lint! {
};
}
declare_lint! {
/// The `const_patterns_without_partial_eq` lint detects constants that are used in patterns,
/// whose type does not implement `PartialEq`.
///
/// ### Example
///
/// ```rust,compile_fail
/// #![deny(const_patterns_without_partial_eq)]
///
/// trait EnumSetType {
/// type Repr;
/// }
///
/// enum Enum8 { }
/// impl EnumSetType for Enum8 {
/// type Repr = u8;
/// }
///
/// #[derive(PartialEq, Eq)]
/// struct EnumSet<T: EnumSetType> {
/// __enumset_underlying: T::Repr,
/// }
///
/// const CONST_SET: EnumSet<Enum8> = EnumSet { __enumset_underlying: 3 };
///
/// fn main() {
/// match CONST_SET {
/// CONST_SET => { /* ok */ }
/// _ => panic!("match fell through?"),
/// }
/// }
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Previous versions of Rust accepted constants in patterns, even if those constants' types
/// did not have `PartialEq` implemented. The compiler falls back to comparing the value
/// field-by-field. In the future we'd like to ensure that pattern matching always
/// follows `PartialEq` semantics, so that trait bound will become a requirement for
/// matching on constants.
pub CONST_PATTERNS_WITHOUT_PARTIAL_EQ,
Warn,
"constant in pattern does not implement `PartialEq`",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reference: "issue #116122 <https://github.com/rust-lang/rust/issues/116122>",
};
}
declare_lint! {
/// The `ambiguous_associated_items` lint detects ambiguity between
/// [associated items] and [enum variants].


@ -55,7 +55,8 @@ use synstructure::Structure;
///
/// See rustc dev guide for more examples on using the `#[derive(Diagnostic)]`:
/// <https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-structs.html>
pub fn session_diagnostic_derive(s: Structure<'_>) -> TokenStream {
pub fn session_diagnostic_derive(mut s: Structure<'_>) -> TokenStream {
s.underscore_const(true);
DiagnosticDerive::new(s).into_tokens()
}
@ -101,7 +102,8 @@ pub fn session_diagnostic_derive(s: Structure<'_>) -> TokenStream {
///
/// See rustc dev guide for more examples on using the `#[derive(LintDiagnostic)]`:
/// <https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-structs.html#reference>
pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream {
pub fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream {
s.underscore_const(true);
LintDiagnosticDerive::new(s).into_tokens()
}
@ -151,6 +153,7 @@ pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream {
///
/// diag.subdiagnostic(RawIdentifierSuggestion { span, applicability, ident });
/// ```
pub fn session_subdiagnostic_derive(s: Structure<'_>) -> TokenStream {
pub fn session_subdiagnostic_derive(mut s: Structure<'_>) -> TokenStream {
s.underscore_const(true);
SubdiagnosticDeriveBuilder::new().into_tokens(s)
}


@ -74,6 +74,8 @@ fn hash_stable_derive_with_mode(
HashStableMode::Generic | HashStableMode::NoContext => parse_quote!(__CTX),
};
s.underscore_const(true);
// no_context impl is able to derive by-field, which is closer to a perfect derive.
s.add_bounds(match mode {
HashStableMode::Normal | HashStableMode::Generic => synstructure::AddBounds::Generics,


@ -4,6 +4,7 @@ use syn::parse_quote;
pub fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
s.add_bounds(synstructure::AddBounds::Generics);
s.bind_with(|_| synstructure::BindStyle::Move);
s.underscore_const(true);
let tcx: syn::Lifetime = parse_quote!('tcx);
let newtcx: syn::GenericParam = parse_quote!('__lifted);


@ -15,6 +15,7 @@ pub fn type_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_type_ir::codec::TyDecoder #bound });
s.add_bounds(synstructure::AddBounds::Fields);
s.underscore_const(true);
decodable_body(s, decoder_ty)
}
@ -26,6 +27,7 @@ pub fn meta_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
s.add_impl_generic(parse_quote! { '__a });
let decoder_ty = quote! { DecodeContext<'__a, 'tcx> };
s.add_bounds(synstructure::AddBounds::Generics);
s.underscore_const(true);
decodable_body(s, decoder_ty)
}
@ -34,6 +36,7 @@ pub fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke
let decoder_ty = quote! { __D };
s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_span::SpanDecoder});
s.add_bounds(synstructure::AddBounds::Generics);
s.underscore_const(true);
decodable_body(s, decoder_ty)
}
@ -42,12 +45,13 @@ pub fn decodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macr
let decoder_ty = quote! { __D };
s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_serialize::Decoder});
s.add_bounds(synstructure::AddBounds::Generics);
s.underscore_const(true);
decodable_body(s, decoder_ty)
}
fn decodable_body(
s: synstructure::Structure<'_>,
mut s: synstructure::Structure<'_>,
decoder_ty: TokenStream,
) -> proc_macro2::TokenStream {
if let syn::Data::Union(_) = s.ast().data {
@ -93,6 +97,7 @@ fn decodable_body(
}
}
};
s.underscore_const(true);
s.bound_impl(
quote!(::rustc_serialize::Decodable<#decoder_ty>),
@ -130,6 +135,7 @@ pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
let encoder_ty = quote! { __E };
s.add_impl_generic(parse_quote! {#encoder_ty: ::rustc_type_ir::codec::TyEncoder #bound });
s.add_bounds(synstructure::AddBounds::Fields);
s.underscore_const(true);
encodable_body(s, encoder_ty, false)
}
@ -141,6 +147,7 @@ pub fn meta_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
s.add_impl_generic(parse_quote! { '__a });
let encoder_ty = quote! { EncodeContext<'__a, 'tcx> };
s.add_bounds(synstructure::AddBounds::Generics);
s.underscore_const(true);
encodable_body(s, encoder_ty, true)
}
@ -149,6 +156,7 @@ pub fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke
let encoder_ty = quote! { __E };
s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_span::SpanEncoder});
s.add_bounds(synstructure::AddBounds::Generics);
s.underscore_const(true);
encodable_body(s, encoder_ty, false)
}
@ -157,6 +165,7 @@ pub fn encodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macr
let encoder_ty = quote! { __E };
s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_serialize::Encoder});
s.add_bounds(synstructure::AddBounds::Generics);
s.underscore_const(true);
encodable_body(s, encoder_ty, false)
}
@ -170,6 +179,7 @@ fn encodable_body(
panic!("cannot derive on union")
}
s.underscore_const(true);
s.bind_with(|binding| {
// Handle the lack of a blanket reference impl.
if let syn::Type::Reference(_) = binding.ast().ty {


@ -6,6 +6,8 @@ pub fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::
panic!("cannot derive on union")
}
s.underscore_const(true);
if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
s.add_impl_generic(parse_quote! { 'tcx });
}


@ -6,6 +6,8 @@ pub fn type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
panic!("cannot derive on union")
}
s.underscore_const(true);
// ignore fields with #[type_visitable(ignore)]
s.filter(|bi| {
let mut ignored = false;


@ -767,6 +767,14 @@ pub struct TypeNotStructural<'tcx> {
pub non_sm_ty: Ty<'tcx>,
}
#[derive(Diagnostic)]
#[diag(mir_build_non_partial_eq_match)]
pub struct TypeNotPartialEq<'tcx> {
#[primary_span]
pub span: Span,
pub non_peq_ty: Ty<'tcx>,
}
#[derive(Diagnostic)]
#[diag(mir_build_invalid_pattern)]
pub struct InvalidPattern<'tcx> {
@ -822,12 +830,6 @@ pub struct NontrivialStructuralMatch<'tcx> {
pub non_sm_ty: Ty<'tcx>,
}
#[derive(LintDiagnostic)]
#[diag(mir_build_non_partial_eq_match)]
pub struct NonPartialEqMatch<'tcx> {
pub non_peq_ty: Ty<'tcx>,
}
#[derive(Diagnostic)]
#[diag(mir_build_pattern_not_covered, code = E0005)]
pub(crate) struct PatternNotCovered<'s, 'tcx> {


@ -16,7 +16,7 @@ use std::cell::Cell;
use super::PatCtxt;
use crate::errors::{
IndirectStructuralMatch, InvalidPattern, NaNPattern, NonPartialEqMatch, PointerPattern,
IndirectStructuralMatch, InvalidPattern, NaNPattern, PointerPattern, TypeNotPartialEq,
TypeNotStructural, UnionPattern, UnsizedPattern,
};
@ -208,15 +208,12 @@ impl<'tcx> ConstToPat<'tcx> {
);
}
// Always check for `PartialEq`, even if we emitted other lints. (But not if there were
// any errors.) This ensures it shows up in cargo's future-compat reports as well.
// Always check for `PartialEq` if we had no other errors yet.
if !self.type_has_partial_eq_impl(cv.ty()) {
self.tcx().emit_node_span_lint(
lint::builtin::CONST_PATTERNS_WITHOUT_PARTIAL_EQ,
self.id,
self.span,
NonPartialEqMatch { non_peq_ty: cv.ty() },
);
let err = TypeNotPartialEq { span: self.span, non_peq_ty: cv.ty() };
let e = self.tcx().dcx().emit_err(err);
let kind = PatKind::Error(e);
return Box::new(Pat { span: self.span, ty: cv.ty(), kind });
}
}


@ -585,20 +585,32 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
val.into()
}
Aggregate(ref kind, ref fields) => Value::Aggregate {
fields: fields
.iter()
.map(|field| self.eval_operand(field).map_or(Value::Uninit, Value::Immediate))
.collect(),
variant: match **kind {
AggregateKind::Adt(_, variant, _, _, _) => variant,
AggregateKind::Array(_)
| AggregateKind::Tuple
| AggregateKind::Closure(_, _)
| AggregateKind::Coroutine(_, _)
| AggregateKind::CoroutineClosure(_, _) => VariantIdx::new(0),
},
},
Aggregate(ref kind, ref fields) => {
// Do not const pop union fields as they can be
// made to produce values that don't match their
// underlying layout's type (see ICE #121534).
// If the last element of the `Adt` tuple
// is `Some` it indicates the ADT is a union
if let AggregateKind::Adt(_, _, _, _, Some(_)) = **kind {
return None;
};
Value::Aggregate {
fields: fields
.iter()
.map(|field| {
self.eval_operand(field).map_or(Value::Uninit, Value::Immediate)
})
.collect(),
variant: match **kind {
AggregateKind::Adt(_, variant, _, _, _) => variant,
AggregateKind::Array(_)
| AggregateKind::Tuple
| AggregateKind::Closure(_, _)
| AggregateKind::Coroutine(_, _)
| AggregateKind::CoroutineClosure(_, _) => VariantIdx::new(0),
},
}
}
Repeat(ref op, n) => {
trace!(?op, ?n);


@ -4,7 +4,7 @@ use crate::errors;
use crate::lexer::unicode_chars::UNICODE_ARRAY;
use crate::make_unclosed_delims_error;
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
@ -181,7 +181,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
}
self.sess.raw_identifier_spans.push(span);
token::Ident(sym, true)
token::Ident(sym, IdentIsRaw::Yes)
}
rustc_lexer::TokenKind::UnknownPrefix => {
self.report_unknown_prefix(start);
@ -201,7 +201,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
let span = self.mk_sp(start, self.pos);
self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
.push(span);
token::Ident(sym, false)
token::Ident(sym, IdentIsRaw::No)
}
// split up (raw) c string literals to an ident and a string literal when edition < 2021.
rustc_lexer::TokenKind::Literal {
@ -339,7 +339,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
let sym = nfc_normalize(self.str_from(start));
let span = self.mk_sp(start, self.pos);
self.sess.symbol_gallery.insert(sym, span);
token::Ident(sym, false)
token::Ident(sym, IdentIsRaw::No)
}
/// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly

View file

@ -307,7 +307,7 @@ pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
// fancier error recovery to it, as there will be less overall work to do this way.
const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
(" ", "Space", None),
("_", "Underscore", Some(token::Ident(kw::Underscore, false))),
("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
(",", "Comma", Some(token::Comma)),
(";", "Semicolon", Some(token::Semi)),

View file

@ -21,6 +21,8 @@ use crate::errors::{
use crate::fluent_generated as fluent;
use crate::parser;
use crate::parser::attr::InnerAttrPolicy;
use ast::token::IdentIsRaw;
use parser::Recovered;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
@ -264,7 +266,7 @@ impl<'a> Parser<'a> {
pub(super) fn expected_ident_found(
&mut self,
recover: bool,
) -> PResult<'a, (Ident, /* is_raw */ bool)> {
) -> PResult<'a, (Ident, IdentIsRaw)> {
if let TokenKind::DocComment(..) = self.prev_token.kind {
return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
span: self.prev_token.span,
@ -290,11 +292,11 @@ impl<'a> Parser<'a> {
let bad_token = self.token.clone();
// suggest prepending a keyword in identifier position with `r#`
let suggest_raw = if let Some((ident, false)) = self.token.ident()
let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.is_raw_guess()
&& self.look_ahead(1, |t| valid_follow.contains(&t.kind))
{
recovered_ident = Some((ident, true));
recovered_ident = Some((ident, IdentIsRaw::Yes));
// `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
// which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
@ -320,7 +322,7 @@ impl<'a> Parser<'a> {
let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
let (invalid, valid) = self.token.span.split_at(len as u32);
recovered_ident = Some((Ident::new(valid_portion, valid), false));
recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No));
HelpIdentifierStartsWithNumber { num_span: invalid }
});
@ -429,7 +431,7 @@ impl<'a> Parser<'a> {
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
) -> PResult<'a, Recovered> {
debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
fn tokens_to_string(tokens: &[TokenType]) -> String {
let mut i = tokens.iter();
@ -532,7 +534,7 @@ impl<'a> Parser<'a> {
sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
});
self.bump();
return Ok(true);
return Ok(Recovered::Yes);
} else if self.look_ahead(0, |t| {
t == &token::CloseDelim(Delimiter::Brace)
|| ((t.can_begin_expr() || t.can_begin_item())
@ -556,7 +558,7 @@ impl<'a> Parser<'a> {
unexpected_token_label: Some(self.token.span),
sugg: ExpectedSemiSugg::AddSemi(span),
});
return Ok(true);
return Ok(Recovered::Yes);
}
}
@ -653,9 +655,9 @@ impl<'a> Parser<'a> {
// positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
// that in the parser requires unbounded lookahead, so we only add a hint to the existing
// error rather than replacing it entirely.
if ((self.prev_token.kind == TokenKind::Ident(sym::c, false)
if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
&& matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
|| (self.prev_token.kind == TokenKind::Ident(sym::cr, false)
|| (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
&& matches!(
&self.token.kind,
TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
@ -711,7 +713,7 @@ impl<'a> Parser<'a> {
if self.check_too_many_raw_str_terminators(&mut err) {
if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
err.emit();
return Ok(true);
return Ok(Recovered::Yes);
} else {
return Err(err);
}
@ -1223,7 +1225,7 @@ impl<'a> Parser<'a> {
|p| p.parse_generic_arg(None),
);
match x {
Ok((_, _, false)) => {
Ok((_, _, Recovered::No)) => {
if self.eat(&token::Gt) {
// We made sense of it. Improve the error message.
e.span_suggestion_verbose(
@ -1247,7 +1249,7 @@ impl<'a> Parser<'a> {
}
}
}
Ok((_, _, true)) => {}
Ok((_, _, Recovered::Yes)) => {}
Err(err) => {
err.cancel();
}
@ -1286,7 +1288,7 @@ impl<'a> Parser<'a> {
err: &mut ComparisonOperatorsCannotBeChained,
inner_op: &Expr,
outer_op: &Spanned<AssocOp>,
) -> bool /* advanced the cursor */ {
) -> Recovered {
if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
if let ExprKind::Field(_, ident) = l1.kind
&& ident.as_str().parse::<i32>().is_err()
@ -1294,7 +1296,7 @@ impl<'a> Parser<'a> {
{
// The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
// suggestion being the only one to apply is high.
return false;
return Recovered::No;
}
return match (op.node, &outer_op.node) {
// `x == y == z`
@ -1313,7 +1315,7 @@ impl<'a> Parser<'a> {
span: inner_op.span.shrink_to_hi(),
middle_term: expr_to_str(r1),
});
false // Keep the current parse behavior, where the AST is `(x < y) < z`.
Recovered::No // Keep the current parse behavior, where the AST is `(x < y) < z`.
}
// `x == y < z`
(BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
@ -1327,12 +1329,12 @@ impl<'a> Parser<'a> {
left: r1.span.shrink_to_lo(),
right: r2.span.shrink_to_hi(),
});
true
Recovered::Yes
}
Err(expr_err) => {
expr_err.cancel();
self.restore_snapshot(snapshot);
false
Recovered::Yes
}
}
}
@ -1347,19 +1349,19 @@ impl<'a> Parser<'a> {
left: l1.span.shrink_to_lo(),
right: r1.span.shrink_to_hi(),
});
true
Recovered::Yes
}
Err(expr_err) => {
expr_err.cancel();
self.restore_snapshot(snapshot);
false
Recovered::No
}
}
}
_ => false,
_ => Recovered::No,
};
}
false
Recovered::No
}
/// Produces an error if comparison operators are chained (RFC #558).
@ -1487,8 +1489,9 @@ impl<'a> Parser<'a> {
// If it looks like a genuine attempt to chain operators (as opposed to a
// misformatted turbofish, for instance), suggest a correct form.
if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
{
let recovered = self
.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
if matches!(recovered, Recovered::Yes) {
self.dcx().emit_err(err);
mk_err_expr(self, inner_op.span.to(self.prev_token.span))
} else {
@ -1500,7 +1503,7 @@ impl<'a> Parser<'a> {
let recover =
self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
self.dcx().emit_err(err);
if recover {
if matches!(recover, Recovered::Yes) {
return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
}
}
@ -1840,10 +1843,7 @@ impl<'a> Parser<'a> {
/// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
/// closing delimiter.
pub(super) fn unexpected_try_recover(
&mut self,
t: &TokenKind,
) -> PResult<'a, bool /* recovered */> {
pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
let token_str = pprust::token_kind_to_string(t);
let this_token_str = super::token_descr(&self.token);
let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {

View file

@ -3,13 +3,14 @@ use super::diagnostics::SnapshotParser;
use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions,
SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken,
};
use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
use ast::mut_visit::{noop_visit_expr, MutVisitor};
use ast::token::IdentIsRaw;
use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment};
use core::mem;
use rustc_ast::ptr::P;
@ -126,7 +127,7 @@ impl<'a> Parser<'a> {
match self.parse_expr_res(restrictions, None) {
Ok(expr) => Ok(expr),
Err(err) => match self.token.ident() {
Some((Ident { name: kw::Underscore, .. }, false))
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
{
// Special-case handling of `foo(_, _, _)`
@ -457,7 +458,9 @@ impl<'a> Parser<'a> {
return None;
}
(Some(op), _) => (op, self.token.span),
(None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
(None, Some((Ident { name: sym::and, span }, IdentIsRaw::No)))
if self.may_recover() =>
{
self.dcx().emit_err(errors::InvalidLogicalOperator {
span: self.token.span,
incorrect: "and".into(),
@ -465,7 +468,7 @@ impl<'a> Parser<'a> {
});
(AssocOp::LAnd, span)
}
(None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
(None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => {
self.dcx().emit_err(errors::InvalidLogicalOperator {
span: self.token.span,
incorrect: "or".into(),
@ -742,7 +745,7 @@ impl<'a> Parser<'a> {
(
// `foo: `
ExprKind::Path(None, ast::Path { segments, .. }),
token::Ident(kw::For | kw::Loop | kw::While, false),
token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No),
) if segments.len() == 1 => {
let snapshot = self.create_snapshot_for_diagnostic();
let label = Label {
@ -955,19 +958,20 @@ impl<'a> Parser<'a> {
fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
loop {
let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
// we are using noexpect here because we don't expect a `?` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Question)
} else {
self.eat(&token::Question)
};
let has_question =
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// we are using noexpect here because we don't expect a `?` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Question)
} else {
self.eat(&token::Question)
};
if has_question {
// `expr?`
e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
continue;
}
let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// we are using noexpect here because we don't expect a `.` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Dot)
@ -1126,19 +1130,19 @@ impl<'a> Parser<'a> {
// 1.
DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
assert!(suffix.is_none());
self.token = Token::new(token::Ident(sym, false), ident_span);
self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
}
// 1.2 | 1.2e3
DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
self.token = Token::new(token::Ident(symbol1, false), ident1_span);
self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span);
// This needs to be `Spacing::Alone` to prevent regressions.
// See issue #76399 and PR #76285 for more details
let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
let base1 =
self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span);
self.bump_with((next_token2, self.token_spacing)); // `.`
self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
}
@ -1555,7 +1559,7 @@ impl<'a> Parser<'a> {
return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err));
}
};
let kind = if es.len() == 1 && !trailing_comma {
let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) {
// `(e)` is parenthesized `e`.
ExprKind::Paren(es.into_iter().next().unwrap())
} else {
@ -1946,7 +1950,7 @@ impl<'a> Parser<'a> {
self.bump(); // `builtin`
self.bump(); // `#`
let Some((ident, false)) = self.token.ident() else {
let Some((ident, IdentIsRaw::No)) = self.token.ident() else {
let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
return Err(err);
};
@ -3087,10 +3091,10 @@ impl<'a> Parser<'a> {
if !require_comma {
arm_body = Some(expr);
this.eat(&token::Comma);
Ok(false)
Ok(Recovered::No)
} else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
arm_body = Some(body);
Ok(true)
Ok(Recovered::Yes)
} else {
let expr_span = expr.span;
arm_body = Some(expr);
@ -3171,7 +3175,7 @@ impl<'a> Parser<'a> {
this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
span: arm_span.shrink_to_hi(),
});
return Ok(true);
return Ok(Recovered::Yes);
}
Err(err)
});
@ -3574,7 +3578,7 @@ impl<'a> Parser<'a> {
fn find_struct_error_after_field_looking_code(&self) -> Option<ExprField> {
match self.token.ident() {
Some((ident, is_raw))
if (is_raw || !ident.is_reserved())
if (matches!(is_raw, IdentIsRaw::Yes) || !ident.is_reserved())
&& self.look_ahead(1, |t| *t == token::Colon) =>
{
Some(ast::ExprField {

View file

@ -1,8 +1,12 @@
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
use super::{
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, Trailing,
TrailingToken,
};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::fluent_generated as fluent;
use ast::token::IdentIsRaw;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
@ -1076,7 +1080,7 @@ impl<'a> Parser<'a> {
fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident @ Ident { name: kw::Underscore, .. }, false)) => {
Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => {
self.bump();
Ok(ident)
}
@ -1453,7 +1457,7 @@ impl<'a> Parser<'a> {
let (variants, _) = if self.token == TokenKind::Semi {
self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
self.bump();
(thin_vec![], false)
(thin_vec![], Trailing::No)
} else {
self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
.map_err(|mut err| {
@ -1530,10 +1534,10 @@ impl<'a> Parser<'a> {
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
(thin_vec![], true)
(thin_vec![], Recovered::Yes)
}
};
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
@ -1618,7 +1622,7 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
}
// No `where` so: `struct Foo<T>;`
} else if self.eat(&token::Semi) {
@ -1630,7 +1634,7 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
@ -1659,14 +1663,14 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
} else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) = self.parse_record_struct_body(
"union",
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
} else {
let token_str = super::token_descr(&self.token);
let msg = format!("expected `where` or `{{` after union name, found {token_str}");
@ -1683,14 +1687,14 @@ impl<'a> Parser<'a> {
adt_ty: &str,
ident_span: Span,
parsed_where: bool,
) -> PResult<'a, (ThinVec<FieldDef>, /* recovered */ bool)> {
) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> {
let mut fields = ThinVec::new();
let mut recovered = false;
let mut recovered = Recovered::No;
if self.eat(&token::OpenDelim(Delimiter::Brace)) {
while self.token != token::CloseDelim(Delimiter::Brace) {
let field = self.parse_field_def(adt_ty).map_err(|e| {
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
recovered = true;
recovered = Recovered::Yes;
e
});
match field {
@ -1962,7 +1966,7 @@ impl<'a> Parser<'a> {
let (ident, is_raw) = self.ident_or_err(true)?;
if ident.name == kw::Underscore {
self.sess.gated_spans.gate(sym::unnamed_fields, lo);
} else if !is_raw && ident.is_reserved() {
} else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
let snapshot = self.create_snapshot_for_diagnostic();
let err = if self.check_fn_front_matter(false, Case::Sensitive) {
let inherited_vis = Visibility {
@ -2461,8 +2465,8 @@ impl<'a> Parser<'a> {
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
match self.expect_one_of(&[], &[]) {
Ok(true) => {}
Ok(false) => unreachable!(),
Ok(Recovered::Yes) => {}
Ok(Recovered::No) => unreachable!(),
Err(mut err) => {
// Qualifier keywords ordering check
enum WrongKw {
@ -2740,7 +2744,7 @@ impl<'a> Parser<'a> {
fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
// Extract an identifier *after* having confirmed that the token is one.
let expect_self_ident = |this: &mut Self| match this.token.ident() {
Some((ident, false)) => {
Some((ident, IdentIsRaw::No)) => {
this.bump();
ident
}

View file

@ -11,6 +11,7 @@ mod stmt;
mod ty;
use crate::lexer::UnmatchedDelim;
use ast::token::IdentIsRaw;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
@ -357,6 +358,25 @@ pub enum FollowedByType {
No,
}
/// Whether a function performed recovery
#[derive(Copy, Clone, Debug)]
pub enum Recovered {
No,
Yes,
}
impl From<Recovered> for bool {
fn from(r: Recovered) -> bool {
matches!(r, Recovered::Yes)
}
}
#[derive(Copy, Clone, Debug)]
pub enum Trailing {
No,
Yes,
}
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum TokenDescription {
ReservedIdentifier,
@ -455,11 +475,11 @@ impl<'a> Parser<'a> {
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
if self.expected_tokens.is_empty() {
if self.token == *t {
self.bump();
Ok(false)
Ok(Recovered::No)
} else {
self.unexpected_try_recover(t)
}
@ -475,13 +495,13 @@ impl<'a> Parser<'a> {
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
) -> PResult<'a, Recovered> {
if edible.contains(&self.token.kind) {
self.bump();
Ok(false)
Ok(Recovered::No)
} else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(false)
Ok(Recovered::No)
} else if self.token.kind != token::Eof
&& self.last_unexpected_token_span == Some(self.token.span)
{
@ -499,7 +519,7 @@ impl<'a> Parser<'a> {
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
let (ident, is_raw) = self.ident_or_err(recover)?;
if !is_raw && ident.is_reserved() {
if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
let err = self.expected_ident_found_err();
if recover {
err.emit();
@ -511,7 +531,7 @@ impl<'a> Parser<'a> {
Ok(ident)
}
fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> {
fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
match self.token.ident() {
Some(ident) => Ok(ident),
None => self.expected_ident_found(recover),
@ -568,7 +588,7 @@ impl<'a> Parser<'a> {
}
if case == Case::Insensitive
&& let Some((ident, /* is_raw */ false)) = self.token.ident()
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
{
true
@ -598,7 +618,7 @@ impl<'a> Parser<'a> {
}
if case == Case::Insensitive
&& let Some((ident, /* is_raw */ false)) = self.token.ident()
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
{
self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
@ -783,10 +803,10 @@ impl<'a> Parser<'a> {
sep: SeqSep,
expect: TokenExpectType,
mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
let mut first = true;
let mut recovered = false;
let mut trailing = false;
let mut recovered = Recovered::No;
let mut trailing = Trailing::No;
let mut v = ThinVec::new();
while !self.expect_any_with_type(kets, expect) {
@ -800,12 +820,12 @@ impl<'a> Parser<'a> {
} else {
// check for separator
match self.expect(t) {
Ok(false) /* not recovered */ => {
Ok(Recovered::No) => {
self.current_closure.take();
}
Ok(true) /* recovered */ => {
Ok(Recovered::Yes) => {
self.current_closure.take();
recovered = true;
recovered = Recovered::Yes;
break;
}
Err(mut expect_err) => {
@ -900,7 +920,7 @@ impl<'a> Parser<'a> {
}
}
if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
trailing = true;
trailing = Trailing::Yes;
break;
}
@ -978,7 +998,7 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
}
@ -990,9 +1010,9 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
) -> PResult<'a, (ThinVec<T>, Trailing)> {
let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if !recovered {
if matches!(recovered, Recovered::No) {
self.eat(ket);
}
Ok((val, trailing))
@ -1007,7 +1027,7 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.expect(bra)?;
self.parse_seq_to_end(ket, sep, f)
}
@ -1019,7 +1039,7 @@ impl<'a> Parser<'a> {
&mut self,
delim: Delimiter,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
@ -1034,7 +1054,7 @@ impl<'a> Parser<'a> {
fn parse_paren_comma_seq<T>(
&mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
}
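
Both new enums follow the same pattern: replace an anonymous `bool` return with a named two-variant type so call sites read as intent rather than as bare `true`/`false`. A compressed sketch of the shape of the change, with simplified signatures rather than the parser's real ones:

```rust
// Simplified sketch of the bool -> enum conversion applied across the parser.
#[derive(Copy, Clone, Debug)]
enum Recovered {
    No,
    Yes,
}

// Before: `fn expect_semi(...) -> bool /* recovered */`.
// After: the outcome is spelled out at every call site.
fn expect_semi(next_is_semi: bool) -> Recovered {
    if next_is_semi { Recovered::No } else { Recovered::Yes }
}

fn main() {
    match expect_semi(false) {
        Recovered::Yes => println!("emitted an error and recovered"),
        Recovered::No => println!("consumed the token normally"),
    }
}
```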

View file

@ -201,6 +201,6 @@ impl<'a> Parser<'a> {
/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> {
token.ident().filter(|(ident, _)| ident.name != kw::Underscore)
}

View file

@ -1,4 +1,4 @@
use super::{ForceCollect, Parser, PathStyle, Restrictions, TrailingToken};
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, TrailingToken};
use crate::errors::{
self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
@ -311,7 +311,7 @@ impl<'a> Parser<'a> {
matches!(
&token.uninterpolate().kind,
token::FatArrow // e.g. `a | => 0,`.
| token::Ident(kw::If, false) // e.g. `a | if expr`.
| token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`.
| token::Eq // e.g. `let a | = 0`.
| token::Semi // e.g. `let a |;`.
| token::Colon // e.g. `let a | :`.
@ -696,7 +696,9 @@ impl<'a> Parser<'a> {
// Here, `(pat,)` is a tuple pattern.
// For backward compatibility, `(..)` is a tuple pattern as well.
Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
let paren_pattern =
fields.len() == 1 && !(matches!(trailing_comma, Trailing::Yes) || fields[0].is_rest());
if paren_pattern {
let pat = fields.into_iter().next().unwrap();
let close_paren = self.prev_token.span;
@ -714,7 +716,7 @@ impl<'a> Parser<'a> {
},
});
self.parse_pat_range_begin_with(begin.clone(), form)?
self.parse_pat_range_begin_with(begin.clone(), form)
}
// recover ranges with parentheses around the `(start)..`
PatKind::Err(_)
@ -729,15 +731,15 @@ impl<'a> Parser<'a> {
},
});
self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)?
self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)
}
// (pat) with optional parentheses
_ => PatKind::Paren(pat),
_ => Ok(PatKind::Paren(pat)),
}
} else {
PatKind::Tuple(fields)
})
Ok(PatKind::Tuple(fields))
}
}
/// Parse a mutable binding with the `mut` token already eaten.

View file

@ -2,6 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::errors::PathSingleColon;
use crate::{errors, maybe_whole};
use ast::token::IdentIsRaw;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::{
@ -390,7 +391,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident, false)) if ident.is_path_segment_keyword() => {
Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => {
self.bump();
Ok(ident)
}

View file

@ -11,6 +11,7 @@ use crate::errors;
use crate::maybe_whole;
use crate::errors::MalformedLoopLabel;
use crate::parser::Recovered;
use ast::Label;
use rustc_ast as ast;
use rustc_ast::ptr::P;
@ -661,7 +662,6 @@ impl<'a> Parser<'a> {
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
{
// Just check for errors and recover; do not eat semicolon yet.
// `expect_one_of` returns PResult<'a, bool /* recovered */>
let expect_result =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
@ -669,7 +669,7 @@ impl<'a> Parser<'a> {
let replace_with_err = 'break_recover: {
match expect_result {
// Recover from parser, skip type error to avoid extra errors.
Ok(true) => true,
Ok(Recovered::Yes) => true,
Err(e) => {
if self.recover_colon_as_semi() {
// recover_colon_as_semi has already emitted a nicer error.
@ -691,7 +691,7 @@ impl<'a> Parser<'a> {
token.kind,
token::Ident(
kw::For | kw::Loop | kw::While,
false
token::IdentIsRaw::No
) | token::OpenDelim(Delimiter::Brace)
)
})
@ -735,7 +735,7 @@ impl<'a> Parser<'a> {
true
}
Ok(false) => false,
Ok(Recovered::No) => false,
}
};

View file

@ -1,4 +1,4 @@
use super::{Parser, PathStyle, TokenType};
use super::{Parser, PathStyle, TokenType, Trailing};
use crate::errors::{
self, DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType,
@ -415,7 +415,7 @@ impl<'a> Parser<'a> {
Ok(ty)
})?;
if ts.len() == 1 && !trailing {
if ts.len() == 1 && matches!(trailing, Trailing::No) {
let ty = ts.into_iter().next().unwrap().into_inner();
let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
match ty.kind {

View file

@ -1189,6 +1189,25 @@ impl<'p, Cx: TypeCx> Matrix<'p, Cx> {
}
Ok(matrix)
}
/// Recover row usefulness and intersection information from a processed specialized matrix.
/// `specialized` must come from `self.specialize_constructor`.
fn unspecialize(&mut self, specialized: Self) {
for child_row in specialized.rows() {
let parent_row_id = child_row.parent_row;
let parent_row = &mut self.rows[parent_row_id];
// A parent row is useful if any of its children is.
parent_row.useful |= child_row.useful;
for child_intersection in child_row.intersects.iter() {
// Convert the intersecting ids into ids for the parent matrix.
let parent_intersection = specialized.rows[child_intersection].parent_row;
// Note: self-intersection can happen with or-patterns.
if parent_intersection != parent_row_id {
parent_row.intersects.insert(parent_intersection);
}
}
}
}
}
/// Pretty-printer for matrices of patterns, example:
@ -1558,21 +1577,6 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>(
// Accumulate the found witnesses.
ret.extend(witnesses);
for child_row in spec_matrix.rows() {
let parent_row_id = child_row.parent_row;
let parent_row = &mut matrix.rows[parent_row_id];
// A parent row is useful if any of its children is.
parent_row.useful |= child_row.useful;
for child_intersection in child_row.intersects.iter() {
// Convert the intersecting ids into ids for the parent matrix.
let parent_intersection = spec_matrix.rows[child_intersection].parent_row;
// Note: self-intersection can happen with or-patterns.
if parent_intersection != parent_row_id {
parent_row.intersects.insert(parent_intersection);
}
}
}
// Detect ranges that overlap on their endpoints.
if let Constructor::IntRange(overlap_range) = ctor {
if overlap_range.is_singleton()
@ -1582,6 +1586,8 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>(
collect_overlapping_range_endpoints(mcx, overlap_range, matrix, &spec_matrix);
}
}
matrix.unspecialize(spec_matrix);
}
// Record usefulness in the patterns.
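
The newly factored-out `unspecialize` replaces the loop that previously lived inline in `compute_exhaustiveness_and_usefulness`. Its core bookkeeping is small enough to model in isolation; the sketch below uses plain slices and illustrative names rather than the real `Matrix`/`MatrixRow` types, and it omits the intersection propagation the real method also performs.

```rust
// Toy model of unspecialize: child-row results flow back into parent rows.
struct Row {
    useful: bool,
    // Index of the row in the parent matrix this row was specialized from.
    parent_row: usize,
}

fn unspecialize(parents: &mut [Row], children: &[Row]) {
    for child in children {
        // A parent row is useful if any of its children is.
        parents[child.parent_row].useful |= child.useful;
    }
}

fn main() {
    let mut parents = vec![Row { useful: false, parent_row: 0 }];
    let children = vec![Row { useful: true, parent_row: 0 }];
    unspecialize(&mut parents, &children);
    assert!(parents[0].useful);
}
```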

View file

@ -1111,7 +1111,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
suggestions.extend(
tmp_suggestions
.into_iter()
.filter(|s| use_prelude || this.is_builtin_macro(s.res)),
.filter(|s| use_prelude.into() || this.is_builtin_macro(s.res)),
);
}
}

View file

@ -23,6 +23,18 @@ use Namespace::*;
type Visibility = ty::Visibility<LocalDefId>;
#[derive(Copy, Clone)]
pub enum UsePrelude {
No,
Yes,
}
impl From<UsePrelude> for bool {
fn from(up: UsePrelude) -> bool {
matches!(up, UsePrelude::Yes)
}
}
impl<'a, 'tcx> Resolver<'a, 'tcx> {
/// A generic scope visitor.
/// Visits scopes in order to resolve some identifier in them or perform other actions.
@ -32,12 +44,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
scope_set: ScopeSet<'a>,
parent_scope: &ParentScope<'a>,
ctxt: SyntaxContext,
mut visitor: impl FnMut(
&mut Self,
Scope<'a>,
/*use_prelude*/ bool,
SyntaxContext,
) -> Option<T>,
mut visitor: impl FnMut(&mut Self, Scope<'a>, UsePrelude, SyntaxContext) -> Option<T>,
) -> Option<T> {
// General principles:
// 1. Not controlled (user-defined) names should have higher priority than controlled names
@ -133,6 +140,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
};
if visit {
let use_prelude = if use_prelude { UsePrelude::Yes } else { UsePrelude::No };
if let break_result @ Some(..) = visitor(self, scope, use_prelude, ctxt) {
return break_result;
}
@ -579,7 +587,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
None,
ignore_binding,
) {
if use_prelude || this.is_builtin_macro(binding.res()) {
if matches!(use_prelude, UsePrelude::Yes)
|| this.is_builtin_macro(binding.res())
{
result = Ok((binding, Flags::MISC_FROM_PRELUDE));
}
}

View file

@ -8,8 +8,7 @@ pub fn target() -> Target {
arch: "arm".into(),
options: TargetOptions {
abi: "eabihf".into(),
// FIXME: change env to "gnu" when cfg_target_abi becomes stable
env: "gnueabihf".into(),
env: "gnu".into(),
features: "+v6,+vfp2,-d32".into(),
max_atomic_width: Some(64),
mcount: "\u{1}__gnu_mcount_nc".into(),

View file

@ -8,8 +8,6 @@ pub fn target() -> Target {
arch: "arm".into(),
options: TargetOptions {
abi: "eabihf".into(),
// FIXME: remove env when cfg_target_abi becomes stable
env: "eabihf".into(),
features: "+v6,+vfp2,-d32".into(),
max_atomic_width: Some(64),
mcount: "__mcount".into(),

View file

@ -8,8 +8,7 @@ pub fn target() -> Target {
arch: "arm".into(),
options: TargetOptions {
abi: "eabihf".into(),
// FIXME: change env to "gnu" when cfg_target_abi becomes stable
env: "gnueabihf".into(),
env: "gnu".into(),
features: "+v7,+vfp3,-d32,+thumb2,-neon".into(),
max_atomic_width: Some(64),
mcount: "\u{1}__gnu_mcount_nc".into(),

View file

@ -8,8 +8,6 @@ pub fn target() -> Target {
arch: "arm".into(),
options: TargetOptions {
abi: "eabihf".into(),
// FIXME: remove env when cfg_target_abi becomes stable
env: "eabihf".into(),
features: "+v7,+vfp3,-d32,+thumb2,-neon".into(),
max_atomic_width: Some(64),
mcount: "__mcount".into(),

View file

@ -601,9 +601,24 @@ pub fn trait_ref_is_local_or_fundamental<'tcx>(
trait_ref.def_id.krate == LOCAL_CRATE || tcx.has_attr(trait_ref.def_id, sym::fundamental)
}
#[derive(Debug, Copy, Clone)]
pub enum IsFirstInputType {
No,
Yes,
}
impl From<bool> for IsFirstInputType {
fn from(b: bool) -> IsFirstInputType {
match b {
false => IsFirstInputType::No,
true => IsFirstInputType::Yes,
}
}
}
#[derive(Debug)]
pub enum OrphanCheckErr<'tcx> {
NonLocalInputType(Vec<(Ty<'tcx>, bool /* Is this the first input type? */)>),
NonLocalInputType(Vec<(Ty<'tcx>, IsFirstInputType)>),
UncoveredTy(Ty<'tcx>, Option<Ty<'tcx>>),
}
@ -754,7 +769,7 @@ struct OrphanChecker<'tcx, F> {
/// Ignore orphan check failures and exclusively search for the first
/// local type.
search_first_local_ty: bool,
non_local_tys: Vec<(Ty<'tcx>, bool)>,
non_local_tys: Vec<(Ty<'tcx>, IsFirstInputType)>,
}
impl<'tcx, F, E> OrphanChecker<'tcx, F>
@ -772,7 +787,7 @@ where
}
fn found_non_local_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<OrphanCheckEarlyExit<'tcx, E>> {
self.non_local_tys.push((t, self.in_self_ty));
self.non_local_tys.push((t, self.in_self_ty.into()));
ControlFlow::Continue(())
}
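
The `From<bool>` impl exists so the push site above can stay a one-liner: `self.in_self_ty.into()` tags each non-local type with whether it was the first (`Self`) input. A minimal usage sketch with a local copy of the enum and conversion:

```rust
// Local copy of the enum and conversion, to show how `.into()` reads at the
// push site in `found_non_local_ty`.
#[derive(Debug, Copy, Clone)]
enum IsFirstInputType {
    No,
    Yes,
}

impl From<bool> for IsFirstInputType {
    fn from(b: bool) -> IsFirstInputType {
        if b { IsFirstInputType::Yes } else { IsFirstInputType::No }
    }
}

fn main() {
    let in_self_ty = true;
    let tagged: IsFirstInputType = in_self_ty.into();
    println!("{tagged:?}"); // Yes
}
```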

View file

@ -42,7 +42,7 @@ use std::fmt::Debug;
use std::ops::ControlFlow;
pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls};
pub use self::coherence::{OrphanCheckErr, OverlapResult};
pub use self::coherence::{IsFirstInputType, OrphanCheckErr, OverlapResult};
pub use self::engine::{ObligationCtxt, TraitEngineExt};
pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation};
pub use self::normalize::NormalizeExt;

View file

@ -396,6 +396,7 @@ pub trait AsMut<T: ?Sized> {
/// For example, take this code:
///
/// ```
/// # #![cfg_attr(not(bootstrap), allow(non_local_definitions))]
/// struct Wrapper<T>(Vec<T>);
/// impl<T> From<Wrapper<T>> for Vec<T> {
/// fn from(w: Wrapper<T>) -> Vec<T> {

View file

@ -454,6 +454,7 @@ pub trait Hasher {
/// ```
/// #![feature(hasher_prefixfree_extras)]
/// # // Stubs to make the `impl` below pass the compiler
/// # #![cfg_attr(not(bootstrap), allow(non_local_definitions))]
/// # struct MyCollection<T>(Option<T>);
/// # impl<T> MyCollection<T> {
/// # fn len(&self) -> usize { todo!() }

View file

@ -49,6 +49,7 @@ fn test_iterator_step_by_nth() {
}
#[test]
#[cfg_attr(not(bootstrap), allow(non_local_definitions))]
fn test_iterator_step_by_nth_overflow() {
#[cfg(target_pointer_width = "16")]
type Bigger = u32;

View file

@ -195,6 +195,7 @@ pub fn test_unwrap_or_default() {
}
#[test]
#[cfg_attr(not(bootstrap), allow(non_local_definitions))]
pub fn test_into_ok() {
fn infallible_op() -> Result<isize, !> {
Ok(666)
@ -217,6 +218,7 @@ pub fn test_into_ok() {
}
#[test]
#[cfg_attr(not(bootstrap), allow(non_local_definitions))]
pub fn test_into_err() {
fn until_error_op() -> Result<!, isize> {
Err(666)

View file

@ -321,6 +321,21 @@ pub unsafe fn NtWriteFile(
}
}
// Use raw-dylib to import ProcessPrng as we can't rely on there being an import library.
cfg_if::cfg_if! {
if #[cfg(not(target_vendor = "win7"))] {
#[cfg(target_arch = "x86")]
#[link(name = "bcryptprimitives", kind = "raw-dylib", import_name_type = "undecorated")]
extern "system" {
pub fn ProcessPrng(pbdata: *mut u8, cbdata: usize) -> BOOL;
}
#[cfg(not(target_arch = "x86"))]
#[link(name = "bcryptprimitives", kind = "raw-dylib")]
extern "system" {
pub fn ProcessPrng(pbdata: *mut u8, cbdata: usize) -> BOOL;
}
}}
// Functions that aren't available on every version of Windows that we support,
// but we still use them and just provide some form of a fallback implementation.
compat_fn_with_fallback! {

View file

@ -2180,10 +2180,6 @@ Windows.Win32.Networking.WinSock.WSATRY_AGAIN
Windows.Win32.Networking.WinSock.WSATYPE_NOT_FOUND
Windows.Win32.Networking.WinSock.WSAVERNOTSUPPORTED
Windows.Win32.Security.Authentication.Identity.RtlGenRandom
Windows.Win32.Security.Cryptography.BCRYPT_ALG_HANDLE
Windows.Win32.Security.Cryptography.BCRYPT_USE_SYSTEM_PREFERRED_RNG
Windows.Win32.Security.Cryptography.BCryptGenRandom
Windows.Win32.Security.Cryptography.BCRYPTGENRANDOM_FLAGS
Windows.Win32.Security.SECURITY_ATTRIBUTES
Windows.Win32.Security.TOKEN_ACCESS_MASK
Windows.Win32.Security.TOKEN_ACCESS_PSEUDO_HANDLE

View file

@ -15,15 +15,6 @@ extern "system" {
pub fn RtlGenRandom(randombuffer: *mut ::core::ffi::c_void, randombufferlength: u32)
-> BOOLEAN;
}
#[link(name = "bcrypt")]
extern "system" {
pub fn BCryptGenRandom(
halgorithm: BCRYPT_ALG_HANDLE,
pbbuffer: *mut u8,
cbbuffer: u32,
dwflags: BCRYPTGENRANDOM_FLAGS,
) -> NTSTATUS;
}
#[link(name = "kernel32")]
extern "system" {
pub fn AcquireSRWLockExclusive(srwlock: *mut SRWLOCK) -> ();
@ -889,9 +880,6 @@ impl ::core::clone::Clone for ARM64_NT_NEON128_0 {
*self
}
}
pub type BCRYPTGENRANDOM_FLAGS = u32;
pub type BCRYPT_ALG_HANDLE = *mut ::core::ffi::c_void;
pub const BCRYPT_USE_SYSTEM_PREFERRED_RNG: BCRYPTGENRANDOM_FLAGS = 2u32;
pub const BELOW_NORMAL_PRIORITY_CLASS: PROCESS_CREATION_FLAGS = 16384u32;
pub type BOOL = i32;
pub type BOOLEAN = u8;

View file

@ -1,42 +1,27 @@
use crate::mem;
use crate::ptr;
use crate::sys::c;
use core::mem;
use core::ptr;
#[cfg(not(target_vendor = "win7"))]
#[inline]
pub fn hashmap_random_keys() -> (u64, u64) {
let mut v = (0, 0);
let ret = unsafe {
c::BCryptGenRandom(
ptr::null_mut(),
core::ptr::addr_of_mut!(v) as *mut u8,
mem::size_of_val(&v) as c::ULONG,
c::BCRYPT_USE_SYSTEM_PREFERRED_RNG,
)
};
if c::nt_success(ret) { v } else { fallback_rng() }
let ret = unsafe { c::ProcessPrng(ptr::addr_of_mut!(v).cast::<u8>(), mem::size_of_val(&v)) };
// ProcessPrng is documented as always returning `TRUE`.
// https://learn.microsoft.com/en-us/windows/win32/seccng/processprng#return-value
debug_assert_eq!(ret, c::TRUE);
v
}
/// Generate random numbers using the fallback RNG function (RtlGenRandom)
///
/// This is necessary because of a failure to load the SysWOW64 variant of the
/// bcryptprimitives.dll library from code that lives in bcrypt.dll
/// See <https://bugzilla.mozilla.org/show_bug.cgi?id=1788004#c9>
#[cfg(not(target_vendor = "uwp"))]
#[inline(never)]
fn fallback_rng() -> (u64, u64) {
#[cfg(target_vendor = "win7")]
pub fn hashmap_random_keys() -> (u64, u64) {
use crate::ffi::c_void;
use crate::io;
let mut v = (0, 0);
let ret = unsafe {
c::RtlGenRandom(core::ptr::addr_of_mut!(v) as *mut c_void, mem::size_of_val(&v) as c::ULONG)
c::RtlGenRandom(ptr::addr_of_mut!(v).cast::<c_void>(), mem::size_of_val(&v) as c::ULONG)
};
if ret != 0 { v } else { panic!("fallback RNG broken: {}", io::Error::last_os_error()) }
}
/// We can't use RtlGenRandom with UWP, so there is no fallback
#[cfg(target_vendor = "uwp")]
#[inline(never)]
fn fallback_rng() -> (u64, u64) {
panic!("fallback RNG broken: RtlGenRandom() not supported on UWP");
if ret != 0 { v } else { panic!("RNG broken: {}", io::Error::last_os_error()) }
}
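
The diff swaps `BCryptGenRandom` for `ProcessPrng`, imported via `raw-dylib` because an import library for `bcryptprimitives.dll` cannot be relied on. Below is a hedged, standalone sketch of the new call shape; the extern declaration mirrors the diff but is an assumption for illustration, not a copy of std's internal bindings.

```rust
// Windows-only sketch. ProcessPrng is documented to fill the buffer and
// always return TRUE (non-zero):
// https://learn.microsoft.com/en-us/windows/win32/seccng/processprng
#[cfg(windows)]
#[link(name = "bcryptprimitives", kind = "raw-dylib")]
extern "system" {
    fn ProcessPrng(pbdata: *mut u8, cbdata: usize) -> i32;
}

#[cfg(windows)]
fn hashmap_random_keys() -> (u64, u64) {
    let mut v = (0u64, 0u64);
    let ret = unsafe {
        ProcessPrng(core::ptr::addr_of_mut!(v).cast::<u8>(), core::mem::size_of_val(&v))
    };
    debug_assert_eq!(ret, 1); // TRUE
    v
}

#[cfg(windows)]
fn main() {
    println!("{:?}", hashmap_random_keys());
}

#[cfg(not(windows))]
fn main() {}
```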

View file

@ -3,7 +3,7 @@
#![feature(link_cfg)]
#![feature(staged_api)]
#![feature(c_unwind)]
#![feature(cfg_target_abi)]
#![cfg_attr(bootstrap, feature(cfg_target_abi))]
#![feature(strict_provenance)]
#![cfg_attr(not(target_env = "msvc"), feature(libc))]
#![allow(internal_features)]

View file

@ -367,7 +367,7 @@ impl Step for CodegenBackend {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RustAnalyzer {
pub target: TargetSelection,
}
@ -441,7 +441,7 @@ impl Step for RustAnalyzer {
macro_rules! tool_check_step {
($name:ident, $path:literal, $($alias:literal, )* $source_type:path $(, $default:literal )?) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct $name {
pub target: TargetSelection,
}
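
This file and the bootstrap files that follow all make the same mechanical change: drop `Copy` from the `#[derive(...)]` list on `Step` structs while keeping `Clone`. The language-level consequence is only that by-value uses now move. A short illustration, with the field type simplified to `String` for the example:

```rust
// Illustration only: without `Copy`, passing the struct by value moves it,
// so reusing it requires an explicit `.clone()`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Docs {
    host: String,
}

fn run(step: Docs) {
    println!("running {step:?}");
}

fn main() {
    let docs = Docs { host: "x86_64-unknown-linux-gnu".into() };
    run(docs.clone()); // explicit clone now that `Copy` is gone
    run(docs); // the final use may still move
}
```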

View file

@ -14,7 +14,7 @@ use crate::utils::cache::Interned;
use crate::utils::helpers::t;
use crate::{Build, Compiler, Mode, Subcommand};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CleanAll {}
impl Step for CleanAll {

View file

@ -727,7 +727,7 @@ fn apple_darwin_sign_file(file_path: &Path) {
assert!(status.success());
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StartupObjects {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1491,7 +1491,7 @@ pub fn compiler_file(
PathBuf::from(out.trim())
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Sysroot {
pub compiler: Compiler,
/// See [`Std::force_recompile`].
@ -1653,7 +1653,7 @@ impl Step for Sysroot {
}
}
#[derive(Debug, Copy, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)]
pub struct Assemble {
/// The compiler which we will produce in this step. Assemble itself will
/// take care of ensuring that the necessary prerequisites to do so exist,

View file

@ -50,7 +50,7 @@ fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool {
builder.config.tools.as_ref().map_or(true, |tools| tools.contains(tool))
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Docs {
pub host: TargetSelection,
}
@ -83,7 +83,7 @@ impl Step for Docs {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct JsonDocs {
pub host: TargetSelection,
}
@ -121,7 +121,7 @@ impl Step for JsonDocs {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustcDocs {
pub host: TargetSelection,
}
@ -308,7 +308,7 @@ fn make_win_dist(
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Mingw {
pub host: TargetSelection,
}
@ -348,7 +348,7 @@ impl Step for Mingw {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Rustc {
pub compiler: Compiler,
}
@ -476,7 +476,7 @@ impl Step for Rustc {
let man_src = builder.src.join("src/doc/man");
let man_dst = image.join("share/man/man1");
// don't use our `bootstrap::{copy, cp_r}`, because those try
// don't use our `bootstrap::{copy_internal, cp_r}`, because those try
// to hardlink, and we don't want to edit the source templates
for file_entry in builder.read_dir(&man_src) {
let page_src = file_entry.path();
@ -617,7 +617,7 @@ fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Std {
pub compiler: Compiler,
pub target: TargetSelection,
@ -664,7 +664,7 @@ impl Step for Std {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct RustcDev {
pub compiler: Compiler,
pub target: TargetSelection,
@ -723,7 +723,7 @@ impl Step for RustcDev {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Analysis {
pub compiler: Compiler,
pub target: TargetSelection,
@ -870,7 +870,7 @@ fn copy_src_dirs(
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Src;
impl Step for Src {
@ -931,7 +931,7 @@ impl Step for Src {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct PlainSourceTarball;
impl Step for PlainSourceTarball {
@ -1031,7 +1031,7 @@ impl Step for PlainSourceTarball {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Cargo {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1080,7 +1080,7 @@ impl Step for Cargo {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Rls {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1122,7 +1122,7 @@ impl Step for Rls {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct RustAnalyzer {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1164,7 +1164,7 @@ impl Step for RustAnalyzer {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Clippy {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1212,7 +1212,7 @@ impl Step for Clippy {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Miri {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1359,7 +1359,7 @@ impl Step for CodegenBackend {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Rustfmt {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1404,7 +1404,7 @@ impl Step for Rustfmt {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct RustDemangler {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1460,7 +1460,7 @@ impl Step for RustDemangler {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct Extended {
stage: u32,
host: TargetSelection,

View file

@ -32,7 +32,7 @@ macro_rules! submodule_helper {
macro_rules! book {
($($name:ident, $path:expr, $book_name:expr $(, submodule $(= $submodule:literal)? )? ;)+) => {
$(
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct $name {
target: TargetSelection,
}
@ -86,7 +86,7 @@ book!(
StyleGuide, "src/doc/style-guide", "style-guide";
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct UnstableBook {
target: TargetSelection,
}
@ -160,7 +160,7 @@ impl<P: Step> Step for RustbookSrc<P> {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct TheBook {
compiler: Compiler,
target: TargetSelection,
@ -286,7 +286,7 @@ fn invoke_rustdoc(
builder.run(&mut cmd);
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Standalone {
compiler: Compiler,
target: TargetSelection,
@ -389,7 +389,7 @@ impl Step for Standalone {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Releases {
compiler: Compiler,
target: TargetSelection,
@ -492,7 +492,7 @@ pub struct SharedAssetsPaths {
pub version_info: PathBuf,
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SharedAssets {
target: TargetSelection,
}
@ -872,7 +872,7 @@ macro_rules! tool_doc {
$(is_library = $is_library:expr,)?
$(crates = $crates:expr)?
) => {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct $tool {
target: TargetSelection,
}
@ -1021,7 +1021,7 @@ tool_doc!(
crates = ["bootstrap"]
);
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)]
pub struct ErrorIndex {
pub target: TargetSelection,
}
@ -1056,7 +1056,7 @@ impl Step for ErrorIndex {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct UnstableBookGen {
target: TargetSelection,
}
@ -1112,7 +1112,7 @@ fn symlink_dir_force(config: &Config, original: &Path, link: &Path) {
);
}
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustcBook {
pub compiler: Compiler,
pub target: TargetSelection,

View file

@ -159,7 +159,7 @@ macro_rules! install {
only_hosts: $only_hosts:expr,
$run_item:block $(, $c:ident)*;)+) => {
$(
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct $name {
pub compiler: Compiler,
pub target: TargetSelection,
@ -303,7 +303,7 @@ install!((self, builder, _config),
};
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Src {
pub stage: u32,
}

View file

@ -242,7 +242,7 @@ pub(crate) fn is_ci_llvm_modified(config: &Config) -> bool {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Llvm {
pub target: TargetSelection,
}
@ -815,7 +815,7 @@ fn get_var(var_base: &str, host: &str, target: &str) -> Option<OsString> {
.or_else(|| env::var_os(var_base))
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Lld {
pub target: TargetSelection,
}
@ -937,7 +937,7 @@ impl Step for Lld {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Sanitizers {
pub target: TargetSelection,
}
@ -1147,7 +1147,7 @@ impl HashStamp {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CrtBeginEnd {
pub target: TargetSelection,
}
@ -1215,7 +1215,7 @@ impl Step for CrtBeginEnd {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Libunwind {
pub target: TargetSelection,
}

View file

@ -10,7 +10,7 @@ use crate::core::config::TargetSelection;
use crate::utils::helpers::output;
use crate::Mode;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ExpandYamlAnchors;
impl Step for ExpandYamlAnchors {
@ -36,7 +36,7 @@ impl Step for ExpandYamlAnchors {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct BuildManifest;
impl Step for BuildManifest {
@ -75,7 +75,7 @@ impl Step for BuildManifest {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct BumpStage0;
impl Step for BumpStage0 {
@ -97,7 +97,7 @@ impl Step for BumpStage0 {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct ReplaceVersionPlaceholder;
impl Step for ReplaceVersionPlaceholder {
@ -119,7 +119,7 @@ impl Step for ReplaceVersionPlaceholder {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Miri {
stage: u32,
host: TargetSelection,
@ -178,7 +178,7 @@ impl Step for Miri {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct CollectLicenseMetadata;
impl Step for CollectLicenseMetadata {
@ -210,7 +210,7 @@ impl Step for CollectLicenseMetadata {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct GenerateCopyright;
impl Step for GenerateCopyright {
@ -240,7 +240,7 @@ impl Step for GenerateCopyright {
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct GenerateWindowsSys;
impl Step for GenerateWindowsSys {
@ -262,7 +262,7 @@ impl Step for GenerateWindowsSys {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenerateCompletions;
macro_rules! generate_completions {

View file

@ -233,7 +233,7 @@ fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) {
}
/// Creates a toolchain link for stage1 using `rustup`
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct Link;
impl Step for Link {
type Output = ();
@ -444,7 +444,7 @@ fn prompt_user(prompt: &str) -> io::Result<Option<PromptResult>> {
}
/// Installs `src/etc/pre-push.sh` as a Git hook
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct Hook;
impl Step for Hook {
@ -516,7 +516,7 @@ undesirable, simply delete the `pre-push` file from .git/hooks."
}
/// Sets up or displays `src/etc/rust_analyzer_settings.json`
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct Vscode;
impl Step for Vscode {

View file

@ -12,7 +12,7 @@ use crate::core::config::TargetSelection;
use crate::Compiler;
use std::process::{Command, Stdio};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct MirOptPanicAbortSyntheticTarget {
pub(crate) compiler: Compiler,
pub(crate) base: TargetSelection,

View file

@ -86,7 +86,7 @@ impl Step for CrateBootstrap {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Linkcheck {
host: TargetSelection,
}
@ -179,7 +179,7 @@ fn check_if_tidy_is_installed() -> bool {
.map_or(false, |status| status.success())
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct HtmlCheck {
target: TargetSelection,
}
@ -220,7 +220,7 @@ impl Step for HtmlCheck {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Cargotest {
stage: u32,
host: TargetSelection,
@ -266,7 +266,7 @@ impl Step for Cargotest {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Cargo {
stage: u32,
host: TargetSelection,
@ -327,7 +327,7 @@ impl Step for Cargo {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RustAnalyzer {
stage: u32,
host: TargetSelection,
@ -386,7 +386,7 @@ impl Step for RustAnalyzer {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Rustfmt {
stage: u32,
host: TargetSelection,
@ -433,7 +433,7 @@ impl Step for Rustfmt {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RustDemangler {
stage: u32,
host: TargetSelection,
@ -492,7 +492,7 @@ impl Step for RustDemangler {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Miri {
stage: u32,
host: TargetSelection,
@ -699,7 +699,7 @@ impl Step for Miri {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CompiletestTest {
host: TargetSelection,
}
@ -747,7 +747,7 @@ impl Step for CompiletestTest {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Clippy {
stage: u32,
host: TargetSelection,
@ -815,7 +815,7 @@ fn path_for_cargo(builder: &Builder<'_>, compiler: Compiler) -> OsString {
env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("")
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustdocTheme {
pub compiler: Compiler,
}
@ -852,7 +852,7 @@ impl Step for RustdocTheme {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustdocJSStd {
pub target: TargetSelection,
}
@ -912,7 +912,7 @@ impl Step for RustdocJSStd {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustdocJSNotStd {
pub target: TargetSelection,
pub compiler: Compiler,
@ -966,7 +966,7 @@ fn get_browser_ui_test_version(npm: &Path) -> Option<String> {
.or_else(|| get_browser_ui_test_version_inner(npm, true))
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustdocGUI {
pub target: TargetSelection,
pub compiler: Compiler,
@ -1060,7 +1060,7 @@ impl Step for RustdocGUI {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Tidy;
impl Step for Tidy {
@ -1151,7 +1151,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ExpandYamlAnchors;
impl Step for ExpandYamlAnchors {
@ -1251,7 +1251,7 @@ macro_rules! test_definitions {
host: $host:expr,
compare_mode: $compare_mode:expr
}) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct $name {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1294,7 +1294,7 @@ macro_rules! coverage_test_alias {
default: $default:expr,
only_hosts: $only_hosts:expr $(,)?
}) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct $name {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1376,7 +1376,7 @@ default_test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assem
///
/// Each individual mode also has its own alias that will run the tests in
/// just that mode.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Coverage {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1438,7 +1438,7 @@ host_test!(CoverageRunRustdoc {
});
// For the mir-opt suite we do not use macros, as we need custom behavior when blessing.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MirOpt {
pub compiler: Compiler,
pub target: TargetSelection,
@ -1494,7 +1494,7 @@ impl Step for MirOpt {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Compiletest {
compiler: Compiler,
target: TargetSelection,
@ -2142,7 +2142,7 @@ impl BookTest {
macro_rules! test_book {
($($name:ident, $path:expr, $book_name:expr, default=$default:expr;)+) => {
$(
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct $name {
compiler: Compiler,
}
@ -2187,7 +2187,7 @@ test_book!(
EditionGuide, "src/doc/edition-guide", "edition-guide", default=false;
);
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ErrorIndex {
compiler: Compiler,
}
@ -2264,7 +2264,7 @@ fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) ->
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RustcGuide;
impl Step for RustcGuide {
@ -2537,7 +2537,7 @@ impl Step for Crate {
}
/// Rustdoc is special in various ways, which is why this step is different from `Crate`.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CrateRustdoc {
host: TargetSelection,
}
@ -2638,7 +2638,7 @@ impl Step for CrateRustdoc {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CrateRustdocJsonTypes {
host: TargetSelection,
}
@ -2708,7 +2708,7 @@ impl Step for CrateRustdocJsonTypes {
/// QEMU we have to build our own tools so we've got conditional dependencies
/// on those programs as well. Note that the remote test client is built for
/// the build target (us) and the server is built for the target.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RemoteCopyLibs {
compiler: Compiler,
target: TargetSelection,
@ -2754,7 +2754,7 @@ impl Step for RemoteCopyLibs {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Distcheck;
impl Step for Distcheck {
@ -2824,7 +2824,7 @@ impl Step for Distcheck {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Bootstrap;
impl Step for Bootstrap {
@ -2876,7 +2876,7 @@ impl Step for Bootstrap {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TierCheck {
pub compiler: Compiler,
}
@ -2926,7 +2926,7 @@ impl Step for TierCheck {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct LintDocs {
pub compiler: Compiler,
pub target: TargetSelection,
@ -2959,7 +2959,7 @@ impl Step for LintDocs {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RustInstaller;
impl Step for RustInstaller {
@ -3020,7 +3020,7 @@ impl Step for RustInstaller {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TestHelpers {
pub target: TargetSelection,
}

View file

@ -15,7 +15,7 @@ use crate::Compiler;
use crate::Mode;
use crate::{gha, Kind};
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum SourceType {
InTree,
Submodule,
@ -220,7 +220,7 @@ macro_rules! bootstrap_tool {
$(,allow_features = $allow_features:expr)?
;
)+) => {
#[derive(Copy, PartialEq, Eq, Clone)]
#[derive(PartialEq, Eq, Clone)]
pub enum Tool {
$(
$name,
@ -241,7 +241,7 @@ macro_rules! bootstrap_tool {
}
$(
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct $name {
pub compiler: Compiler,
pub target: TargetSelection,
@ -315,7 +315,7 @@ bootstrap_tool!(
CoverageDump, "src/tools/coverage-dump", "coverage-dump";
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
pub struct ErrorIndex {
pub compiler: Compiler,
}
@ -369,7 +369,7 @@ impl Step for ErrorIndex {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RemoteTestServer {
pub compiler: Compiler,
pub target: TargetSelection,
@ -403,7 +403,7 @@ impl Step for RemoteTestServer {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
pub struct Rustdoc {
/// This should only ever be 0 or 2.
/// We sometimes want to reference the "bootstrap" rustdoc, which is why this option is here.
@ -515,7 +515,7 @@ impl Step for Rustdoc {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Cargo {
pub compiler: Compiler,
pub target: TargetSelection,
@ -560,7 +560,7 @@ impl Step for Cargo {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct LldWrapper {
pub compiler: Compiler,
pub target: TargetSelection,
@ -589,7 +589,7 @@ impl Step for LldWrapper {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustAnalyzer {
pub compiler: Compiler,
pub target: TargetSelection,
@ -637,7 +637,7 @@ impl Step for RustAnalyzer {
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RustAnalyzerProcMacroSrv {
pub compiler: Compiler,
pub target: TargetSelection,

View file

@ -1069,7 +1069,7 @@ impl<'a> Builder<'a> {
/// Returns the libdir where the standard library and other artifacts are
/// found for a compiler's sysroot.
pub fn sysroot_libdir(&self, compiler: Compiler, target: TargetSelection) -> Interned<PathBuf> {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
struct Libdir {
compiler: Compiler,
target: TargetSelection,

View file

@ -1235,12 +1235,16 @@ impl Config {
// Infer the rest of the configuration.
// Infer the source directory. This is non-trivial because we want to support a downloaded bootstrap binary,
// running on a completely machine from where it was compiled.
// running on a completely different machine from where it was compiled.
let mut cmd = Command::new("git");
// NOTE: we cannot support running from outside the repository because the only path we have available
// is set at compile time, which can be wrong if bootstrap was downloaded from source.
// NOTE: we cannot support running from outside the repository because the only other path we have available
// is set at compile time, which can be wrong if bootstrap was downloaded rather than compiled locally.
// We still support running outside the repository if we find we aren't in a git directory.
cmd.arg("rev-parse").arg("--show-toplevel");
// NOTE: We get a relative path from git to work around an issue on MSYS/mingw. If we used an absolute path,
// and end up using MSYS's git rather than git-for-windows, we would get a unix-y MSYS path. But as bootstrap
// has already been (kinda-cross-)compiled to Windows land, we require a normal Windows path.
cmd.arg("rev-parse").arg("--show-cdup");
// Discard stderr because we expect this to fail when building from a tarball.
let output = cmd
.stderr(std::process::Stdio::null())
@ -1248,13 +1252,18 @@ impl Config {
.ok()
.and_then(|output| if output.status.success() { Some(output) } else { None });
if let Some(output) = output {
let git_root = String::from_utf8(output.stdout).unwrap();
// We need to canonicalize this path to make sure it uses backslashes instead of forward slashes.
let git_root = PathBuf::from(git_root.trim()).canonicalize().unwrap();
let git_root_relative = String::from_utf8(output.stdout).unwrap();
// We need to canonicalize this path to make sure it uses backslashes instead of forward slashes,
// and to resolve any relative components.
let git_root = env::current_dir()
.unwrap()
.join(PathBuf::from(git_root_relative.trim()))
.canonicalize()
.unwrap();
let s = git_root.to_str().unwrap();
// Bootstrap is quite bad at handling /? in front of paths
let src = match s.strip_prefix("\\\\?\\") {
let git_root = match s.strip_prefix("\\\\?\\") {
Some(p) => PathBuf::from(p),
None => git_root,
};
@ -1264,8 +1273,8 @@ impl Config {
//
// NOTE: this implies that downloadable bootstrap isn't supported when the build directory is outside
// the source directory. We could fix that by setting a variable from all three of python, ./x, and x.ps1.
if src.join("src").join("stage0.json").exists() {
config.src = src;
if git_root.join("src").join("stage0.json").exists() {
config.src = git_root;
}
} else {
// We're building from a tarball, not git sources.
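
The hunk above replaces `git rev-parse --show-toplevel` with `--show-cdup`, so git only ever reports a path *relative* to the current directory; bootstrap then resolves it itself, which sidesteps the MSYS-vs-Windows path problem described in the NOTE. A minimal standalone sketch of that resolution, with names and error handling simplified relative to the real `Config` code:

```rust
use std::env;
use std::path::PathBuf;
use std::process::{Command, Stdio};

/// Ask git for a *relative* path to the repository root (`--show-cdup`),
/// resolve it against the current directory, and strip the `\\?\` verbatim
/// prefix that `canonicalize` adds on Windows.
fn detect_git_root() -> Option<PathBuf> {
    let output = Command::new("git")
        .args(["rev-parse", "--show-cdup"])
        // Discard stderr, as above: this is expected to fail in a tarball build.
        .stderr(Stdio::null())
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    // A relative answer like "../../" cannot be a unix-style MSYS path, which is
    // the whole point of using --show-cdup instead of --show-toplevel.
    let rel = String::from_utf8(output.stdout).ok()?;
    let root = env::current_dir().ok()?.join(rel.trim()).canonicalize().ok()?;
    // canonicalize() on Windows yields `\\?\C:\...`; strip the prefix, since
    // bootstrap handles the plain form better (as noted above).
    let plain = root.to_string_lossy().into_owned();
    Some(match plain.strip_prefix(r"\\?\") {
        Some(p) => PathBuf::from(p),
        None => root,
    })
}

fn main() {
    match detect_git_root() {
        Some(root) => println!("git root: {}", root.display()),
        None => println!("not inside a git checkout (tarball build?)"),
    }
}
```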

View file

@ -131,4 +131,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
severity: ChangeSeverity::Warning,
summary: "The \"codegen\"/\"llvm\" profile has been removed and replaced with \"compiler\", use it instead for the same behavior.",
},
ChangeInfo {
change_id: 118724,
severity: ChangeSeverity::Info,
summary: "`x install` now skips providing tarball sources (under 'build/dist' path) to speed up the installation process.",
},
];

View file

@ -3,8 +3,8 @@ use std::{
process::Command,
};
use crate::core::build_steps::dist::distdir;
use crate::core::builder::Builder;
use crate::core::{build_steps::dist::distdir, builder::Kind};
use crate::utils::channel;
use crate::utils::helpers::t;
@ -325,7 +325,22 @@ impl<'a> Tarball<'a> {
assert!(!formats.is_empty(), "dist.compression-formats can't be empty");
cmd.arg("--compression-formats").arg(formats.join(","));
}
cmd.args(["--compression-profile", &self.builder.config.dist_compression_profile]);
// For `x install` tarball files aren't needed, so we can speed up the process by not producing them.
let compression_profile = if self.builder.kind == Kind::Install {
self.builder.verbose("Forcing dist.compression-profile = 'no-op' for `x install`.");
// "no-op" indicates that the rust-installer won't produce compressed tarball sources.
"no-op"
} else {
assert!(
self.builder.config.dist_compression_profile != "no-op",
"dist.compression-profile = 'no-op' can only be used for `x install`"
);
&self.builder.config.dist_compression_profile
};
cmd.args(&["--compression-profile", compression_profile]);
self.builder.run(&mut cmd);
// Ensure there are no symbolic links in the tarball. In particular,
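
The new branch above keys the compression profile off the build kind: tarballs produced during `x install` are unpacked and discarded immediately, so compressing them is wasted work. A standalone sketch of the same decision, assuming only what the diff shows (`Kind::Install` and the `"no-op"` profile name); the function shape is illustrative:

```rust
/// Pick the compression profile to pass to rust-installer.
/// "no-op" skips producing compressed tarball sources entirely.
#[derive(Debug, PartialEq, Eq)]
enum Kind {
    Dist,
    Install,
}

fn compression_profile(kind: Kind, configured: &str) -> &str {
    if kind == Kind::Install {
        // Install tarballs are consumed on the spot, so don't bother compressing them.
        "no-op"
    } else {
        assert_ne!(
            configured, "no-op",
            "dist.compression-profile = 'no-op' can only be used for `x install`"
        );
        configured
    }
}

fn main() {
    assert_eq!(compression_profile(Kind::Install, "balanced"), "no-op");
    assert_eq!(compression_profile(Kind::Dist, "balanced"), "balanced");
}
```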

View file

@ -111,10 +111,31 @@ x--expand-yaml-anchors--remove:
if: success() && !env.SKIP_JOB
- &base-ci-job
defaults:
run:
shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}
timeout-minutes: 600
runs-on: "${{ matrix.os }}"
env: *shared-ci-variables
steps:
- if: contains(matrix.os, 'windows')
uses: msys2/setup-msys2@v2.22.0
with:
# i686 jobs use mingw32. x86_64 and cross-compile jobs use mingw64.
msystem: ${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}
# don't try to download updates for already installed packages
update: false
# don't try to use the msys that comes built-in to the github runner,
# so we can control what is installed (i.e. not python)
release: true
# Inherit the full path from the Windows environment, with MSYS2's */bin/
# dirs placed in front. This lets us run Windows-native Python etc.
path-type: inherit
install: >
make
dos2unix
diffutils
- name: disable git crlf conversion
run: git config --global core.autocrlf false

View file

@ -76,7 +76,7 @@ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set dist.compression-profile=balance
# the LLVM build, as not to run out of memory.
# This is an attempt to fix the spurious build error tracked by
# https://github.com/rust-lang/rust/issues/108227.
if isWindows && [[ ${CUSTOM_MINGW-0} -eq 1 ]]; then
if isKnownToBeMingwBuild; then
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set llvm.link-jobs=1"
fi

View file

@ -37,8 +37,7 @@ if isMacOS; then
# Configure `AR` specifically so rustbuild doesn't try to infer it as
# `clang-ar` by accident.
ciCommandSetEnv AR "ar"
elif isWindows && [[ ${CUSTOM_MINGW-0} -ne 1 ]]; then
elif isWindows && ! isKnownToBeMingwBuild; then
# If we're compiling for MSVC then we, like most other distribution builders,
# switch to clang as the compiler. This'll allow us eventually to enable LTO
# amongst LLVM and rustc. Note that we only do this on MSVC as I don't think

View file

@ -38,11 +38,11 @@ if isWindows; then
;;
esac
if [[ "${CUSTOM_MINGW-0}" -ne 1 ]]; then
pacman -S --noconfirm --needed mingw-w64-$arch-toolchain mingw-w64-$arch-cmake \
mingw-w64-$arch-gcc \
mingw-w64-$arch-python # the python package is actually for python3
ciCommandAddPath "$(ciCheckoutPath)/msys2/mingw${bits}/bin"
if [[ "${CUSTOM_MINGW:-0}" == 0 ]]; then
pacboy -S --noconfirm toolchain:p
# According to the comment in the Windows part of install-clang.sh, in the future we might
# want to do this instead:
# pacboy -S --noconfirm clang:p ...
else
mingw_dir="mingw${bits}"

View file

@ -1,17 +1,12 @@
#!/bin/bash
# Download and install MSYS2, needed primarily for the test suite (run-make) but
# also used by the MinGW toolchain for assembling things.
# Clean up and prepare the MSYS2 installation. MSYS2 is needed primarily for
# the test suite (run-make), but is also used by the MinGW toolchain for assembling things.
set -euo pipefail
IFS=$'\n\t'
source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
if isWindows; then
msys2Path="c:/msys64"
mkdir -p "${msys2Path}/home/${USERNAME}"
ciCommandAddPath "${msys2Path}/usr/bin"
# Detect the native Python version installed on the agent. On GitHub
# Actions, the C:\hostedtoolcache\windows\Python directory contains a
# subdirectory for each installed Python version.
@ -29,4 +24,33 @@ if isWindows; then
fi
ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64"
ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64\\Scripts"
# Install pacboy for easily installing packages
pacman -S --noconfirm pactoys
# Delete these pre-installed tools so we can't accidentally use them, because we are using the
# MSYS2 setup action versions instead.
# Delete pre-installed version of MSYS2
rm -r "/c/msys64/"
# Delete Strawberry Perl, which contains a version of mingw
rm -r "/c/Strawberry/"
# Delete these other copies of mingw, I don't even know where they come from.
rm -r "/c/mingw64/"
rm -r "/c/mingw32/"
if isKnownToBeMingwBuild; then
# Use the mingw version of CMake for mingw builds.
# However, the MSVC build needs native CMake, as it fails with the mingw one.
# Delete native CMake
rm -r "/c/Program Files/CMake/"
# Install mingw-w64-$arch-cmake
pacboy -S --noconfirm cmake:p
# We use Git-for-Windows for MSVC builds, and MSYS2 Git for mingw builds,
# so that both are tested.
# Delete Windows-Git
rm -r "/c/Program Files/Git/"
# Install MSYS2 git
pacman -S --noconfirm git
fi
fi

View file

@ -52,6 +52,10 @@ function isLinux {
[[ "${OSTYPE}" = "linux-gnu" ]]
}
function isKnownToBeMingwBuild {
isGitHubActions && [[ "${CI_JOB_NAME}" == *mingw ]]
}
function isCiBranch {
if [[ $# -ne 1 ]]; then
echo "usage: $0 <branch-name>"

View file

@ -1,4 +1,4 @@
use rustc_ast::token::{self, BinOpToken, Delimiter};
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast_pretty::pprust::state::State as Printer;
use rustc_ast_pretty::pprust::PrintState;
@ -148,7 +148,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
(false, Other)
}
(Pound, token::Not) => (false, PoundBang),
(_, token::Ident(symbol, /* is_raw */ false))
(_, token::Ident(symbol, IdentIsRaw::No))
if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) =>
{
(true, Ident)
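
The change above adapts rustdoc's token pretty-printer to a rustc refactor that replaced the raw-identifier `bool` on `token::Ident` with a dedicated `IdentIsRaw` enum, so match arms no longer need a `/* is_raw */` comment to be readable. A hedged sketch of the same bool-to-enum pattern (the `No`/`Yes` variant names mirror `rustc_ast`; the surrounding token type is invented for illustration):

```rust
// Sketch of the bool-to-enum refactor the hunk above adapts to.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum IdentIsRaw {
    No,
    Yes,
}

#[derive(Debug)]
enum Token {
    Ident(String, IdentIsRaw), // previously: Ident(String, /* is_raw */ bool)
    Pound,
}

fn is_plain_keyword_like(tok: &Token) -> bool {
    match tok {
        // The variant name documents itself; a bare `false` here would not.
        Token::Ident(_sym, IdentIsRaw::No) => true,
        _ => false,
    }
}

fn main() {
    let toks = [
        Token::Ident("match".to_string(), IdentIsRaw::No),
        Token::Ident("match".to_string(), IdentIsRaw::Yes), // i.e. `r#match`
        Token::Pound,
    ];
    for t in &toks {
        println!("{t:?} -> {}", is_plain_keyword_like(t));
    }
}
```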

View file

@ -1207,6 +1207,13 @@ impl Tester for Collector {
}
}
#[cfg(test)] // used in tests
impl Tester for Vec<usize> {
fn add_test(&mut self, _test: String, _config: LangString, line: usize) {
self.push(line);
}
}
struct HirCollector<'a, 'hir, 'tcx> {
sess: &'a Session,
collector: &'a mut Collector,

View file

@ -480,11 +480,6 @@ fn test_markdown_html_escape() {
#[test]
fn test_find_testable_code_line() {
fn t(input: &str, expect: &[usize]) {
impl crate::doctest::Tester for Vec<usize> {
fn add_test(&mut self, _test: String, _config: LangString, line: usize) {
self.push(line);
}
}
let mut lines = Vec::<usize>::new();
find_testable_code(input, &mut lines, ErrorCodes::No, false, None, true);
assert_eq!(lines, expect);
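
Together with the hunk above that adds a module-level `#[cfg(test)] impl Tester for Vec<usize>`, this hunk deletes the same impl from inside the test helper. An impl written inside a function body is a non-local definition that still applies crate-wide, and the clippy test further down adds `allow(non_local_definitions)` for exactly that lint, so moving the impl to module scope is presumably meant to keep the new lint quiet; that connection is an inference, and the sketch below simplifies the types:

```rust
// Simplified before/after of the relocation, under the assumption stated above.
#[allow(dead_code)] // only exercised in test builds
trait Tester {
    fn add_test(&mut self, line: usize);
}

// Before: the impl lived inside `fn t(...)` in the test. An impl in a function
// body still takes effect for the whole crate, which is what the
// `non_local_definitions` lint warns about.
//
//     fn t(input: &str, expect: &[usize]) {
//         impl Tester for Vec<usize> { /* ... */ }
//         // ...
//     }

// After: the impl sits at module level and is only compiled for test builds.
#[cfg(test)]
impl Tester for Vec<usize> {
    fn add_test(&mut self, line: usize) {
        self.push(line);
    }
}

#[cfg(test)]
mod tests {
    use super::Tester;

    #[test]
    fn collects_line_numbers() {
        let mut lines: Vec<usize> = Vec::new();
        lines.add_test(3);
        lines.add_test(7);
        assert_eq!(lines, vec![3, 7]);
    }
}
```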

View file

@ -4,7 +4,7 @@
//@[disabled] rustc-env:CLIPPY_CONF_DIR=tests/ui-toml/undocumented_unsafe_blocks/disabled
#![warn(clippy::undocumented_unsafe_blocks, clippy::unnecessary_safety_comment)]
#![allow(deref_nullptr, clippy::let_unit_value, clippy::missing_safety_doc)]
#![allow(deref_nullptr, non_local_definitions, clippy::let_unit_value, clippy::missing_safety_doc)]
#![feature(lint_reasons)]
extern crate proc_macro_unsafe;

Some files were not shown because too many files have changed in this diff.