Auto merge of #137959 - matthiaskrgr:rollup-62vjvwr, r=matthiaskrgr
Rollup of 12 pull requests

Successful merges:

 - #135767 (Future incompatibility warning `unsupported_fn_ptr_calling_conventions`: Also warn in dependencies)
 - #137852 (Remove layouting dead code for non-array SIMD types.)
 - #137863 (Fix pretty printing of unsafe binders)
 - #137882 (do not build additional stage on compiler paths)
 - #137894 (Revert "store ScalarPair via memset when one side is undef and the other side can be memset")
 - #137902 (Make `ast::TokenKind` more like `lexer::TokenKind`)
 - #137921 (Subtree update of `rust-analyzer`)
 - #137922 (A few cleanups after the removal of `cfg(not(parallel))`)
 - #137939 (fix order on shl impl)
 - #137946 (Fix docker run-local docs)
 - #137955 (Always allow rustdoc-json tests to contain long lines)
 - #137958 (triagebot.toml: Don't label `test/rustdoc-json` as A-rustdoc-search)

r? `@ghost`
`@rustbot` modify labels: rollup
Commit: fd17deacce
316 changed files with 7394 additions and 5071 deletions
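Most of the hunks excerpted below come from #137902, which renames `ast::TokenKind::Not` to `Bang` and flattens the `BinOp(..)`/`BinOpEq(..)` wrappers into standalone variants such as `Plus` and `PlusEq`. The following is a rough, self-contained sketch of that shape change; the `OldTokenKind`/`NewTokenKind` enums are hypothetical stand-ins, not rustc types.

```rust
// Hypothetical miniature of the rename in #137902: the old AST token kind
// wraps binary operators in `BinOp(..)` and calls `!` `Not`; the new one
// uses flat variants and calls `!` `Bang`.

#[derive(Debug, Clone, Copy, PartialEq)]
enum BinOpToken {
    Plus,
    Or,
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum OldTokenKind {
    Not,               // `!`
    BinOp(BinOpToken), // `+`, `|`, ...
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum NewTokenKind {
    Bang, // `!`, renamed from `Not`
    Plus, // previously `BinOp(Plus)`
    Or,   // previously `BinOp(Or)`
}

// Maps the old, nested shape onto the new, flat shape.
fn flatten(old: OldTokenKind) -> NewTokenKind {
    match old {
        OldTokenKind::Not => NewTokenKind::Bang,
        OldTokenKind::BinOp(BinOpToken::Plus) => NewTokenKind::Plus,
        OldTokenKind::BinOp(BinOpToken::Or) => NewTokenKind::Or,
    }
}

fn main() {
    assert_eq!(flatten(OldTokenKind::Not), NewTokenKind::Bang);
    assert_eq!(flatten(OldTokenKind::BinOp(BinOpToken::Plus)), NewTokenKind::Plus);
    assert_eq!(flatten(OldTokenKind::BinOp(BinOpToken::Or)), NewTokenKind::Or);
}
```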
@@ -328,7 +328,7 @@ impl<'a> StripUnconfigured<'a> {
        // For inner attributes, we do the same thing for the `!` in `#![attr]`.
        let mut trees = if cfg_attr.style == AttrStyle::Inner {
-            let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _)) =
+            let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Bang, .. }, _)) =
                orig_trees.next()
            else {
                panic!("Bad tokens for attribute {cfg_attr:?}");
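The hunk above extracts the `!` token that distinguishes an inner attribute (`#![attr]`) from an outer one (`#[attr]`) before the attribute is rebuilt without its `cfg_attr` wrapper. A minimal sketch of that `let ... else` extraction, assuming a hypothetical `Tree` type rather than rustc's `TokenTree`:

```rust
// Minimal sketch, assuming a hypothetical `Tree` type: inner attributes
// (`#![attr]`) start with a `!` token tree that outer attributes (`#[attr]`)
// do not have, and the rebuild consumes it up front.

#[derive(Debug, Clone, PartialEq)]
enum Tree {
    Bang,          // the `!` in `#![attr]`
    Ident(String), // everything else, heavily simplified
}

fn strip_leading_bang(trees: Vec<Tree>, inner: bool) -> Vec<Tree> {
    let mut iter = trees.into_iter();
    if inner {
        // Mirrors the `let Some(..) = orig_trees.next() else { panic!(..) }`
        // shape in the hunk above.
        let Some(Tree::Bang) = iter.next() else {
            panic!("inner attribute must start with `!`");
        };
    }
    iter.collect()
}

fn main() {
    let inner = vec![Tree::Bang, Tree::Ident("allow".into())];
    assert_eq!(strip_leading_bang(inner, true), vec![Tree::Ident("allow".into())]);
}
```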
@@ -432,7 +432,7 @@ fn check_nested_occurrences(
            }
            (
                NestedMacroState::MacroRules,
-                &TokenTree::Token(Token { kind: TokenKind::Not, .. }),
+                &TokenTree::Token(Token { kind: TokenKind::Bang, .. }),
            ) => {
                state = NestedMacroState::MacroRulesNot;
            }
@@ -690,7 +690,7 @@ fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
        && let TokenKind::Ident(ident, _) = ident.kind
        && ident == sym::compile_error
        && let mbe::TokenTree::Token(bang) = bang
-        && let TokenKind::Not = bang.kind
+        && let TokenKind::Bang = bang.kind
        && let mbe::TokenTree::Delimited(.., del) = args
        && !del.delim.skip()
    {
@@ -1135,7 +1135,7 @@ fn check_matcher_core<'tt>(
                    && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                    && matches!(
                        next_token,
-                        TokenTree::Token(token) if *token == BinOp(token::BinOpToken::Or)
+                        TokenTree::Token(token) if *token == token::Or
                    )
                {
                    // It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.
@@ -1177,7 +1177,7 @@ fn check_matcher_core<'tt>(

                if kind == NonterminalKind::Pat(PatWithOr)
                    && sess.psess.edition.at_least_rust_2021()
-                    && next_token.is_token(&BinOp(token::BinOpToken::Or))
+                    && next_token.is_token(&token::Or)
                {
                    let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl(
                        span,
@@ -1296,7 +1296,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
            const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
            match tok {
                TokenTree::Token(token) => match token.kind {
-                    FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
+                    FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                    Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                        IsInFollow::Yes
                    }
@@ -1332,9 +1332,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
                    | Colon
                    | Eq
                    | Gt
-                    | BinOp(token::Shr)
+                    | Shr
                    | Semi
-                    | BinOp(token::Or) => IsInFollow::Yes,
+                    | Or => IsInFollow::Yes,
                    Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                        IsInFollow::Yes
                    }
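Both `is_in_follow` hunks adjust the FOLLOW-set check that decides which tokens may legally appear after a macro fragment such as `$x:pat`. A tiny standalone illustration of such a check, using local stand-in types rather than rustc's:

```rust
// Local stand-ins, not rustc types: a toy FOLLOW-set check in the spirit of
// `is_in_follow`, where only `=>`, `,`, `=` and `|` may come after a
// `$x:pat`-style fragment.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Tok {
    FatArrow, // `=>`
    Comma,    // `,`
    Eq,       // `=`
    Or,       // `|`
    Plus,     // `+`, not allowed after a pattern fragment
}

fn follow_ok_after_pat(next: Tok) -> bool {
    matches!(next, Tok::FatArrow | Tok::Comma | Tok::Eq | Tok::Or)
}

fn main() {
    assert!(follow_ok_after_pat(Tok::FatArrow));
    assert!(follow_ok_after_pat(Tok::Comma));
    assert!(follow_ok_after_pat(Tok::Eq));
    assert!(follow_ok_after_pat(Tok::Or));
    assert!(!follow_ok_after_pat(Tok::Plus));
}
```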
@@ -302,8 +302,8 @@ fn parse_tree<'a>(
/// `None`.
fn kleene_op(token: &Token) -> Option<KleeneOp> {
    match token.kind {
-        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
-        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
+        token::Star => Some(KleeneOp::ZeroOrMore),
+        token::Plus => Some(KleeneOp::OneOrMore),
        token::Question => Some(KleeneOp::ZeroOrOne),
        _ => None,
    }
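The `kleene_op` hunk classifies the repetition operators used in `macro_rules!` patterns (`$(...)*`, `$(...)+`, `$(...)?`). A self-contained sketch of the same classification with simplified stand-in types:

```rust
// Self-contained sketch with simplified stand-in types; only the mapping
// mirrors the real `kleene_op` shown above.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Tok {
    Star,     // `*`
    Plus,     // `+`
    Question, // `?`
    Comma,    // any non-Kleene token
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum KleeneOp {
    ZeroOrMore,
    OneOrMore,
    ZeroOrOne,
}

fn kleene_op(tok: Tok) -> Option<KleeneOp> {
    match tok {
        Tok::Star => Some(KleeneOp::ZeroOrMore),
        Tok::Plus => Some(KleeneOp::OneOrMore),
        Tok::Question => Some(KleeneOp::ZeroOrOne),
        _ => None,
    }
}

fn main() {
    assert_eq!(kleene_op(Tok::Star), Some(KleeneOp::ZeroOrMore));
    assert_eq!(kleene_op(Tok::Plus), Some(KleeneOp::OneOrMore));
    assert_eq!(kleene_op(Tok::Question), Some(KleeneOp::ZeroOrOne));
    assert_eq!(kleene_op(Tok::Comma), None);
}
```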
@@ -180,28 +180,28 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                Gt => op(">"),
                AndAnd => op("&&"),
                OrOr => op("||"),
-                Not => op("!"),
+                Bang => op("!"),
                Tilde => op("~"),
-                BinOp(Plus) => op("+"),
-                BinOp(Minus) => op("-"),
-                BinOp(Star) => op("*"),
-                BinOp(Slash) => op("/"),
-                BinOp(Percent) => op("%"),
-                BinOp(Caret) => op("^"),
-                BinOp(And) => op("&"),
-                BinOp(Or) => op("|"),
-                BinOp(Shl) => op("<<"),
-                BinOp(Shr) => op(">>"),
-                BinOpEq(Plus) => op("+="),
-                BinOpEq(Minus) => op("-="),
-                BinOpEq(Star) => op("*="),
-                BinOpEq(Slash) => op("/="),
-                BinOpEq(Percent) => op("%="),
-                BinOpEq(Caret) => op("^="),
-                BinOpEq(And) => op("&="),
-                BinOpEq(Or) => op("|="),
-                BinOpEq(Shl) => op("<<="),
-                BinOpEq(Shr) => op(">>="),
+                Plus => op("+"),
+                Minus => op("-"),
+                Star => op("*"),
+                Slash => op("/"),
+                Percent => op("%"),
+                Caret => op("^"),
+                And => op("&"),
+                Or => op("|"),
+                Shl => op("<<"),
+                Shr => op(">>"),
+                PlusEq => op("+="),
+                MinusEq => op("-="),
+                StarEq => op("*="),
+                SlashEq => op("/="),
+                PercentEq => op("%="),
+                CaretEq => op("^="),
+                AndEq => op("&="),
+                OrEq => op("|="),
+                ShlEq => op("<<="),
+                ShrEq => op(">>="),
                At => op("@"),
                Dot => op("."),
                DotDot => op(".."),
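In the proc-macro bridge, each `op("...")` call above turns a multi-character operator into individual punctuation characters, with every character except the last marked as joint so the pieces can be glued back into one operator on the other side. A standalone sketch of that splitting; the `Spacing` enum here is a local stand-in, not `proc_macro::Spacing`:

```rust
// `Spacing` is a local stand-in, not `proc_macro::Spacing`; the point is that
// every character of a multi-character operator except the last is marked
// joint so the pieces reassemble into one operator.

#[derive(Debug, PartialEq)]
enum Spacing {
    Joint, // more punctuation follows immediately
    Alone, // last character of the operator
}

fn split_op(op: &str) -> Vec<(char, Spacing)> {
    let chars: Vec<char> = op.chars().collect();
    let last = chars.len().saturating_sub(1);
    chars
        .iter()
        .enumerate()
        .map(|(i, &c)| (c, if i == last { Spacing::Alone } else { Spacing::Joint }))
        .collect()
}

fn main() {
    assert_eq!(
        split_op("<<="),
        vec![('<', Spacing::Joint), ('<', Spacing::Joint), ('=', Spacing::Alone)]
    );
    assert_eq!(split_op("!"), vec![('!', Spacing::Alone)]);
}
```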
@@ -322,16 +322,16 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                b'=' => Eq,
                b'<' => Lt,
                b'>' => Gt,
-                b'!' => Not,
+                b'!' => Bang,
                b'~' => Tilde,
-                b'+' => BinOp(Plus),
-                b'-' => BinOp(Minus),
-                b'*' => BinOp(Star),
-                b'/' => BinOp(Slash),
-                b'%' => BinOp(Percent),
-                b'^' => BinOp(Caret),
-                b'&' => BinOp(And),
-                b'|' => BinOp(Or),
+                b'+' => Plus,
+                b'-' => Minus,
+                b'*' => Star,
+                b'/' => Slash,
+                b'%' => Percent,
+                b'^' => Caret,
+                b'&' => And,
+                b'|' => Or,
                b'@' => At,
                b'.' => Dot,
                b',' => Comma,
@@ -372,10 +372,9 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                suffix,
                span,
            }) if symbol.as_str().starts_with('-') => {
-                let minus = BinOp(BinOpToken::Minus);
                let symbol = Symbol::intern(&symbol.as_str()[1..]);
                let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token_joint_hidden(minus, span);
+                let a = tokenstream::TokenTree::token_joint_hidden(Minus, span);
                let b = tokenstream::TokenTree::token_alone(integer, span);
                smallvec![a, b]
            }
@@ -385,10 +384,9 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                suffix,
                span,
            }) if symbol.as_str().starts_with('-') => {
-                let minus = BinOp(BinOpToken::Minus);
                let symbol = Symbol::intern(&symbol.as_str()[1..]);
                let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token_joint_hidden(minus, span);
+                let a = tokenstream::TokenTree::token_joint_hidden(Minus, span);
                let b = tokenstream::TokenTree::token_alone(float, span);
                smallvec![a, b]
            }
@@ -599,10 +597,7 @@ impl server::TokenStream for Rustc<'_, '_> {
                Ok(Self::TokenStream::from_iter([
                    // FIXME: The span of the `-` token is lost when
                    // parsing, so we cannot faithfully recover it here.
-                    tokenstream::TokenTree::token_joint_hidden(
-                        token::BinOp(token::Minus),
-                        e.span,
-                    ),
+                    tokenstream::TokenTree::token_joint_hidden(token::Minus, e.span),
                    tokenstream::TokenTree::token_alone(token::Literal(*token_lit), e.span),
                ]))
            }
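The last three hunks all handle literals whose symbol begins with `-`: the sign is stripped and re-emitted as a separate minus token placed joint-hidden in front of the unsigned literal. A minimal standalone sketch of that split, with hypothetical token types rather than rustc's:

```rust
// Hypothetical token types, not rustc's: a literal whose symbol starts with
// '-' is split into a minus token followed by the unsigned literal, in the
// spirit of `Symbol::intern(&symbol.as_str()[1..])` plus the joint-hidden
// `Minus` token in the hunks above.

#[derive(Debug, PartialEq)]
enum MiniToken {
    Minus,
    IntLit(String),
}

fn split_negative_literal(symbol: &str) -> Vec<MiniToken> {
    match symbol.strip_prefix('-') {
        Some(rest) => vec![MiniToken::Minus, MiniToken::IntLit(rest.to_string())],
        None => vec![MiniToken::IntLit(symbol.to_string())],
    }
}

fn main() {
    assert_eq!(
        split_negative_literal("-123"),
        vec![MiniToken::Minus, MiniToken::IntLit("123".to_string())]
    );
    assert_eq!(split_negative_literal("42"), vec![MiniToken::IntLit("42".to_string())]);
}
```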