
Rename ast::TokenKind::Not as ast::TokenKind::Bang.

For consistency with `rustc_lexer::TokenKind::Bang`, and because other
`ast::TokenKind` variants generally have syntactic names instead of
semantic names (e.g. `Star` and `DotDot` instead of `Mul` and `Range`).
Author: Nicholas Nethercote
Date:   2024-12-20 14:04:25 +11:00
Commit: 53167c0b7f
Parent: 2a1e2e9632

20 changed files with 48 additions and 48 deletions
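
The rationale in the commit message can be illustrated with a small, self-contained sketch. This is not rustc's actual definition, just a hypothetical cut-down TokenKind whose variants are named for the token's spelling (Bang, Star, DotDot) rather than its meaning (Not, Mul, Range):

// Hypothetical, cut-down sketch of syntactic variant naming; not the real ast::TokenKind.
#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind {
    Bang,   // `!`  (named for its spelling, not the semantic `Not`)
    Star,   // `*`  (rather than `Mul`)
    DotDot, // `..` (rather than `Range`)
}

impl TokenKind {
    // Render the token as it appears in source text.
    fn as_str(self) -> &'static str {
        match self {
            TokenKind::Bang => "!",
            TokenKind::Star => "*",
            TokenKind::DotDot => "..",
        }
    }
}

fn main() {
    // A syntactic name avoids committing to one meaning: `!` can be logical not,
    // a macro call, or the never type, but it is always spelled `!`.
    assert_eq!(TokenKind::Bang.as_str(), "!");
}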


@@ -360,7 +360,7 @@ pub enum TokenKind {
     /// `||`
     OrOr,
     /// `!`
-    Not,
+    Bang,
     /// `~`
     Tilde,
     /// `+`
@ -522,7 +522,7 @@ impl TokenKind {
Some(match (self, n) { Some(match (self, n) {
(Le, 1) => (Lt, Eq), (Le, 1) => (Lt, Eq),
(EqEq, 1) => (Eq, Eq), (EqEq, 1) => (Eq, Eq),
(Ne, 1) => (Not, Eq), (Ne, 1) => (Bang, Eq),
(Ge, 1) => (Gt, Eq), (Ge, 1) => (Gt, Eq),
(AndAnd, 1) => (And, And), (AndAnd, 1) => (And, And),
(OrOr, 1) => (Or, Or), (OrOr, 1) => (Or, Or),
@@ -604,7 +604,7 @@ impl Token {
     pub fn is_punct(&self) -> bool {
         match self.kind {
-            Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | Plus | Minus
+            Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Bang | Tilde | Plus | Minus
             | Star | Slash | Percent | Caret | And | Or | Shl | Shr | PlusEq | MinusEq | StarEq
             | SlashEq | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | Dot | DotDot
             | DotDotDot | DotDotEq | Comma | Semi | Colon | PathSep | RArrow | LArrow
@@ -630,7 +630,7 @@ impl Token {
                 ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
             OpenDelim(Parenthesis | Brace | Bracket) | // tuple, array or block
             Literal(..) | // literal
-            Not | // operator not
+            Bang | // operator not
             Minus | // unary minus
             Star | // dereference
             Or | OrOr | // closure
@@ -701,7 +701,7 @@ impl Token {
                 ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Delimiter::Parenthesis) | // tuple
             OpenDelim(Delimiter::Bracket) | // array
-            Not | // never
+            Bang | // never
             Star | // raw pointer
             And | // reference
             AndAnd | // double reference
@@ -1004,8 +1004,8 @@ impl Token {
             (Gt, Ge) => ShrEq,
             (Gt, _) => return None,
-            (Not, Eq) => Ne,
-            (Not, _) => return None,
+            (Bang, Eq) => Ne,
+            (Bang, _) => return None,
             (Plus, Eq) => PlusEq,
             (Plus, _) => return None,


@@ -651,7 +651,7 @@ impl TokenStream {
         if attr_style == AttrStyle::Inner {
             vec![
                 TokenTree::token_joint(token::Pound, span),
-                TokenTree::token_joint_hidden(token::Not, span),
+                TokenTree::token_joint_hidden(token::Bang, span),
                 body,
             ]
         } else {


@@ -317,7 +317,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
         (tt1, Tok(Token { kind: Comma | Semi | Dot, .. }, _)) if !is_punct(tt1) => false,
         // IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if`
-        (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. }, _))
+        (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Bang, .. }, _))
             if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) =>
         {
             false
@@ -896,7 +896,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
             token::Ne => "!=".into(),
             token::Ge => ">=".into(),
             token::Gt => ">".into(),
-            token::Not => "!".into(),
+            token::Bang => "!".into(),
             token::Tilde => "~".into(),
             token::OrOr => "||".into(),
             token::AndAnd => "&&".into(),


@@ -328,7 +328,7 @@ impl<'a> StripUnconfigured<'a> {
         // For inner attributes, we do the same thing for the `!` in `#![attr]`.
         let mut trees = if cfg_attr.style == AttrStyle::Inner {
-            let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _)) =
+            let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Bang, .. }, _)) =
                 orig_trees.next()
             else {
                 panic!("Bad tokens for attribute {cfg_attr:?}");


@@ -432,7 +432,7 @@ fn check_nested_occurrences(
             }
             (
                 NestedMacroState::MacroRules,
-                &TokenTree::Token(Token { kind: TokenKind::Not, .. }),
+                &TokenTree::Token(Token { kind: TokenKind::Bang, .. }),
             ) => {
                 state = NestedMacroState::MacroRulesNot;
             }


@@ -690,7 +690,7 @@ fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
             && let TokenKind::Ident(ident, _) = ident.kind
             && ident == sym::compile_error
             && let mbe::TokenTree::Token(bang) = bang
-            && let TokenKind::Not = bang.kind
+            && let TokenKind::Bang = bang.kind
             && let mbe::TokenTree::Delimited(.., del) = args
             && !del.delim.skip()
         {


@@ -180,7 +180,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                 Gt => op(">"),
                 AndAnd => op("&&"),
                 OrOr => op("||"),
-                Not => op("!"),
+                Bang => op("!"),
                 Tilde => op("~"),
                 Plus => op("+"),
                 Minus => op("-"),
@@ -322,7 +322,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 b'=' => Eq,
                 b'<' => Lt,
                 b'>' => Gt,
-                b'!' => Not,
+                b'!' => Bang,
                 b'~' => Tilde,
                 b'+' => Plus,
                 b'-' => Minus,


@@ -384,7 +384,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
             rustc_lexer::TokenKind::Colon => token::Colon,
             rustc_lexer::TokenKind::Dollar => token::Dollar,
             rustc_lexer::TokenKind::Eq => token::Eq,
-            rustc_lexer::TokenKind::Bang => token::Not,
+            rustc_lexer::TokenKind::Bang => token::Bang,
             rustc_lexer::TokenKind::Lt => token::Lt,
             rustc_lexer::TokenKind::Gt => token::Gt,
             rustc_lexer::TokenKind::Minus => token::Minus,


@@ -312,7 +312,7 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     (",", "Comma", Some(token::Comma)),
     (";", "Semicolon", Some(token::Semi)),
     (":", "Colon", Some(token::Colon)),
-    ("!", "Exclamation Mark", Some(token::Not)),
+    ("!", "Exclamation Mark", Some(token::Bang)),
     ("?", "Question Mark", Some(token::Question)),
     (".", "Period", Some(token::Dot)),
     ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),


@@ -130,7 +130,7 @@ impl<'a> Parser<'a> {
             assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position");
             let style =
-                if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
+                if this.eat(exp!(Bang)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
             this.expect(exp!(OpenBracket))?;
             let item = this.parse_attr_item(ForceCollect::No)?;
@@ -312,7 +312,7 @@ impl<'a> Parser<'a> {
         loop {
             let start_pos = self.num_bump_calls;
             // Only try to parse if it is an inner attribute (has `!`).
-            let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) {
+            let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Bang) {
                 Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
             } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
                 if attr_style == ast::AttrStyle::Inner {


@@ -1961,7 +1961,7 @@ impl<'a> Parser<'a> {
         &mut self,
         await_sp: Span,
     ) -> PResult<'a, P<Expr>> {
-        let (hi, expr, is_question) = if self.token == token::Not {
+        let (hi, expr, is_question) = if self.token == token::Bang {
             // Handle `await!(<expr>)`.
             self.recover_await_macro()?
         } else {
@@ -1973,7 +1973,7 @@ impl<'a> Parser<'a> {
     }
     fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
-        self.expect(exp!(Not))?;
+        self.expect(exp!(Bang))?;
         self.expect(exp!(OpenParen))?;
         let expr = self.parse_expr()?;
         self.expect(exp!(CloseParen))?;
@@ -2033,7 +2033,7 @@ impl<'a> Parser<'a> {
     pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
         let is_try = self.token.is_keyword(kw::Try);
-        let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
+        let is_questionmark = self.look_ahead(1, |t| t == &token::Bang); //check for !
         let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for (
         if is_try && is_questionmark && is_open {


@@ -505,7 +505,7 @@ impl<'a> Parser<'a> {
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
         match this.token.uninterpolate().kind {
             // `!expr`
-            token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
+            token::Bang => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
             // `~expr`
             token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
             // `-expr`
@ -1570,7 +1570,7 @@ impl<'a> Parser<'a> {
}; };
// `!`, as an operator, is prefix, so we know this isn't that. // `!`, as an operator, is prefix, so we know this isn't that.
let (span, kind) = if self.eat(exp!(Not)) { let (span, kind) = if self.eat(exp!(Bang)) {
// MACRO INVOCATION expression // MACRO INVOCATION expression
if qself.is_some() { if qself.is_some() {
self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span)); self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));


@@ -382,7 +382,7 @@ impl<'a> Parser<'a> {
     /// Are we sure this could not possibly be a macro invocation?
     fn isnt_macro_invocation(&mut self) -> bool {
-        self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep)
+        self.check_ident() && self.look_ahead(1, |t| *t != token::Bang && *t != token::PathSep)
     }
     /// Recover on encountering a struct, enum, or method definition where the user
@@ -480,7 +480,7 @@ impl<'a> Parser<'a> {
     /// Parses an item macro, e.g., `item!();`.
     fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
         let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
-        self.expect(exp!(Not))?; // `!`
+        self.expect(exp!(Bang))?; // `!`
         match self.parse_delim_args() {
             // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
             Ok(args) => {
@@ -540,7 +540,7 @@ impl<'a> Parser<'a> {
     fn parse_polarity(&mut self) -> ast::ImplPolarity {
         // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
-        if self.check(exp!(Not)) && self.look_ahead(1, |t| t.can_begin_type()) {
+        if self.check(exp!(Bang)) && self.look_ahead(1, |t| t.can_begin_type()) {
             self.bump(); // `!`
             ast::ImplPolarity::Negative(self.prev_token.span)
         } else {
@@ -1579,7 +1579,7 @@ impl<'a> Parser<'a> {
             }
             let ident = this.parse_field_ident("enum", vlo)?;
-            if this.token == token::Not {
+            if this.token == token::Bang {
                 if let Err(err) = this.unexpected() {
                     err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
                 }
@@ -2034,7 +2034,7 @@ impl<'a> Parser<'a> {
         attrs: AttrVec,
     ) -> PResult<'a, FieldDef> {
         let name = self.parse_field_ident(adt_ty, lo)?;
-        if self.token == token::Not {
+        if self.token == token::Bang {
             if let Err(mut err) = self.unexpected() {
                 // Encounter the macro invocation
                 err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
@@ -2184,7 +2184,7 @@ impl<'a> Parser<'a> {
         if self.check_keyword(exp!(MacroRules)) {
             let macro_rules_span = self.token.span;
-            if self.look_ahead(1, |t| *t == token::Not) && self.look_ahead(2, |t| t.is_ident()) {
+            if self.look_ahead(1, |t| *t == token::Bang) && self.look_ahead(2, |t| t.is_ident()) {
                 return IsMacroRulesItem::Yes { has_bang: true };
             } else if self.look_ahead(1, |t| (t.is_ident())) {
                 // macro_rules foo
@@ -2209,11 +2209,11 @@ impl<'a> Parser<'a> {
         self.expect_keyword(exp!(MacroRules))?; // `macro_rules`
         if has_bang {
-            self.expect(exp!(Not))?; // `!`
+            self.expect(exp!(Bang))?; // `!`
         }
         let ident = self.parse_ident()?;
-        if self.eat(exp!(Not)) {
+        if self.eat(exp!(Bang)) {
             // Handle macro_rules! foo!
             let span = self.prev_token.span;
             self.dcx().emit_err(errors::MacroNameRemoveBang { span });


@@ -767,7 +767,7 @@ impl<'a> Parser<'a> {
             self.recover_dotdotdot_rest_pat(lo)
         } else if let Some(form) = self.parse_range_end() {
             self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             // Parse `!`
             self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
             PatKind::Never
@@ -823,7 +823,7 @@ impl<'a> Parser<'a> {
         };
         let span = lo.to(self.prev_token.span);
-        if qself.is_none() && self.check(exp!(Not)) {
+        if qself.is_none() && self.check(exp!(Bang)) {
             self.parse_pat_mac_invoc(path)?
         } else if let Some(form) = self.parse_range_end() {
             let begin = self.mk_expr(span, ExprKind::Path(qself, path));
@@ -1335,7 +1335,7 @@ impl<'a> Parser<'a> {
             | token::OpenDelim(Delimiter::Brace) // A struct pattern.
             | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
             | token::PathSep // A tuple / struct variant pattern.
-            | token::Not)) // A macro expanding to a pattern.
+            | token::Bang)) // A macro expanding to a pattern.
     }
     /// Parses `ident` or `ident @ pat`.


@@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
         let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
             let path = this.parse_path(PathStyle::Expr)?;
-            if this.eat(exp!(Not)) {
+            if this.eat(exp!(Bang)) {
                 let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
                 return Ok((
                     stmt_mac,


@@ -2291,7 +2291,7 @@ fn string_to_tts_macro() {
                 Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
                 _,
             ),
             TokenTree::Token(Token { kind: token::Not, .. }, _),
-            TokenTree::Token(Token { kind: token::Not, .. }, _),
+            TokenTree::Token(Token { kind: token::Bang, .. }, _),
             TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
             TokenTree::Delimited(.., macro_delim, macro_tts),
         ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {


@@ -25,7 +25,7 @@ pub enum TokenType {
     Gt,
     AndAnd,
     OrOr,
-    Not,
+    Bang,
     Tilde,
     // BinOps
@ -172,7 +172,7 @@ impl TokenType {
Gt, Gt,
AndAnd, AndAnd,
OrOr, OrOr,
Not, Bang,
Tilde, Tilde,
Plus, Plus,
@@ -366,7 +366,7 @@ impl TokenType {
             TokenType::Gt => "`>`",
             TokenType::AndAnd => "`&&`",
             TokenType::OrOr => "`||`",
-            TokenType::Not => "`!`",
+            TokenType::Bang => "`!`",
             TokenType::Tilde => "`~`",
             TokenType::Plus => "`+`",
@@ -479,7 +479,7 @@ macro_rules! exp {
     (Gt) => { exp!(@tok, Gt) };
     (AndAnd) => { exp!(@tok, AndAnd) };
     (OrOr) => { exp!(@tok, OrOr) };
-    (Not) => { exp!(@tok, Not) };
+    (Bang) => { exp!(@tok, Bang) };
     (Tilde) => { exp!(@tok, Tilde) };
     (Plus) => { exp!(@tok, Plus) };
     (Minus) => { exp!(@tok, Minus) };


@@ -260,7 +260,7 @@ impl<'a> Parser<'a> {
         let mut impl_dyn_multi = false;
         let kind = if self.check(exp!(OpenParen)) {
             self.parse_ty_tuple_or_parens(lo, allow_plus)?
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             // Never type `!`
             TyKind::Never
         } else if self.eat(exp!(Star)) {
@@ -817,7 +817,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, TyKind> {
         // Simple path
         let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
-        if self.eat(exp!(Not)) {
+        if self.eat(exp!(Bang)) {
             // Macro invocation in type position
             Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
         } else if allow_plus == AllowPlus::Yes && self.check_plus() {
@@ -870,7 +870,7 @@ impl<'a> Parser<'a> {
     fn can_begin_bound(&mut self) -> bool {
         self.check_path()
             || self.check_lifetime()
-            || self.check(exp!(Not))
+            || self.check(exp!(Bang))
             || self.check(exp!(Question))
             || self.check(exp!(Tilde))
             || self.check_keyword(exp!(For))
@@ -1021,7 +1021,7 @@ impl<'a> Parser<'a> {
         let polarity = if self.eat(exp!(Question)) {
             BoundPolarity::Maybe(self.prev_token.span)
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
             BoundPolarity::Negative(self.prev_token.span)
         } else {


@@ -140,7 +140,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
             (DollarParen, token::Plus | token::Star | token::Question) => (false, Other),
             (DollarParen, _) => (false, DollarParenSep),
             (DollarParenSep, token::Plus | token::Star) => (false, Other),
-            (Pound, token::Not) => (false, PoundBang),
+            (Pound, token::Bang) => (false, PoundBang),
             (_, token::Ident(symbol, IdentIsRaw::No))
                 if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) =>
             {


@@ -1088,7 +1088,7 @@ fn force_space_before(tok: &TokenKind) -> bool {
             | TokenKind::Gt
             | TokenKind::AndAnd
             | TokenKind::OrOr
-            | TokenKind::Not
+            | TokenKind::Bang
             | TokenKind::Tilde
             | TokenKind::PlusEq
             | TokenKind::MinusEq
@@ -1131,7 +1131,7 @@ fn next_space(tok: &TokenKind) -> SpaceState {
     debug!("next_space: {:?}", tok);
     match tok {
-        TokenKind::Not
+        TokenKind::Bang
         | TokenKind::And
         | TokenKind::Tilde
         | TokenKind::At