1
Fork 0

Speed up Parser::expected_token_types.

The parser pushes a `TokenType` to `Parser::expected_token_types` on
every call to the various `check`/`eat` methods, and clears it on every
call to `bump`. Some of those `TokenType` values are full tokens that
require cloning and dropping. This is a *lot* of work for something
that is only used in error messages, and it accounts for a significant
fraction of parsing execution time.

This commit overhauls `TokenType` so that `Parser::expected_token_types`
can be implemented as a bitset. This requires changing `TokenType` to a
C-style parameterless enum, and adding `TokenTypeSet` which uses a
`u128` for the bits. (The new `TokenType` has 105 variants.)

The new types `ExpTokenPair` and `ExpKeywordPair` are now arguments to
the `check`/`eat` methods. This is for maximum speed. The elements in
the pairs are always statically known; e.g. a
`token::BinOp(token::Star)` is always paired with a `TokenType::Star`.
So we now compute `TokenType`s in advance and pass them in to
`check`/`eat` rather than the current approach of constructing them on
insertion into `expected_token_types`.

Values of these pair types can be produced by the new `exp!` macro,
which is used at every `check`/`eat` call site. The macro is for
convenience, allowing any pair to be generated from a single identifier.

The ident/keyword filtering in `expected_one_of_not_found` is no longer
necessary. It was there to account for some sloppiness in
`TokenKind`/`TokenType` comparisons.

The existing `TokenType` is moved to a new file `token_type.rs`, and all
its new infrastructure is added to that file. There is more boilerplate
code than I would like, but I can't see how to make it shorter.
This commit is contained in:
Nicholas Nethercote 2024-12-04 15:55:06 +11:00
parent d5370d981f
commit b9bf0b4b10
22 changed files with 1357 additions and 793 deletions

View file

@ -8,6 +8,7 @@ mod nonterminal;
mod pat;
mod path;
mod stmt;
pub mod token_type;
mod ty;
use std::assert_matches::debug_assert_matches;
@ -39,11 +40,14 @@ use rustc_index::interval::IntervalSet;
use rustc_session::parse::ParseSess;
use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
use thin_vec::ThinVec;
use token_type::TokenTypeSet;
pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
use tracing::debug;
use crate::errors::{
self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
};
use crate::exp;
use crate::lexer::UnmatchedDelim;
#[cfg(test)]
@ -141,7 +145,7 @@ pub struct Parser<'a> {
pub prev_token: Token,
pub capture_cfg: bool,
restrictions: Restrictions,
expected_token_types: Vec<TokenType>,
expected_token_types: TokenTypeSet,
token_cursor: TokenCursor,
// The number of calls to `bump`, i.e. the position in the token stream.
num_bump_calls: u32,
@ -367,48 +371,21 @@ impl TokenCursor {
}
}
// The pre-existing representation of an "expected token" recorded for
// diagnostics. Note that `Token(TokenKind)` carries a full token value, which
// is why pushing it onto `expected_token_types` (and clearing that vec) costs
// clone/drop work — the motivation for replacing this enum with a bitset.
#[derive(Debug, Clone, PartialEq)]
enum TokenType {
// An expected concrete token, stored by value (the expensive case).
Token(TokenKind),
// An expected keyword, identified by its interned `Symbol`.
Keyword(Symbol),
// The remaining variants are broad token categories, used only to word
// "expected one of ..." error messages.
Operator,
Lifetime,
Ident,
Path,
Type,
Const,
}
impl TokenType {
/// Renders this expectation for an error message: concrete tokens and
/// keywords are pretty-printed inside backquotes (e.g. "`;`", "`match`"),
/// while the category variants map to fixed English descriptions.
// NOTE(review): this inherent `to_string` shadows the blanket
// `ToString::to_string`; callers presumably rely on the inherent method
// resolving first.
fn to_string(&self) -> String {
match self {
TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
TokenType::Keyword(kw) => format!("`{kw}`"),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
TokenType::Ident => "identifier".to_string(),
TokenType::Path => "path".to_string(),
TokenType::Type => "type".to_string(),
TokenType::Const => "a const expression".to_string(),
}
}
}
/// A sequence separator.
#[derive(Debug)]
struct SeqSep {
struct SeqSep<'a> {
/// The separator token.
sep: Option<TokenKind>,
sep: Option<ExpTokenPair<'a>>,
/// `true` if a trailing separator is allowed.
trailing_sep_allowed: bool,
}
impl SeqSep {
fn trailing_allowed(t: TokenKind) -> SeqSep {
SeqSep { sep: Some(t), trailing_sep_allowed: true }
impl<'a> SeqSep<'a> {
fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
SeqSep { sep: Some(sep), trailing_sep_allowed: true }
}
fn none() -> SeqSep {
fn none() -> SeqSep<'a> {
SeqSep { sep: None, trailing_sep_allowed: false }
}
}
@ -490,7 +467,7 @@ impl<'a> Parser<'a> {
prev_token: Token::dummy(),
capture_cfg: false,
restrictions: Restrictions::empty(),
expected_token_types: Vec::new(),
expected_token_types: TokenTypeSet::new(),
token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
num_bump_calls: 0,
break_last_token: 0,
@ -553,16 +530,16 @@ impl<'a> Parser<'a> {
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
if self.expected_token_types.is_empty() {
if self.token == *t {
if self.token == *exp.tok {
self.bump();
Ok(Recovered::No)
} else {
self.unexpected_try_recover(t)
self.unexpected_try_recover(exp.tok)
}
} else {
self.expect_one_of(slice::from_ref(t), &[])
self.expect_one_of(slice::from_ref(&exp), &[])
}
}
@ -571,13 +548,13 @@ impl<'a> Parser<'a> {
/// anything. Signal a fatal error if next token is unexpected.
fn expect_one_of(
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
edible: &[ExpTokenPair<'_>],
inedible: &[ExpTokenPair<'_>],
) -> PResult<'a, Recovered> {
if edible.contains(&self.token.kind) {
if edible.iter().any(|exp| exp.tok == &self.token.kind) {
self.bump();
Ok(Recovered::No)
} else if inedible.contains(&self.token.kind) {
} else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
// leave it in the input
Ok(Recovered::No)
} else if self.token != token::Eof
@ -622,10 +599,10 @@ impl<'a> Parser<'a> {
/// This method will automatically add `tok` to `expected_token_types` if `tok` is not
/// encountered.
#[inline]
fn check(&mut self, tok: &TokenKind) -> bool {
let is_present = self.token == *tok;
fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
let is_present = self.token == *exp.tok;
if !is_present {
self.expected_token_types.push(TokenType::Token(tok.clone()));
self.expected_token_types.insert(exp.token_type);
}
is_present
}
@ -653,8 +630,8 @@ impl<'a> Parser<'a> {
/// Consumes a token 'tok' if it exists. Returns whether the given token was present.
#[inline]
#[must_use]
pub fn eat(&mut self, tok: &TokenKind) -> bool {
let is_present = self.check(tok);
pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
let is_present = self.check(exp);
if is_present {
self.bump()
}
@ -665,23 +642,23 @@ impl<'a> Parser<'a> {
/// An expectation is also added for diagnostics purposes.
#[inline]
#[must_use]
fn check_keyword(&mut self, kw: Symbol) -> bool {
let is_keyword = self.token.is_keyword(kw);
fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
let is_keyword = self.token.is_keyword(exp.kw);
if !is_keyword {
self.expected_token_types.push(TokenType::Keyword(kw));
self.expected_token_types.insert(exp.token_type);
}
is_keyword
}
#[inline]
#[must_use]
fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
if self.check_keyword(kw) {
fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
if self.check_keyword(exp) {
true
// Do an ASCII case-insensitive match, because all keywords are ASCII.
} else if case == Case::Insensitive
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().eq_ignore_ascii_case(kw.as_str())
&& ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
{
true
} else {
@ -694,8 +671,8 @@ impl<'a> Parser<'a> {
// Public for rustc_builtin_macros and rustfmt usage.
#[inline]
#[must_use]
pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
let is_keyword = self.check_keyword(kw);
pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
let is_keyword = self.check_keyword(exp);
if is_keyword {
self.bump();
}
@ -707,14 +684,14 @@ impl<'a> Parser<'a> {
/// This is useful for recovery.
#[inline]
#[must_use]
fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
if self.eat_keyword(kw) {
fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
if self.eat_keyword(exp) {
true
} else if case == Case::Insensitive
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
&& ident.as_str().to_lowercase() == exp.kw.as_str().to_lowercase()
{
self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
self.bump();
true
} else {
@ -738,8 +715,8 @@ impl<'a> Parser<'a> {
/// If the given word is not a keyword, signals an error.
/// If the next token is not the given word, signals an error.
/// Otherwise, eats it.
pub fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
}
/// Is the given keyword `kw` followed by a non-reserved identifier?
@ -748,9 +725,9 @@ impl<'a> Parser<'a> {
}
#[inline]
fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
if !ok {
self.expected_token_types.push(typ);
self.expected_token_types.insert(token_type);
}
ok
}
@ -795,22 +772,19 @@ impl<'a> Parser<'a> {
/// Otherwise returns `false`.
#[inline]
fn check_plus(&mut self) -> bool {
self.check_or_expected(
self.token.is_like_plus(),
TokenType::Token(token::BinOp(token::Plus)),
)
self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
}
/// Eats the expected token if it's present possibly breaking
/// compound tokens like multi-character operators in process.
/// Returns `true` if the token was eaten.
fn break_and_eat(&mut self, expected: TokenKind) -> bool {
if self.token == expected {
fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
if self.token == *exp.tok {
self.bump();
return true;
}
match self.token.kind.break_two_token_op(1) {
Some((first, second)) if first == expected => {
Some((first, second)) if first == *exp.tok => {
let first_span = self.psess.source_map().start_point(self.token.span);
let second_span = self.token.span.with_lo(first_span.hi());
self.token = Token::new(first, first_span);
@ -827,7 +801,7 @@ impl<'a> Parser<'a> {
true
}
_ => {
self.expected_token_types.push(TokenType::Token(expected));
self.expected_token_types.insert(exp.token_type);
false
}
}
@ -835,24 +809,24 @@ impl<'a> Parser<'a> {
/// Eats `+` possibly breaking tokens like `+=` in process.
fn eat_plus(&mut self) -> bool {
self.break_and_eat(token::BinOp(token::Plus))
self.break_and_eat(exp!(Plus))
}
/// Eats `&` possibly breaking tokens like `&&` in process.
/// Signals an error if `&` is not eaten.
fn expect_and(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
}
/// Eats `|` possibly breaking tokens like `||` in process.
/// Signals an error if `|` was not eaten.
fn expect_or(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
}
/// Eats `<` possibly breaking tokens like `<<` in process.
fn eat_lt(&mut self) -> bool {
let ate = self.break_and_eat(token::Lt);
let ate = self.break_and_eat(exp!(Lt));
if ate {
// See doc comment for `unmatched_angle_bracket_count`.
self.unmatched_angle_bracket_count += 1;
@ -870,7 +844,7 @@ impl<'a> Parser<'a> {
/// Eats `>` possibly breaking tokens like `>>` in process.
/// Signals an error if `>` was not eaten.
fn expect_gt(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::Gt) {
if self.break_and_eat(exp!(Gt)) {
// See doc comment for `unmatched_angle_bracket_count`.
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
@ -885,10 +859,10 @@ impl<'a> Parser<'a> {
/// Checks if the next token is contained within `closes`, and returns `true` if so.
fn expect_any_with_type(
&mut self,
closes_expected: &[&TokenKind],
closes_expected: &[ExpTokenPair<'_>],
closes_not_expected: &[&TokenKind],
) -> bool {
closes_expected.iter().any(|k| self.check(k))
closes_expected.iter().any(|&close| self.check(close))
|| closes_not_expected.iter().any(|k| self.check_noexpect(k))
}
@ -897,9 +871,9 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_seq_to_before_tokens<T>(
&mut self,
closes_expected: &[&TokenKind],
closes_expected: &[ExpTokenPair<'_>],
closes_not_expected: &[&TokenKind],
sep: SeqSep,
sep: SeqSep<'_>,
mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
let mut first = true;
@ -911,13 +885,13 @@ impl<'a> Parser<'a> {
if let token::CloseDelim(..) | token::Eof = self.token.kind {
break;
}
if let Some(t) = &sep.sep {
if let Some(exp) = sep.sep {
if first {
// no separator for the first element
first = false;
} else {
// check for separator
match self.expect(t) {
match self.expect(exp) {
Ok(Recovered::No) => {
self.current_closure.take();
}
@ -928,7 +902,7 @@ impl<'a> Parser<'a> {
}
Err(mut expect_err) => {
let sp = self.prev_token.span.shrink_to_hi();
let token_str = pprust::token_kind_to_string(t);
let token_str = pprust::token_kind_to_string(exp.tok);
match self.current_closure.take() {
Some(closure_spans) if self.token == TokenKind::Semi => {
@ -948,7 +922,7 @@ impl<'a> Parser<'a> {
_ => {
// Attempt to keep parsing if it was a similar separator.
if let Some(tokens) = t.similar_tokens() {
if let Some(tokens) = exp.tok.similar_tokens() {
if tokens.contains(&self.token.kind) {
self.bump();
}
@ -998,15 +972,17 @@ impl<'a> Parser<'a> {
// Parsing failed, therefore it must be something more serious
// than just a missing separator.
for xx in &e.children {
// propagate the help message from sub error 'e' to main error 'expect_err;
// Propagate the help message from sub error `e` to main
// error `expect_err`.
expect_err.children.push(xx.clone());
}
e.cancel();
if self.token == token::Colon {
// we will try to recover in `maybe_recover_struct_lit_bad_delims`
// We will try to recover in
// `maybe_recover_struct_lit_bad_delims`.
return Err(expect_err);
} else if let [token::CloseDelim(Delimiter::Parenthesis)] =
closes_expected
} else if let [exp] = closes_expected
&& exp.token_type == TokenType::CloseParen
{
return Err(expect_err);
} else {
@ -1040,7 +1016,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, ()> {
let initial_semicolon = self.token.span;
while self.eat(&TokenKind::Semi) {
while self.eat(exp!(Semi)) {
let _ = self.parse_stmt_without_recovery(false, ForceCollect::No).unwrap_or_else(|e| {
e.cancel();
None
@ -1096,8 +1072,8 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_seq_to_before_end<T>(
&mut self,
close: &TokenKind,
sep: SeqSep,
close: ExpTokenPair<'_>,
sep: SeqSep<'_>,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
self.parse_seq_to_before_tokens(&[close], &[], sep, f)
@ -1108,8 +1084,8 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_seq_to_end<T>(
&mut self,
close: &TokenKind,
sep: SeqSep,
close: ExpTokenPair<'_>,
sep: SeqSep<'_>,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing)> {
let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
@ -1127,9 +1103,9 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_unspanned_seq<T>(
&mut self,
open: &TokenKind,
close: &TokenKind,
sep: SeqSep,
open: ExpTokenPair<'_>,
close: ExpTokenPair<'_>,
sep: SeqSep<'_>,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.expect(open)?;
@ -1141,15 +1117,11 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_delim_comma_seq<T>(
&mut self,
delim: Delimiter,
open: ExpTokenPair<'_>,
close: ExpTokenPair<'_>,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
SeqSep::trailing_allowed(token::Comma),
f,
)
self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
}
/// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
@ -1159,7 +1131,7 @@ impl<'a> Parser<'a> {
&mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
}
/// Advance the parser by one token using provided token as the next one.
@ -1265,11 +1237,11 @@ impl<'a> Parser<'a> {
/// Parses asyncness: `async` or nothing.
fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
let span = self.token.uninterpolated_span();
if self.eat_keyword_case(kw::Async, case) {
if self.eat_keyword_case(exp!(Async), case) {
// FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
// error if edition <= 2024, like we do with async and edition <= 2018?
if self.token.uninterpolated_span().at_least_rust_2024()
&& self.eat_keyword_case(kw::Gen, case)
&& self.eat_keyword_case(exp!(Gen), case)
{
let gen_span = self.prev_token.uninterpolated_span();
Some(CoroutineKind::AsyncGen {
@ -1285,7 +1257,7 @@ impl<'a> Parser<'a> {
})
}
} else if self.token.uninterpolated_span().at_least_rust_2024()
&& self.eat_keyword_case(kw::Gen, case)
&& self.eat_keyword_case(exp!(Gen), case)
{
Some(CoroutineKind::Gen {
span,
@ -1299,9 +1271,9 @@ impl<'a> Parser<'a> {
/// Parses fn unsafety: `unsafe`, `safe` or nothing.
fn parse_safety(&mut self, case: Case) -> Safety {
if self.eat_keyword_case(kw::Unsafe, case) {
if self.eat_keyword_case(exp!(Unsafe), case) {
Safety::Unsafe(self.prev_token.uninterpolated_span())
} else if self.eat_keyword_case(kw::Safe, case) {
} else if self.eat_keyword_case(exp!(Safe), case) {
Safety::Safe(self.prev_token.uninterpolated_span())
} else {
Safety::Default
@ -1327,7 +1299,7 @@ impl<'a> Parser<'a> {
if (self.check_const_closure() == is_closure)
&& !self
.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
&& self.eat_keyword_case(kw::Const, case)
&& self.eat_keyword_case(exp!(Const), case)
{
Const::Yes(self.prev_token.uninterpolated_span())
} else {
@ -1340,7 +1312,7 @@ impl<'a> Parser<'a> {
if pat {
self.psess.gated_spans.gate(sym::inline_const_pat, span);
}
self.expect_keyword(kw::Const)?;
self.expect_keyword(exp!(Const))?;
let (attrs, blk) = self.parse_inner_attrs_and_block()?;
let anon_const = AnonConst {
id: DUMMY_NODE_ID,
@ -1352,19 +1324,19 @@ impl<'a> Parser<'a> {
/// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
}
/// Parses reference binding mode (`ref`, `ref mut`, or nothing).
fn parse_byref(&mut self) -> ByRef {
if self.eat_keyword(kw::Ref) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
}
/// Possibly parses mutability (`const` or `mut`).
fn parse_const_or_mut(&mut self) -> Option<Mutability> {
if self.eat_keyword(kw::Mut) {
if self.eat_keyword(exp!(Mut)) {
Some(Mutability::Mut)
} else if self.eat_keyword(kw::Const) {
} else if self.eat_keyword(exp!(Const)) {
Some(Mutability::Not)
} else {
None
@ -1395,7 +1367,7 @@ impl<'a> Parser<'a> {
fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
Ok(if let Some(args) = self.parse_delim_args_inner() {
AttrArgs::Delimited(args)
} else if self.eat(&token::Eq) {
} else if self.eat(exp!(Eq)) {
let eq_span = self.prev_token.span;
AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
} else {
@ -1404,9 +1376,9 @@ impl<'a> Parser<'a> {
}
fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
let delimited = self.check(&token::OpenDelim(Delimiter::Parenthesis))
|| self.check(&token::OpenDelim(Delimiter::Bracket))
|| self.check(&token::OpenDelim(Delimiter::Brace));
let delimited = self.check(exp!(OpenParen))
|| self.check(exp!(OpenBracket))
|| self.check(exp!(OpenBrace));
delimited.then(|| {
let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
@ -1485,7 +1457,7 @@ impl<'a> Parser<'a> {
pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |vis| vis.into_inner());
if !self.eat_keyword(kw::Pub) {
if !self.eat_keyword(exp!(Pub)) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
// keyword to grab a span from for inherited visibility; an empty span at the
// beginning of the current token would seem to be the "Schelling span".
@ -1497,7 +1469,7 @@ impl<'a> Parser<'a> {
}
let lo = self.prev_token.span;
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
if self.check(exp!(OpenParen)) {
// We don't `self.bump()` the `(` yet because this might be a struct definition where
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
@ -1507,7 +1479,7 @@ impl<'a> Parser<'a> {
self.bump(); // `(`
self.bump(); // `in`
let path = self.parse_path(PathStyle::Mod)?; // `path`
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
self.expect(exp!(CloseParen))?; // `)`
let vis = VisibilityKind::Restricted {
path: P(path),
id: ast::DUMMY_NODE_ID,
@ -1524,7 +1496,7 @@ impl<'a> Parser<'a> {
// Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
self.expect(exp!(CloseParen))?; // `)`
let vis = VisibilityKind::Restricted {
path: P(path),
id: ast::DUMMY_NODE_ID,
@ -1550,7 +1522,7 @@ impl<'a> Parser<'a> {
fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
self.expect(exp!(CloseParen))?; // `)`
let path_str = pprust::path_to_string(&path);
self.dcx()
@ -1561,7 +1533,7 @@ impl<'a> Parser<'a> {
/// Parses `extern string_literal?`.
fn parse_extern(&mut self, case: Case) -> Extern {
if self.eat_keyword_case(kw::Extern, case) {
if self.eat_keyword_case(exp!(Extern), case) {
let mut extern_span = self.prev_token.span;
let abi = self.parse_abi();
if let Some(abi) = abi {
@ -1601,7 +1573,7 @@ impl<'a> Parser<'a> {
/// Checks for `::` or, potentially, `:::` and then look ahead after it.
fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
if self.check(&token::PathSep) {
if self.check(exp!(PathSep)) {
if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
self.look_ahead(2, looker)