1
Fork 0

Rename Parser::expected_tokens as Parser::expected_token_types.

The `Token` type is similar to, but different from, the `TokenType`
type, and the difference is important, so we want to avoid confusion.
This commit is contained in:
Nicholas Nethercote 2024-12-03 20:09:29 +11:00
parent c434b4b4b6
commit 48f7714819
7 changed files with 26 additions and 25 deletions

View file

@@ -95,7 +95,7 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a,
while p.token != token::Eof { while p.token != token::Eof {
if !p.eat(&token::Comma) { if !p.eat(&token::Comma) {
if first { if first {
p.clear_expected_tokens(); p.clear_expected_token_types();
} }
match p.expect(&token::Comma) { match p.expect(&token::Comma) {

View file

@@ -483,9 +483,10 @@ impl<'a> Parser<'a> {
}) })
} }
self.expected_tokens.extend(edible.iter().chain(inedible).cloned().map(TokenType::Token)); self.expected_token_types
.extend(edible.iter().chain(inedible).cloned().map(TokenType::Token));
let mut expected = self let mut expected = self
.expected_tokens .expected_token_types
.iter() .iter()
.filter(|token| { .filter(|token| {
// Filter out suggestions that suggest the same token which was found and deemed incorrect. // Filter out suggestions that suggest the same token which was found and deemed incorrect.
@@ -785,17 +786,17 @@ impl<'a> Parser<'a> {
let Some((curr_ident, _)) = self.token.ident() else { let Some((curr_ident, _)) = self.token.ident() else {
return; return;
}; };
let expected_tokens: &[TokenType] = let expected_token_types: &[TokenType] =
expected.len().checked_sub(10).map_or(&expected, |index| &expected[index..]); expected.len().checked_sub(10).map_or(&expected, |index| &expected[index..]);
let expected_keywords: Vec<Symbol> = expected_tokens let expected_keywords: Vec<Symbol> = expected_token_types
.iter() .iter()
.filter_map(|token| if let TokenType::Keyword(kw) = token { Some(*kw) } else { None }) .filter_map(|token| if let TokenType::Keyword(kw) = token { Some(*kw) } else { None })
.collect(); .collect();
// When there are a few keywords in the last ten elements of `self.expected_tokens` and the current // When there are a few keywords in the last ten elements of `self.expected_token_types`
// token is an identifier, it's probably a misspelled keyword. // and the current token is an identifier, it's probably a misspelled keyword. This handles
// This handles code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in `if`-`else` // code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in
// and misspelled `where` in a where clause. // `if`-`else` and misspelled `where` in a where clause.
if !expected_keywords.is_empty() if !expected_keywords.is_empty()
&& !curr_ident.is_used_keyword() && !curr_ident.is_used_keyword()
&& let Some(misspelled_kw) = find_similar_kw(curr_ident, &expected_keywords) && let Some(misspelled_kw) = find_similar_kw(curr_ident, &expected_keywords)
@@ -3016,7 +3017,7 @@ impl<'a> Parser<'a> {
/// Check for exclusive ranges written as `..<` /// Check for exclusive ranges written as `..<`
pub(crate) fn maybe_err_dotdotlt_syntax(&self, maybe_lt: Token, mut err: Diag<'a>) -> Diag<'a> { pub(crate) fn maybe_err_dotdotlt_syntax(&self, maybe_lt: Token, mut err: Diag<'a>) -> Diag<'a> {
if maybe_lt == token::Lt if maybe_lt == token::Lt
&& (self.expected_tokens.contains(&TokenType::Token(token::Gt)) && (self.expected_token_types.contains(&TokenType::Token(token::Gt))
|| matches!(self.token.kind, token::Literal(..))) || matches!(self.token.kind, token::Literal(..)))
{ {
err.span_suggestion( err.span_suggestion(

View file

@@ -153,7 +153,7 @@ impl<'a> Parser<'a> {
return Ok((lhs, parsed_something)); return Ok((lhs, parsed_something));
} }
self.expected_tokens.push(TokenType::Operator); self.expected_token_types.push(TokenType::Operator);
while let Some(op) = self.check_assoc_op() { while let Some(op) = self.check_assoc_op() {
let lhs_span = self.interpolated_or_expr_span(&lhs); let lhs_span = self.interpolated_or_expr_span(&lhs);
let cur_op_span = self.token.span; let cur_op_span = self.token.span;

View file

@@ -2630,7 +2630,7 @@ impl<'a> Parser<'a> {
if !self.eat_keyword_case(kw::Fn, case) { if !self.eat_keyword_case(kw::Fn, case) {
// It is possible for `expect_one_of` to recover given the contents of // It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't // `self.expected_token_types`, therefore, do not use `self.unexpected()` which doesn't
// account for this. // account for this.
match self.expect_one_of(&[], &[]) { match self.expect_one_of(&[], &[]) {
Ok(Recovered::Yes(_)) => {} Ok(Recovered::Yes(_)) => {}

View file

@@ -141,7 +141,7 @@ pub struct Parser<'a> {
pub prev_token: Token, pub prev_token: Token,
pub capture_cfg: bool, pub capture_cfg: bool,
restrictions: Restrictions, restrictions: Restrictions,
expected_tokens: Vec<TokenType>, expected_token_types: Vec<TokenType>,
token_cursor: TokenCursor, token_cursor: TokenCursor,
// The number of calls to `bump`, i.e. the position in the token stream. // The number of calls to `bump`, i.e. the position in the token stream.
num_bump_calls: u32, num_bump_calls: u32,
@@ -490,7 +490,7 @@ impl<'a> Parser<'a> {
prev_token: Token::dummy(), prev_token: Token::dummy(),
capture_cfg: false, capture_cfg: false,
restrictions: Restrictions::empty(), restrictions: Restrictions::empty(),
expected_tokens: Vec::new(), expected_token_types: Vec::new(),
token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() }, token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
num_bump_calls: 0, num_bump_calls: 0,
break_last_token: 0, break_last_token: 0,
@@ -554,7 +554,7 @@ impl<'a> Parser<'a> {
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`. /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> { pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
if self.expected_tokens.is_empty() { if self.expected_token_types.is_empty() {
if self.token == *t { if self.token == *t {
self.bump(); self.bump();
Ok(Recovered::No) Ok(Recovered::No)
@@ -619,13 +619,13 @@ impl<'a> Parser<'a> {
/// Checks if the next token is `tok`, and returns `true` if so. /// Checks if the next token is `tok`, and returns `true` if so.
/// ///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
/// encountered. /// encountered.
#[inline] #[inline]
fn check(&mut self, tok: &TokenKind) -> bool { fn check(&mut self, tok: &TokenKind) -> bool {
let is_present = self.token == *tok; let is_present = self.token == *tok;
if !is_present { if !is_present {
self.expected_tokens.push(TokenType::Token(tok.clone())); self.expected_token_types.push(TokenType::Token(tok.clone()));
} }
is_present is_present
} }
@@ -666,7 +666,7 @@ impl<'a> Parser<'a> {
#[inline] #[inline]
#[must_use] #[must_use]
fn check_keyword(&mut self, kw: Symbol) -> bool { fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw)); self.expected_token_types.push(TokenType::Keyword(kw));
self.token.is_keyword(kw) self.token.is_keyword(kw)
} }
@@ -755,7 +755,7 @@ impl<'a> Parser<'a> {
if ok { if ok {
true true
} else { } else {
self.expected_tokens.push(typ); self.expected_token_types.push(typ);
false false
} }
} }
@@ -832,7 +832,7 @@ impl<'a> Parser<'a> {
true true
} }
_ => { _ => {
self.expected_tokens.push(TokenType::Token(expected)); self.expected_token_types.push(TokenType::Token(expected));
false false
} }
} }
@@ -1180,7 +1180,7 @@ impl<'a> Parser<'a> {
self.token_spacing = next_spacing; self.token_spacing = next_spacing;
// Diagnostics. // Diagnostics.
self.expected_tokens.clear(); self.expected_token_types.clear();
} }
/// Advance the parser by one token. /// Advance the parser by one token.
@@ -1670,8 +1670,8 @@ impl<'a> Parser<'a> {
DebugParser { parser: self, lookahead } DebugParser { parser: self, lookahead }
} }
pub fn clear_expected_tokens(&mut self) { pub fn clear_expected_token_types(&mut self) {
self.expected_tokens.clear(); self.expected_token_types.clear();
} }
pub fn approx_token_stream_pos(&self) -> u32 { pub fn approx_token_stream_pos(&self) -> u32 {

View file

@@ -300,7 +300,7 @@ impl<'a> Parser<'a> {
) )
}; };
let check_args_start = |this: &mut Self| { let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(&[ this.expected_token_types.extend_from_slice(&[
TokenType::Token(token::Lt), TokenType::Token(token::Lt),
TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)), TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)),
]); ]);

View file

@@ -1280,7 +1280,7 @@ impl<'a> Parser<'a> {
} }
pub(super) fn check_lifetime(&mut self) -> bool { pub(super) fn check_lifetime(&mut self) -> bool {
self.expected_tokens.push(TokenType::Lifetime); self.expected_token_types.push(TokenType::Lifetime);
self.token.is_lifetime() self.token.is_lifetime()
} }