Rename Parser::expected_tokens
as Parser::expected_token_types
.
Because the `Token` type is similar to, but different from, the `TokenType` type, and the difference is important, we want to avoid confusion.
This commit is contained in:
parent
c434b4b4b6
commit
48f7714819
7 changed files with 26 additions and 25 deletions
|
@ -483,9 +483,10 @@ impl<'a> Parser<'a> {
|
|||
})
|
||||
}
|
||||
|
||||
self.expected_tokens.extend(edible.iter().chain(inedible).cloned().map(TokenType::Token));
|
||||
self.expected_token_types
|
||||
.extend(edible.iter().chain(inedible).cloned().map(TokenType::Token));
|
||||
let mut expected = self
|
||||
.expected_tokens
|
||||
.expected_token_types
|
||||
.iter()
|
||||
.filter(|token| {
|
||||
// Filter out suggestions that suggest the same token which was found and deemed incorrect.
|
||||
|
@ -785,17 +786,17 @@ impl<'a> Parser<'a> {
|
|||
let Some((curr_ident, _)) = self.token.ident() else {
|
||||
return;
|
||||
};
|
||||
let expected_tokens: &[TokenType] =
|
||||
let expected_token_types: &[TokenType] =
|
||||
expected.len().checked_sub(10).map_or(&expected, |index| &expected[index..]);
|
||||
let expected_keywords: Vec<Symbol> = expected_tokens
|
||||
let expected_keywords: Vec<Symbol> = expected_token_types
|
||||
.iter()
|
||||
.filter_map(|token| if let TokenType::Keyword(kw) = token { Some(*kw) } else { None })
|
||||
.collect();
|
||||
|
||||
// When there are a few keywords in the last ten elements of `self.expected_tokens` and the current
|
||||
// token is an identifier, it's probably a misspelled keyword.
|
||||
// This handles code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in `if`-`else`
|
||||
// and mispelled `where` in a where clause.
|
||||
// When there are a few keywords in the last ten elements of `self.expected_token_types`
|
||||
// and the current token is an identifier, it's probably a misspelled keyword. This handles
|
||||
// code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in
|
||||
// `if`-`else` and misspelled `where` in a where clause.
|
||||
if !expected_keywords.is_empty()
|
||||
&& !curr_ident.is_used_keyword()
|
||||
&& let Some(misspelled_kw) = find_similar_kw(curr_ident, &expected_keywords)
|
||||
|
@ -3016,7 +3017,7 @@ impl<'a> Parser<'a> {
|
|||
/// Check for exclusive ranges written as `..<`
|
||||
pub(crate) fn maybe_err_dotdotlt_syntax(&self, maybe_lt: Token, mut err: Diag<'a>) -> Diag<'a> {
|
||||
if maybe_lt == token::Lt
|
||||
&& (self.expected_tokens.contains(&TokenType::Token(token::Gt))
|
||||
&& (self.expected_token_types.contains(&TokenType::Token(token::Gt))
|
||||
|| matches!(self.token.kind, token::Literal(..)))
|
||||
{
|
||||
err.span_suggestion(
|
||||
|
|
|
@ -153,7 +153,7 @@ impl<'a> Parser<'a> {
|
|||
return Ok((lhs, parsed_something));
|
||||
}
|
||||
|
||||
self.expected_tokens.push(TokenType::Operator);
|
||||
self.expected_token_types.push(TokenType::Operator);
|
||||
while let Some(op) = self.check_assoc_op() {
|
||||
let lhs_span = self.interpolated_or_expr_span(&lhs);
|
||||
let cur_op_span = self.token.span;
|
||||
|
|
|
@ -2630,7 +2630,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
if !self.eat_keyword_case(kw::Fn, case) {
|
||||
// It is possible for `expect_one_of` to recover given the contents of
|
||||
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
|
||||
// `self.expected_token_types`, therefore, do not use `self.unexpected()` which doesn't
|
||||
// account for this.
|
||||
match self.expect_one_of(&[], &[]) {
|
||||
Ok(Recovered::Yes(_)) => {}
|
||||
|
|
|
@ -141,7 +141,7 @@ pub struct Parser<'a> {
|
|||
pub prev_token: Token,
|
||||
pub capture_cfg: bool,
|
||||
restrictions: Restrictions,
|
||||
expected_tokens: Vec<TokenType>,
|
||||
expected_token_types: Vec<TokenType>,
|
||||
token_cursor: TokenCursor,
|
||||
// The number of calls to `bump`, i.e. the position in the token stream.
|
||||
num_bump_calls: u32,
|
||||
|
@ -490,7 +490,7 @@ impl<'a> Parser<'a> {
|
|||
prev_token: Token::dummy(),
|
||||
capture_cfg: false,
|
||||
restrictions: Restrictions::empty(),
|
||||
expected_tokens: Vec::new(),
|
||||
expected_token_types: Vec::new(),
|
||||
token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
|
||||
num_bump_calls: 0,
|
||||
break_last_token: 0,
|
||||
|
@ -554,7 +554,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
|
||||
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
|
||||
if self.expected_tokens.is_empty() {
|
||||
if self.expected_token_types.is_empty() {
|
||||
if self.token == *t {
|
||||
self.bump();
|
||||
Ok(Recovered::No)
|
||||
|
@ -619,13 +619,13 @@ impl<'a> Parser<'a> {
|
|||
|
||||
/// Checks if the next token is `tok`, and returns `true` if so.
|
||||
///
|
||||
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
|
||||
/// This method will automatically add `tok` to `expected_token_types` if `tok` is not
|
||||
/// encountered.
|
||||
#[inline]
|
||||
fn check(&mut self, tok: &TokenKind) -> bool {
|
||||
let is_present = self.token == *tok;
|
||||
if !is_present {
|
||||
self.expected_tokens.push(TokenType::Token(tok.clone()));
|
||||
self.expected_token_types.push(TokenType::Token(tok.clone()));
|
||||
}
|
||||
is_present
|
||||
}
|
||||
|
@ -666,7 +666,7 @@ impl<'a> Parser<'a> {
|
|||
#[inline]
|
||||
#[must_use]
|
||||
fn check_keyword(&mut self, kw: Symbol) -> bool {
|
||||
self.expected_tokens.push(TokenType::Keyword(kw));
|
||||
self.expected_token_types.push(TokenType::Keyword(kw));
|
||||
self.token.is_keyword(kw)
|
||||
}
|
||||
|
||||
|
@ -755,7 +755,7 @@ impl<'a> Parser<'a> {
|
|||
if ok {
|
||||
true
|
||||
} else {
|
||||
self.expected_tokens.push(typ);
|
||||
self.expected_token_types.push(typ);
|
||||
false
|
||||
}
|
||||
}
|
||||
|
@ -832,7 +832,7 @@ impl<'a> Parser<'a> {
|
|||
true
|
||||
}
|
||||
_ => {
|
||||
self.expected_tokens.push(TokenType::Token(expected));
|
||||
self.expected_token_types.push(TokenType::Token(expected));
|
||||
false
|
||||
}
|
||||
}
|
||||
|
@ -1180,7 +1180,7 @@ impl<'a> Parser<'a> {
|
|||
self.token_spacing = next_spacing;
|
||||
|
||||
// Diagnostics.
|
||||
self.expected_tokens.clear();
|
||||
self.expected_token_types.clear();
|
||||
}
|
||||
|
||||
/// Advance the parser by one token.
|
||||
|
@ -1670,8 +1670,8 @@ impl<'a> Parser<'a> {
|
|||
DebugParser { parser: self, lookahead }
|
||||
}
|
||||
|
||||
pub fn clear_expected_tokens(&mut self) {
|
||||
self.expected_tokens.clear();
|
||||
pub fn clear_expected_token_types(&mut self) {
|
||||
self.expected_token_types.clear();
|
||||
}
|
||||
|
||||
pub fn approx_token_stream_pos(&self) -> u32 {
|
||||
|
|
|
@ -300,7 +300,7 @@ impl<'a> Parser<'a> {
|
|||
)
|
||||
};
|
||||
let check_args_start = |this: &mut Self| {
|
||||
this.expected_tokens.extend_from_slice(&[
|
||||
this.expected_token_types.extend_from_slice(&[
|
||||
TokenType::Token(token::Lt),
|
||||
TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)),
|
||||
]);
|
||||
|
|
|
@ -1280,7 +1280,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
|
||||
pub(super) fn check_lifetime(&mut self) -> bool {
|
||||
self.expected_tokens.push(TokenType::Lifetime);
|
||||
self.expected_token_types.push(TokenType::Lifetime);
|
||||
self.token.is_lifetime()
|
||||
}
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue