
Mark Parser::eat/check methods as must_use

Michael Goulet 2024-07-29 21:21:15 -04:00
parent 612a33f20b
commit e4076e34f8
9 changed files with 44 additions and 13 deletions
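The rationale, in short: `eat`/`check`-style methods report through their `bool` return value whether the token was actually consumed, and silently dropping that value can hide real parse bugs — the `pattern_type!` hunk and the new `missing-is` test below are exactly such a case. Here is a minimal, standalone sketch of the discipline the `#[must_use]` annotations enforce; `ToyParser`, its string tokens, and the `String` error type are illustrative stand-ins, not rustc's actual `Parser` API:

```rust
// Minimal standalone sketch of the return-value discipline this commit enforces.
// `ToyParser` is illustrative only; it is not rustc's `Parser`.
struct ToyParser {
    tokens: Vec<&'static str>,
    pos: usize,
}

impl ToyParser {
    /// Consumes `kw` if it is the next token; returns whether it was present.
    /// With `#[must_use]`, a call site that drops the `bool` now gets an
    /// `unused_must_use` warning and has to opt out explicitly with `let _ = ...`.
    #[must_use]
    fn eat_keyword(&mut self, kw: &str) -> bool {
        if self.tokens.get(self.pos).copied() == Some(kw) {
            self.pos += 1;
            true
        } else {
            false
        }
    }

    /// For tokens that are required rather than optional: turn a silently
    /// dropped `false` into an error the caller must handle — the pattern
    /// behind the `eat_keyword(..)` -> `expect_keyword(..)?` changes below.
    fn expect_keyword(&mut self, kw: &str) -> Result<(), String> {
        if self.eat_keyword(kw) { Ok(()) } else { Err(format!("expected `{kw}`")) }
    }
}

fn main() -> Result<(), String> {
    let mut p = ToyParser { tokens: vec!["const", "{"], pos: 0 };
    let _ = p.eat_keyword("unsafe"); // optional token: acknowledge the unused result
    p.expect_keyword("const")?; // required token: propagate the failure
    assert_eq!(p.pos, 1);
    Ok(())
}
```

Call sites that treat the token as genuinely optional keep the old behaviour by writing `let _ = ...` (the shape of most hunks in this commit), while the places where the token is mandatory switch to `expect(...)`/`expect_keyword(...)?` and propagate the error.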

@@ -24,7 +24,7 @@ fn parse_pat_ty<'a>(cx: &mut ExtCtxt<'a>, stream: TokenStream) -> PResult<'a, (P
     let mut parser = cx.new_parser_from_tts(stream);
     let ty = parser.parse_ty()?;
-    parser.eat_keyword(sym::is);
+    parser.expect_keyword(sym::is)?;
     let pat = parser.parse_pat_no_top_alt(None, None)?;
     Ok((ty, pat))

@@ -3153,7 +3153,8 @@ impl<'a> Parser<'a> {
                 if !require_comma {
                     arm_body = Some(expr);
-                    this.eat(&token::Comma);
+                    // Eat a comma if it exists, though.
+                    let _ = this.eat(&token::Comma);
                     Ok(Recovered::No)
                 } else if let Some((span, guar)) =
                     this.parse_arm_body_missing_braces(&expr, arrow_span)
@@ -3654,7 +3655,7 @@ impl<'a> Parser<'a> {
                     fields.push(f);
                 }
                 self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
-                self.eat(&token::Comma);
+                let _ = self.eat(&token::Comma);
             }
         }
     }

@@ -178,7 +178,8 @@ impl<'a> Parser<'a> {
                     span: this.prev_token.span,
                 });
-                this.eat(&token::Comma);
+                // Eat a trailing comma, if it exists.
+                let _ = this.eat(&token::Comma);
             }
             let param = if this.check_lifetime() {

@@ -1192,13 +1192,14 @@ impl<'a> Parser<'a> {
         mut safety: Safety,
     ) -> PResult<'a, ItemInfo> {
         let abi = self.parse_abi(); // ABI?
+        // FIXME: This recovery should be tested better.
         if safety == Safety::Default
             && self.token.is_keyword(kw::Unsafe)
             && self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Brace))
         {
             self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit();
             safety = Safety::Unsafe(self.token.span);
-            self.eat_keyword(kw::Unsafe);
+            let _ = self.eat_keyword(kw::Unsafe);
         }
         let module = ast::ForeignMod {
             safety,
@@ -1759,7 +1760,7 @@ impl<'a> Parser<'a> {
                    }
                }
            }
            self.eat(&token::CloseDelim(Delimiter::Brace));
-            self.eat(&token::CloseDelim(Delimiter::Brace));
+            self.expect(&token::CloseDelim(Delimiter::Brace))?;
        } else {
            let token_str = super::token_descr(&self.token);
            let where_str = if parsed_where { "" } else { "`where`, or " };
@@ -1902,7 +1903,7 @@ impl<'a> Parser<'a> {
        if let Some(_guar) = guar {
            // Handle a case like `Vec<u8>>,` where we can continue parsing fields
            // after the comma
-           self.eat(&token::Comma);
+           let _ = self.eat(&token::Comma);
            // `check_trailing_angle_brackets` already emitted a nicer error, as
            // proven by the presence of `_guar`. We can continue parsing.

@@ -547,6 +547,7 @@ impl<'a> Parser<'a> {
     }

     #[inline]
+    #[must_use]
     fn check_noexpect(&self, tok: &TokenKind) -> bool {
         self.token == *tok
     }
@@ -556,6 +557,7 @@ impl<'a> Parser<'a> {
     /// the main purpose of this function is to reduce the cluttering of the suggestions list
     /// which using the normal eat method could introduce in some cases.
     #[inline]
+    #[must_use]
     fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.check_noexpect(tok);
         if is_present {
@@ -566,6 +568,7 @@ impl<'a> Parser<'a> {
     /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
     #[inline]
+    #[must_use]
     pub fn eat(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.check(tok);
         if is_present {
@@ -577,12 +580,14 @@ impl<'a> Parser<'a> {
     /// If the next token is the given keyword, returns `true` without eating it.
     /// An expectation is also added for diagnostics purposes.
     #[inline]
+    #[must_use]
     fn check_keyword(&mut self, kw: Symbol) -> bool {
         self.expected_tokens.push(TokenType::Keyword(kw));
         self.token.is_keyword(kw)
     }

     #[inline]
+    #[must_use]
     fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
         if self.check_keyword(kw) {
             return true;
@@ -602,6 +607,7 @@ impl<'a> Parser<'a> {
     /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
     // Public for rustc_builtin_macros and rustfmt usage.
     #[inline]
+    #[must_use]
     pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
         if self.check_keyword(kw) {
             self.bump();
@@ -615,6 +621,7 @@ impl<'a> Parser<'a> {
     /// If the case differs (and is ignored) an error is issued.
     /// This is useful for recovery.
     #[inline]
+    #[must_use]
     fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
         if self.eat_keyword(kw) {
             return true;
@@ -636,6 +643,7 @@ impl<'a> Parser<'a> {
     /// Otherwise, returns `false`. No expectation is added.
     // Public for rustc_builtin_macros usage.
     #[inline]
+    #[must_use]
     pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
         if self.token.is_keyword(kw) {
             self.bump();
@@ -648,7 +656,7 @@ impl<'a> Parser<'a> {
     /// If the given word is not a keyword, signals an error.
     /// If the next token is not the given word, signals an error.
     /// Otherwise, eats it.
-    fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
+    pub fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
         if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
     }
@@ -1025,8 +1033,11 @@ impl<'a> Parser<'a> {
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
-        if matches!(recovered, Recovered::No) {
-            self.eat(ket);
+        if matches!(recovered, Recovered::No) && !self.eat(ket) {
+            self.dcx().span_delayed_bug(
+                self.token.span,
+                "recovered but `parse_seq_to_before_end` did not give us the ket token",
+            );
         }
         Ok((val, trailing))
     }
@@ -1250,7 +1261,7 @@ impl<'a> Parser<'a> {
         if pat {
             self.psess.gated_spans.gate(sym::inline_const_pat, span);
         }
-        self.eat_keyword(kw::Const);
+        self.expect_keyword(kw::Const)?;
         let (attrs, blk) = self.parse_inner_attrs_and_block()?;
         let anon_const = AnonConst {
             id: DUMMY_NODE_ID,

@@ -313,7 +313,8 @@ impl<'a> Parser<'a> {
             }

             // Generic arguments are found - `<`, `(`, `::<` or `::(`.
-            self.eat(&token::PathSep);
+            // First, eat `::` if it exists.
+            let _ = self.eat(&token::PathSep);
             let lo = self.token.span;
             let args = if self.eat_lt() {
                 // `<'a, T, A = U>`

@@ -1,4 +1,4 @@
 //@ known-bug: #123809
-type Positive = std::pat::pattern_type!(std::pat:: is 0..);
+type Positive = std::pat::pattern_type!(std::pat is 0..);

 pub fn main() {}

@@ -0,0 +1,8 @@
+#![feature(core_pattern_type, core_pattern_types)]
+
+use std::pat::pattern_type;
+
+fn main() {
+    let x: pattern_type!(i32 0..1);
+    //~^ ERROR expected one of `!`, `(`, `+`, `::`, `<`, or `is`, found `0`
+}

@@ -0,0 +1,8 @@
+error: expected one of `!`, `(`, `+`, `::`, `<`, or `is`, found `0`
+  --> $DIR/missing-is.rs:6:30
+   |
+LL |     let x: pattern_type!(i32 0..1);
+   |                              ^ expected one of `!`, `(`, `+`, `::`, `<`, or `is`
+
+error: aborting due to 1 previous error
+