2022-09-01 19:29:23 +02:00
|
|
|
|
use std::borrow::Cow;
|
2019-10-08 09:46:06 +02:00
|
|
|
|
use std::mem;
|
2024-11-28 14:03:16 -08:00
|
|
|
|
use std::ops::Bound;
|
2019-08-11 20:32:29 +02:00
|
|
|
|
|
2022-11-16 21:46:06 +01:00
|
|
|
|
use ast::Label;
|
2020-04-27 23:26:11 +05:30
|
|
|
|
use rustc_ast as ast;
|
2020-02-29 20:37:32 +03:00
|
|
|
|
use rustc_ast::ptr::P;
|
2024-04-18 14:53:52 +10:00
|
|
|
|
use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind};
|
2024-05-12 12:28:10 -07:00
|
|
|
|
use rustc_ast::util::classify::{self, TrailingBrace};
|
2022-11-13 13:08:58 +00:00
|
|
|
|
use rustc_ast::{
|
2024-05-09 18:44:40 +10:00
|
|
|
|
AttrStyle, AttrVec, Block, BlockCheckMode, DUMMY_NODE_ID, Expr, ExprKind, HasAttrs, Local,
|
2021-02-12 18:04:37 -06:00
|
|
|
|
LocalKind, MacCall, MacCallStmt, MacStmtStyle, Recovered, Stmt, StmtKind,
|
|
|
|
|
};
|
2024-02-23 10:20:45 +11:00
|
|
|
|
use rustc_errors::{Applicability, Diag, PResult};
|
2024-12-13 10:29:23 +11:00
|
|
|
|
use rustc_span::{BytePos, ErrorGuaranteed, Ident, Span, kw, sym};
|
2023-01-30 14:13:27 +11:00
|
|
|
|
use thin_vec::{ThinVec, thin_vec};
|
2019-08-11 20:32:29 +02:00
|
|
|
|
|
2022-08-31 13:20:59 +02:00
|
|
|
|
use super::attr::InnerAttrForbiddenReason;
|
2022-09-01 19:29:23 +02:00
|
|
|
|
use super::diagnostics::AttemptLocalParseRecovery;
|
2022-10-14 23:16:25 +02:00
|
|
|
|
use super::pat::{PatternLocation, RecoverComma};
|
2019-10-08 09:46:06 +02:00
|
|
|
|
use super::path::PathStyle;
|
2021-12-04 11:05:30 -07:00
|
|
|
|
use super::{
|
|
|
|
|
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
|
2024-08-06 17:16:40 +10:00
|
|
|
|
Trailing, UsePreAttrPos,
|
2024-07-29 08:13:50 +10:00
|
|
|
|
};
|
2024-04-18 20:18:13 +10:00
|
|
|
|
use crate::errors::{self, MalformedLoopLabel};
|
|
|
|
|
use crate::exp;
|
2024-07-29 08:13:50 +10:00
|
|
|
|
|
2019-08-11 20:32:29 +02:00
|
|
|
|
impl<'a> Parser<'a> {
|
2019-09-06 03:56:45 +01:00
|
|
|
|
/// Parses a statement. This stops just before trailing semicolons on everything but items.
|
2019-08-11 20:32:29 +02:00
|
|
|
|
/// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
|
2024-09-18 20:38:43 +02:00
|
|
|
|
///
|
|
|
|
|
/// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
|
|
|
|
|
/// whether or not we have attributes.
|
2020-08-30 13:04:36 -05:00
|
|
|
|
// Public for rustfmt usage.
|
2024-04-18 14:53:52 +10:00
|
|
|
|
pub fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<Stmt>> {
|
|
|
|
|
Ok(self.parse_stmt_without_recovery(false, force_collect, false).unwrap_or_else(|e| {
|
2019-08-11 20:32:29 +02:00
|
|
|
|
e.emit();
|
|
|
|
|
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
|
|
|
|
|
None
|
2019-12-03 05:39:00 +01:00
|
|
|
|
}))
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
|
2024-03-20 07:13:52 +11:00
|
|
|
|
/// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
|
2024-04-18 14:53:52 +10:00
|
|
|
|
/// whether or not we have attributes. If `force_full_expr` is true, parses the stmt without
|
|
|
|
|
/// using `Restriction::STMT_EXPR`. Public for `cfg_eval` macro expansion.
|
2023-05-01 08:51:47 +00:00
|
|
|
|
pub fn parse_stmt_without_recovery(
|
2021-01-18 16:47:37 -05:00
|
|
|
|
&mut self,
|
2021-01-14 10:42:01 -05:00
|
|
|
|
capture_semi: bool,
|
2021-01-18 16:47:37 -05:00
|
|
|
|
force_collect: ForceCollect,
|
2024-04-18 14:53:52 +10:00
|
|
|
|
force_full_expr: bool,
|
2021-01-18 16:47:37 -05:00
|
|
|
|
) -> PResult<'a, Option<Stmt>> {
|
2024-08-06 17:16:40 +10:00
|
|
|
|
let pre_attr_pos = self.collect_pos();
|
2021-01-22 13:28:08 -05:00
|
|
|
|
let attrs = self.parse_outer_attributes()?;
|
2019-08-11 20:32:29 +02:00
|
|
|
|
let lo = self.token.span;
|
|
|
|
|
|
2024-04-18 14:53:52 +10:00
|
|
|
|
if let Some(stmt) = self.eat_metavar_seq(MetaVarKind::Stmt, |this| {
|
|
|
|
|
this.parse_stmt_without_recovery(false, ForceCollect::Yes, false)
|
|
|
|
|
}) {
|
|
|
|
|
let mut stmt = stmt.expect("an actual statement");
|
2022-02-28 07:49:56 -03:00
|
|
|
|
stmt.visit_attrs(|stmt_attrs| {
|
|
|
|
|
attrs.prepend_to_nt_inner(stmt_attrs);
|
|
|
|
|
});
|
2024-04-18 14:53:52 +10:00
|
|
|
|
return Ok(Some(stmt));
|
|
|
|
|
}
|
2020-11-17 14:27:44 -05:00
|
|
|
|
|
2022-08-08 03:19:37 +03:00
|
|
|
|
if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
|
|
|
|
|
self.bump();
|
|
|
|
|
let mut_let_span = lo.to(self.token.span);
|
2023-12-18 21:14:02 +11:00
|
|
|
|
self.dcx().emit_err(errors::InvalidVariableDeclaration {
|
2022-08-17 10:06:24 +02:00
|
|
|
|
span: mut_let_span,
|
2023-02-05 03:26:33 +01:00
|
|
|
|
sub: errors::InvalidVariableDeclarationSub::SwitchMutLetOrder(mut_let_span),
|
2022-08-17 10:06:24 +02:00
|
|
|
|
});
|
2022-08-08 03:19:37 +03:00
|
|
|
|
}
|
|
|
|
|
|
2025-03-28 18:32:12 +01:00
|
|
|
|
let stmt = if self.token.is_keyword(kw::Super) && self.is_keyword_ahead(1, &[kw::Let]) {
|
|
|
|
|
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
|
2025-03-27 18:29:58 +01:00
|
|
|
|
let super_span = this.token.span;
|
2025-03-28 18:32:12 +01:00
|
|
|
|
this.expect_keyword(exp!(Super))?;
|
|
|
|
|
this.expect_keyword(exp!(Let))?;
|
2025-03-27 18:29:58 +01:00
|
|
|
|
this.psess.gated_spans.gate(sym::super_let, super_span);
|
|
|
|
|
let local = this.parse_local(Some(super_span), attrs)?;
|
2025-03-28 18:32:12 +01:00
|
|
|
|
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
|
|
|
|
|
Ok((
|
|
|
|
|
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
|
|
|
|
|
trailing,
|
|
|
|
|
UsePreAttrPos::No,
|
|
|
|
|
))
|
|
|
|
|
})?
|
|
|
|
|
} else if self.token.is_keyword(kw::Let) {
|
2024-08-06 17:16:40 +10:00
|
|
|
|
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
|
2024-12-04 15:55:06 +11:00
|
|
|
|
this.expect_keyword(exp!(Let))?;
|
2025-03-27 18:29:58 +01:00
|
|
|
|
let local = this.parse_local(None, attrs)?;
|
2024-08-06 10:17:46 +10:00
|
|
|
|
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
|
2024-08-06 17:16:40 +10:00
|
|
|
|
Ok((
|
|
|
|
|
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
|
|
|
|
|
trailing,
|
|
|
|
|
UsePreAttrPos::No,
|
|
|
|
|
))
|
2024-07-31 12:08:55 +10:00
|
|
|
|
})?
|
2022-12-03 23:24:49 +09:00
|
|
|
|
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
|
2023-02-05 03:26:33 +01:00
|
|
|
|
self.recover_stmt_local_after_let(
|
|
|
|
|
lo,
|
|
|
|
|
attrs,
|
|
|
|
|
errors::InvalidVariableDeclarationSub::MissingLet,
|
2024-07-17 02:27:01 +10:00
|
|
|
|
force_collect,
|
2023-02-05 03:26:33 +01:00
|
|
|
|
)?
|
2022-12-03 23:24:49 +09:00
|
|
|
|
} else if self.is_kw_followed_by_ident(kw::Auto) && self.may_recover() {
|
2021-01-14 10:42:01 -05:00
|
|
|
|
self.bump(); // `auto`
|
2022-12-03 23:37:23 +09:00
|
|
|
|
self.recover_stmt_local_after_let(
|
|
|
|
|
lo,
|
|
|
|
|
attrs,
|
2023-02-05 03:26:33 +01:00
|
|
|
|
errors::InvalidVariableDeclarationSub::UseLetNotAuto,
|
2024-07-17 02:27:01 +10:00
|
|
|
|
force_collect,
|
2022-12-03 23:37:23 +09:00
|
|
|
|
)?
|
2022-12-03 23:24:49 +09:00
|
|
|
|
} else if self.is_kw_followed_by_ident(sym::var) && self.may_recover() {
|
2021-01-14 10:42:01 -05:00
|
|
|
|
self.bump(); // `var`
|
2022-12-03 23:37:23 +09:00
|
|
|
|
self.recover_stmt_local_after_let(
|
|
|
|
|
lo,
|
|
|
|
|
attrs,
|
2023-02-05 03:26:33 +01:00
|
|
|
|
errors::InvalidVariableDeclarationSub::UseLetNotVar,
|
2024-07-17 02:27:01 +10:00
|
|
|
|
force_collect,
|
2022-12-03 23:37:23 +09:00
|
|
|
|
)?
|
2023-01-19 10:24:17 +01:00
|
|
|
|
} else if self.check_path()
|
|
|
|
|
&& !self.token.is_qpath_start()
|
|
|
|
|
&& !self.is_path_start_item()
|
|
|
|
|
&& !self.is_builtin()
|
|
|
|
|
{
|
2021-01-14 10:42:01 -05:00
|
|
|
|
// We have avoided contextual keywords like `union`, items with `crate` visibility,
|
|
|
|
|
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
|
|
|
|
|
// that starts like a path (1 token), but it fact not a path.
|
|
|
|
|
// Also, we avoid stealing syntax from `parse_item_`.
|
2024-08-06 17:16:40 +10:00
|
|
|
|
//
|
|
|
|
|
// `UsePreAttrPos::Yes` here means the attribute belongs unconditionally to the
|
|
|
|
|
// expression, not the statement. (But the statement attributes/tokens are obtained
|
|
|
|
|
// from the expression anyway, because `Stmt` delegates `HasAttrs`/`HasTokens` to
|
|
|
|
|
// the things within `StmtKind`.)
|
|
|
|
|
let stmt = self.collect_tokens(
|
|
|
|
|
Some(pre_attr_pos),
|
2024-07-17 14:02:37 +10:00
|
|
|
|
AttrWrapper::empty(),
|
|
|
|
|
force_collect,
|
2024-08-06 17:16:40 +10:00
|
|
|
|
|this, _empty_attrs| {
|
|
|
|
|
Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No, UsePreAttrPos::Yes))
|
|
|
|
|
},
|
2024-07-17 14:02:37 +10:00
|
|
|
|
);
|
|
|
|
|
match stmt {
|
|
|
|
|
Ok(stmt) => stmt,
|
|
|
|
|
Err(mut err) => {
|
|
|
|
|
self.suggest_add_missing_let_for_stmt(&mut err);
|
|
|
|
|
return Err(err);
|
2023-01-30 11:03:32 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-12-04 11:05:30 -07:00
|
|
|
|
} else if let Some(item) = self.parse_item_common(
|
2024-08-01 15:41:51 +10:00
|
|
|
|
attrs.clone(), // FIXME: unwanted clone of attrs
|
2021-12-04 11:05:30 -07:00
|
|
|
|
false,
|
|
|
|
|
true,
|
|
|
|
|
FnParseMode { req_name: |_| true, req_body: true },
|
|
|
|
|
force_collect,
|
|
|
|
|
)? {
|
2021-01-14 10:42:01 -05:00
|
|
|
|
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
|
2024-12-04 15:55:06 +11:00
|
|
|
|
} else if self.eat(exp!(Semi)) {
|
2021-01-14 10:42:01 -05:00
|
|
|
|
// Do not attempt to parse an expression if we're done here.
|
2022-11-13 13:08:58 +00:00
|
|
|
|
self.error_outer_attrs(attrs);
|
2021-01-14 10:42:01 -05:00
|
|
|
|
self.mk_stmt(lo, StmtKind::Empty)
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
} else if self.token != token::CloseBrace {
|
2024-08-06 17:16:40 +10:00
|
|
|
|
// Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
|
|
|
|
|
// above.
|
2024-04-18 14:53:52 +10:00
|
|
|
|
let restrictions =
|
|
|
|
|
if force_full_expr { Restrictions::empty() } else { Restrictions::STMT_EXPR };
|
2024-08-06 17:16:40 +10:00
|
|
|
|
let e = self.collect_tokens(
|
|
|
|
|
Some(pre_attr_pos),
|
2024-07-17 14:02:37 +10:00
|
|
|
|
AttrWrapper::empty(),
|
|
|
|
|
force_collect,
|
|
|
|
|
|this, _empty_attrs| {
|
2024-04-18 14:53:52 +10:00
|
|
|
|
let (expr, _) = this.parse_expr_res(restrictions, attrs)?;
|
2024-08-06 17:16:40 +10:00
|
|
|
|
Ok((expr, Trailing::No, UsePreAttrPos::Yes))
|
2024-07-17 14:02:37 +10:00
|
|
|
|
},
|
|
|
|
|
)?;
|
2024-12-04 15:55:06 +11:00
|
|
|
|
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(exp!(Else)) {
|
2022-02-21 08:27:24 +01:00
|
|
|
|
let bl = self.parse_block()?;
|
|
|
|
|
// Destructuring assignment ... else.
|
|
|
|
|
// This is not allowed, but point it out in a nice way.
|
2023-12-18 21:14:02 +11:00
|
|
|
|
self.dcx().emit_err(errors::AssignmentElseNotAllowed { span: e.span.to(bl.span) });
|
2022-02-21 08:27:24 +01:00
|
|
|
|
}
|
2021-01-14 10:42:01 -05:00
|
|
|
|
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
|
|
|
|
|
} else {
|
2022-11-13 13:08:58 +00:00
|
|
|
|
self.error_outer_attrs(attrs);
|
2021-01-14 10:42:01 -05:00
|
|
|
|
return Ok(None);
|
2024-09-18 20:38:43 +02:00
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
self.maybe_augment_stashed_expr_in_pats_with_suggestions(&stmt);
|
|
|
|
|
Ok(Some(stmt))
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
|
2021-05-06 16:21:40 +03:00
|
|
|
|
fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
|
2024-08-06 17:16:40 +10:00
|
|
|
|
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
2021-01-14 10:42:01 -05:00
|
|
|
|
let path = this.parse_path(PathStyle::Expr)?;
|
2020-02-23 13:49:19 +01:00
|
|
|
|
|
2024-12-20 14:04:25 +11:00
|
|
|
|
if this.eat(exp!(Bang)) {
|
2022-08-17 12:34:33 +10:00
|
|
|
|
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
|
2024-08-06 17:16:40 +10:00
|
|
|
|
return Ok((
|
|
|
|
|
stmt_mac,
|
|
|
|
|
Trailing::from(this.token == token::Semi),
|
|
|
|
|
UsePreAttrPos::No,
|
|
|
|
|
));
|
2021-01-14 10:42:01 -05:00
|
|
|
|
}
|
2020-02-23 13:49:19 +01:00
|
|
|
|
|
2024-12-04 15:55:06 +11:00
|
|
|
|
let expr = if this.eat(exp!(OpenBrace)) {
|
2023-02-24 04:38:45 +01:00
|
|
|
|
this.parse_expr_struct(None, path, true)?
|
2021-01-14 10:42:01 -05:00
|
|
|
|
} else {
|
|
|
|
|
let hi = this.prev_token.span;
|
2022-08-15 09:58:38 +10:00
|
|
|
|
this.mk_expr(lo.to(hi), ExprKind::Path(None, path))
|
2021-01-14 10:42:01 -05:00
|
|
|
|
};
|
2020-02-23 13:49:19 +01:00
|
|
|
|
|
2021-01-14 10:42:01 -05:00
|
|
|
|
let expr = this.with_res(Restrictions::STMT_EXPR, |this| {
|
2024-07-16 15:54:34 +10:00
|
|
|
|
this.parse_expr_dot_or_call_with(attrs, expr, lo)
|
2020-11-28 18:33:17 -05:00
|
|
|
|
})?;
|
|
|
|
|
// `DUMMY_SP` will get overwritten later in this function
|
2024-08-06 17:16:40 +10:00
|
|
|
|
Ok((
|
|
|
|
|
this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)),
|
|
|
|
|
Trailing::No,
|
|
|
|
|
UsePreAttrPos::No,
|
|
|
|
|
))
|
2020-11-28 18:33:17 -05:00
|
|
|
|
})?;
|
|
|
|
|
|
|
|
|
|
if let StmtKind::Expr(expr) = stmt.kind {
|
2024-08-06 17:16:40 +10:00
|
|
|
|
// Perform this outside of the `collect_tokens` closure, since our
|
|
|
|
|
// outer attributes do not apply to this part of the expression.
|
|
|
|
|
let (expr, _) = self.with_res(Restrictions::STMT_EXPR, |this| {
|
2024-11-28 14:03:16 -08:00
|
|
|
|
this.parse_expr_assoc_rest_with(Bound::Unbounded, true, expr)
|
2021-01-14 10:42:01 -05:00
|
|
|
|
})?;
|
2020-11-28 18:33:17 -05:00
|
|
|
|
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
|
|
|
|
|
} else {
|
|
|
|
|
Ok(stmt)
|
|
|
|
|
}
|
2020-02-23 13:49:19 +01:00
|
|
|
|
}
|
|
|
|
|
|
2019-12-03 05:37:12 +01:00
|
|
|
|
/// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
|
|
|
|
|
/// At this point, the `!` token after the path has already been eaten.
|
2020-02-23 13:49:19 +01:00
|
|
|
|
fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> {
|
2022-11-18 11:24:21 +11:00
|
|
|
|
let args = self.parse_delim_args()?;
|
2020-02-29 14:56:15 +03:00
|
|
|
|
let hi = self.prev_token.span;
|
2019-12-03 05:37:12 +01:00
|
|
|
|
|
2023-08-02 09:56:26 +10:00
|
|
|
|
let style = match args.delim {
|
2022-11-18 11:24:21 +11:00
|
|
|
|
Delimiter::Brace => MacStmtStyle::Braces,
|
|
|
|
|
_ => MacStmtStyle::NoBraces,
|
2022-04-26 15:21:15 +10:00
|
|
|
|
};
|
2019-12-03 05:37:12 +01:00
|
|
|
|
|
2022-11-16 21:46:06 +01:00
|
|
|
|
let mac = P(MacCall { path, args });
|
2019-12-03 05:37:12 +01:00
|
|
|
|
|
2022-04-26 15:21:15 +10:00
|
|
|
|
let kind = if (style == MacStmtStyle::Braces
|
2024-04-18 14:53:52 +10:00
|
|
|
|
&& !matches!(self.token.kind, token::Dot | token::Question))
|
|
|
|
|
|| matches!(
|
|
|
|
|
self.token.kind,
|
|
|
|
|
token::Semi
|
|
|
|
|
| token::Eof
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
| token::CloseInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Stmt))
|
2024-04-18 14:53:52 +10:00
|
|
|
|
) {
|
2022-04-26 15:21:15 +10:00
|
|
|
|
StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
|
|
|
|
|
} else {
|
|
|
|
|
// Since none of the above applied, this is an expression statement macro.
|
2022-08-15 09:58:38 +10:00
|
|
|
|
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
|
2022-05-19 15:51:49 +10:00
|
|
|
|
let e = self.maybe_recover_from_bad_qpath(e)?;
|
2024-07-16 15:54:34 +10:00
|
|
|
|
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
|
2024-11-28 14:03:16 -08:00
|
|
|
|
let (e, _) = self.parse_expr_assoc_rest_with(Bound::Unbounded, false, e)?;
|
2022-04-26 15:21:15 +10:00
|
|
|
|
StmtKind::Expr(e)
|
|
|
|
|
};
|
2020-02-23 13:49:19 +01:00
|
|
|
|
Ok(self.mk_stmt(lo.to(hi), kind))
|
2019-12-03 05:37:12 +01:00
|
|
|
|
}
|
|
|
|
|
|
2019-12-03 07:40:18 +01:00
|
|
|
|
/// Error on outer attributes in this context.
|
|
|
|
|
/// Also error if the previous token was a doc comment.
|
2022-11-13 13:08:58 +00:00
|
|
|
|
fn error_outer_attrs(&self, attrs: AttrWrapper) {
|
|
|
|
|
if !attrs.is_empty()
|
2024-03-04 16:31:49 +11:00
|
|
|
|
&& let attrs @ [.., last] = &*attrs.take_for_recovery(self.psess)
|
2022-11-13 13:08:58 +00:00
|
|
|
|
{
|
2020-02-27 04:10:42 +01:00
|
|
|
|
if last.is_doc_comment() {
|
2023-12-18 21:14:02 +11:00
|
|
|
|
self.dcx().emit_err(errors::DocCommentDoesNotDocumentAnything {
|
2022-09-01 19:29:23 +02:00
|
|
|
|
span: last.span,
|
|
|
|
|
missing_comma: None,
|
|
|
|
|
});
|
2019-12-03 07:40:18 +01:00
|
|
|
|
} else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
|
2023-12-18 21:14:02 +11:00
|
|
|
|
self.dcx().emit_err(errors::ExpectedStatementAfterOuterAttr { span: last.span });
|
2019-12-03 07:40:18 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2022-12-03 23:37:23 +09:00
|
|
|
|
fn recover_stmt_local_after_let(
|
2019-12-03 18:08:19 +01:00
|
|
|
|
&mut self,
|
2019-12-04 12:15:01 +01:00
|
|
|
|
lo: Span,
|
2020-11-28 18:33:17 -05:00
|
|
|
|
attrs: AttrWrapper,
|
2023-02-05 03:26:33 +01:00
|
|
|
|
subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
|
2024-07-17 02:27:01 +10:00
|
|
|
|
force_collect: ForceCollect,
|
2020-02-23 14:10:03 +01:00
|
|
|
|
) -> PResult<'a, Stmt> {
|
2024-08-06 17:16:40 +10:00
|
|
|
|
let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| {
|
2025-03-27 18:29:58 +01:00
|
|
|
|
let local = this.parse_local(None, attrs)?;
|
2024-07-17 02:27:01 +10:00
|
|
|
|
// FIXME - maybe capture semicolon in recovery?
|
2024-08-06 17:16:40 +10:00
|
|
|
|
Ok((
|
|
|
|
|
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
|
|
|
|
|
Trailing::No,
|
|
|
|
|
UsePreAttrPos::No,
|
|
|
|
|
))
|
2024-07-17 02:27:01 +10:00
|
|
|
|
})?;
|
2023-12-18 21:14:02 +11:00
|
|
|
|
self.dcx()
|
|
|
|
|
.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
|
2020-02-23 14:10:03 +01:00
|
|
|
|
Ok(stmt)
|
2019-12-03 18:08:19 +01:00
|
|
|
|
}
|
|
|
|
|
|
2019-08-11 20:32:29 +02:00
|
|
|
|
/// Parses a local variable declaration.
|
2025-03-27 18:29:58 +01:00
|
|
|
|
fn parse_local(&mut self, super_: Option<Span>, attrs: AttrVec) -> PResult<'a, P<Local>> {
|
|
|
|
|
let lo = super_.unwrap_or(self.prev_token.span);
|
2022-08-03 18:45:26 +03:00
|
|
|
|
|
|
|
|
|
if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
|
2023-12-18 21:14:02 +11:00
|
|
|
|
self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
|
2022-08-03 18:45:26 +03:00
|
|
|
|
self.bump();
|
|
|
|
|
}
|
|
|
|
|
|
2022-10-14 23:16:25 +02:00
|
|
|
|
let (pat, colon) =
|
|
|
|
|
self.parse_pat_before_ty(None, RecoverComma::Yes, PatternLocation::LetBinding)?;
|
2019-08-11 20:32:29 +02:00
|
|
|
|
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
arround and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
let (err, ty, colon_sp) = if colon {
|
2019-08-11 20:32:29 +02:00
|
|
|
|
// Save the state of the parser before parsing type normally, in case there is a `:`
|
|
|
|
|
// instead of an `=` typo.
|
|
|
|
|
let parser_snapshot_before_type = self.clone();
|
2020-02-29 14:56:15 +03:00
|
|
|
|
let colon_sp = self.prev_token.span;
|
2019-08-11 20:32:29 +02:00
|
|
|
|
match self.parse_ty() {
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
arround and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
Ok(ty) => (None, Some(ty), Some(colon_sp)),
|
2019-08-11 20:32:29 +02:00
|
|
|
|
Err(mut err) => {
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
arround and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
err.span_label(
|
|
|
|
|
colon_sp,
|
|
|
|
|
format!(
|
|
|
|
|
"while parsing the type for {}",
|
|
|
|
|
pat.descr()
|
|
|
|
|
.map_or_else(|| "the binding".to_string(), |n| format!("`{n}`"))
|
|
|
|
|
),
|
|
|
|
|
);
|
2022-05-01 19:05:35 +02:00
|
|
|
|
// we use noexpect here because we don't actually expect Eq to be here
|
|
|
|
|
// but we are still checking for it in order to be able to handle it if
|
|
|
|
|
// it is there
|
|
|
|
|
let err = if self.check_noexpect(&token::Eq) {
|
2020-07-14 10:35:59 -07:00
|
|
|
|
err.emit();
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
// Rewind to before attempting to parse the type and continue parsing.
|
|
|
|
|
let parser_snapshot_after_type =
|
|
|
|
|
mem::replace(self, parser_snapshot_before_type);
|
|
|
|
|
Some((parser_snapshot_after_type, colon_sp, err))
|
|
|
|
|
};
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
arround and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
(err, None, Some(colon_sp))
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
arround and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
(None, None, None)
|
2019-08-11 20:32:29 +02:00
|
|
|
|
};
|
2020-05-20 22:09:03 +02:00
|
|
|
|
let init = match (self.parse_initializer(err.is_some()), err) {
|
2019-08-11 20:32:29 +02:00
|
|
|
|
(Ok(init), None) => {
|
|
|
|
|
// init parsed, ty parsed
|
|
|
|
|
init
|
|
|
|
|
}
|
|
|
|
|
(Ok(init), Some((_, colon_sp, mut err))) => {
|
|
|
|
|
// init parsed, ty error
|
|
|
|
|
// Could parse the type as if it were the initializer, it is likely there was a
|
|
|
|
|
// typo in the code: `:` instead of `=`. Add suggestion and emit the error.
|
|
|
|
|
err.span_suggestion_short(
|
|
|
|
|
colon_sp,
|
|
|
|
|
"use `=` if you meant to assign",
|
2022-06-13 15:48:40 +09:00
|
|
|
|
" =",
|
2019-08-11 20:32:29 +02:00
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
);
|
|
|
|
|
err.emit();
|
|
|
|
|
// As this was parsed successfully, continue as if the code has been fixed for the
|
|
|
|
|
// rest of the file. It will still fail due to the emitted error, but we avoid
|
|
|
|
|
// extra noise.
|
|
|
|
|
init
|
|
|
|
|
}
|
2022-01-26 03:39:14 +00:00
|
|
|
|
(Err(init_err), Some((snapshot, _, ty_err))) => {
|
2019-08-11 20:32:29 +02:00
|
|
|
|
// init error, ty error
|
|
|
|
|
init_err.cancel();
|
|
|
|
|
// Couldn't parse the type nor the initializer, only raise the type error and
|
|
|
|
|
// return to the parser state before parsing the type as the initializer.
|
|
|
|
|
// let x: <parse_error>;
|
2020-04-17 13:59:14 -07:00
|
|
|
|
*self = snapshot;
|
2019-08-11 20:32:29 +02:00
|
|
|
|
return Err(ty_err);
|
|
|
|
|
}
|
|
|
|
|
(Err(err), None) => {
|
|
|
|
|
// init error, ty parsed
|
|
|
|
|
// Couldn't parse the initializer and we're not attempting to recover a failed
|
|
|
|
|
// parse of the type, return the error.
|
|
|
|
|
return Err(err);
|
|
|
|
|
}
|
|
|
|
|
};
|
2021-06-22 13:00:58 -05:00
|
|
|
|
let kind = match init {
|
|
|
|
|
None => LocalKind::Decl,
|
|
|
|
|
Some(init) => {
|
2024-12-04 15:55:06 +11:00
|
|
|
|
if self.eat_keyword(exp!(Else)) {
|
2021-10-17 07:02:52 +02:00
|
|
|
|
if self.token.is_keyword(kw::If) {
|
|
|
|
|
// `let...else if`. Emit the same error that `parse_block()` would,
|
|
|
|
|
// but explicitly point out that this pattern is not allowed.
|
|
|
|
|
let msg = "conditional `else if` is not supported for `let...else`";
|
Use `Cow` in `{D,Subd}iagnosticMessage`.
Each of `{D,Subd}iagnosticMessage::{Str,Eager}` has a comment:
```
// FIXME(davidtwco): can a `Cow<'static, str>` be used here?
```
This commit answers that question in the affirmative. It's not the most
compelling change ever, but it might be worth merging.
This requires changing the `impl<'a> From<&'a str>` impls to `impl
From<&'static str>`, which involves a bunch of knock-on changes that
require/result in call sites being a little more precise about exactly
what kind of string they use to create errors, and not just `&str`. This
will result in fewer unnecessary allocations, though this will not have
any notable perf effects given that these are error paths.
Note that I was lazy within Clippy, using `to_string` in a few places to
preserve the existing string imprecision. I could have used `impl
Into<{D,Subd}iagnosticMessage>` in various places as is done in the
compiler, but that would have required changes to *many* call sites
(mostly changing `&format("...")` to `format!("...")`) which didn't seem
worthwhile.
2023-05-04 10:55:21 +10:00
|
|
|
|
return Err(self.error_block_no_opening_brace_msg(Cow::from(msg)));
|
2021-10-17 07:02:52 +02:00
|
|
|
|
}
|
2021-06-22 13:00:58 -05:00
|
|
|
|
let els = self.parse_block()?;
|
2021-07-30 17:12:11 -05:00
|
|
|
|
self.check_let_else_init_bool_expr(&init);
|
|
|
|
|
self.check_let_else_init_trailing_brace(&init);
|
2021-06-22 13:00:58 -05:00
|
|
|
|
LocalKind::InitElse(init, els)
|
|
|
|
|
} else {
|
|
|
|
|
LocalKind::Init(init)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
};
|
2020-02-29 14:56:15 +03:00
|
|
|
|
let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span };
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
arround and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
Ok(P(ast::Local {
|
2025-03-27 18:29:58 +01:00
|
|
|
|
super_,
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
arround and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
ty,
|
|
|
|
|
pat,
|
|
|
|
|
kind,
|
|
|
|
|
id: DUMMY_NODE_ID,
|
|
|
|
|
span: lo.to(hi),
|
|
|
|
|
colon_sp,
|
|
|
|
|
attrs,
|
|
|
|
|
tokens: None,
|
|
|
|
|
}))
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
|
2021-07-30 17:12:11 -05:00
|
|
|
|
fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
|
|
|
|
|
if let ast::ExprKind::Binary(op, ..) = init.kind {
|
2023-11-28 09:42:25 +11:00
|
|
|
|
if op.node.is_lazy() {
|
2023-12-18 21:14:02 +11:00
|
|
|
|
self.dcx().emit_err(errors::InvalidExpressionInLetElse {
|
2022-09-01 19:29:23 +02:00
|
|
|
|
span: init.span,
|
2023-11-28 09:11:03 +11:00
|
|
|
|
operator: op.node.as_str(),
|
2023-12-18 03:01:05 +00:00
|
|
|
|
sugg: errors::WrapInParentheses::Expression {
|
2022-09-01 19:29:23 +02:00
|
|
|
|
left: init.span.shrink_to_lo(),
|
|
|
|
|
right: init.span.shrink_to_hi(),
|
|
|
|
|
},
|
|
|
|
|
});
|
2021-07-30 17:12:11 -05:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
|
|
|
|
|
if let Some(trailing) = classify::expr_trailing_brace(init) {
|
2024-05-12 12:28:10 -07:00
|
|
|
|
let (span, sugg) = match trailing {
|
|
|
|
|
TrailingBrace::MacCall(mac) => (
|
|
|
|
|
mac.span(),
|
|
|
|
|
errors::WrapInParentheses::MacroArgs {
|
|
|
|
|
left: mac.args.dspan.open,
|
|
|
|
|
right: mac.args.dspan.close,
|
|
|
|
|
},
|
|
|
|
|
),
|
|
|
|
|
TrailingBrace::Expr(expr) => (
|
|
|
|
|
expr.span,
|
|
|
|
|
errors::WrapInParentheses::Expression {
|
|
|
|
|
left: expr.span.shrink_to_lo(),
|
|
|
|
|
right: expr.span.shrink_to_hi(),
|
|
|
|
|
},
|
|
|
|
|
),
|
2023-12-18 03:01:05 +00:00
|
|
|
|
};
|
|
|
|
|
self.dcx().emit_err(errors::InvalidCurlyInLetElse {
|
2024-05-12 12:28:10 -07:00
|
|
|
|
span: span.with_lo(span.hi() - BytePos(1)),
|
2023-12-18 03:01:05 +00:00
|
|
|
|
sugg,
|
2022-09-01 19:29:23 +02:00
|
|
|
|
});
|
2021-07-30 17:12:11 -05:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-03-13 05:56:30 +09:00
|
|
|
|
/// Parses the RHS of a local variable declaration (e.g., `= 14;`).
|
2020-05-20 22:09:03 +02:00
|
|
|
|
fn parse_initializer(&mut self, eq_optional: bool) -> PResult<'a, Option<P<Expr>>> {
|
|
|
|
|
let eq_consumed = match self.token.kind {
|
2024-12-20 07:28:16 +11:00
|
|
|
|
token::PlusEq
|
|
|
|
|
| token::MinusEq
|
|
|
|
|
| token::StarEq
|
|
|
|
|
| token::SlashEq
|
|
|
|
|
| token::PercentEq
|
|
|
|
|
| token::CaretEq
|
|
|
|
|
| token::AndEq
|
|
|
|
|
| token::OrEq
|
|
|
|
|
| token::ShlEq
|
|
|
|
|
| token::ShrEq => {
|
2024-08-09 05:48:52 +00:00
|
|
|
|
// Recover `let x <op>= 1` as `let x = 1` We must not use `+ BytePos(1)` here
|
|
|
|
|
// because `<op>` can be a multi-byte lookalike that was recovered, e.g. `➖=` (the
|
|
|
|
|
// `➖` is a U+2796 Heavy Minus Sign Unicode Character) that was recovered as a
|
|
|
|
|
// `-=`.
|
|
|
|
|
let extra_op_span = self.psess.source_map().start_point(self.token.span);
|
2024-07-06 03:07:46 +00:00
|
|
|
|
self.dcx().emit_err(errors::CompoundAssignmentExpressionInLet {
|
|
|
|
|
span: self.token.span,
|
2024-08-09 05:48:52 +00:00
|
|
|
|
suggestion: extra_op_span,
|
2024-07-06 03:07:46 +00:00
|
|
|
|
});
|
2020-05-20 22:09:03 +02:00
|
|
|
|
self.bump();
|
|
|
|
|
true
|
2020-05-07 03:57:31 +02:00
|
|
|
|
}
|
2024-12-04 15:55:06 +11:00
|
|
|
|
_ => self.eat(exp!(Eq)),
|
2020-05-07 03:57:31 +02:00
|
|
|
|
};
|
|
|
|
|
|
2020-05-20 22:09:03 +02:00
|
|
|
|
Ok(if eq_consumed || eq_optional { Some(self.parse_expr()?) } else { None })
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Parses a block. No inner attributes are allowed.
|
2023-12-25 20:53:01 +00:00
|
|
|
|
pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
|
2025-03-17 00:25:15 -05:00
|
|
|
|
let (attrs, block) = self.parse_inner_attrs_and_block(None)?;
|
2020-03-05 11:42:56 +01:00
|
|
|
|
if let [.., last] = &*attrs {
|
2024-08-02 00:28:57 +08:00
|
|
|
|
let suggest_to_outer = match &last.kind {
|
|
|
|
|
ast::AttrKind::Normal(attr) => attr.item.is_valid_for_outer_style(),
|
|
|
|
|
_ => false,
|
|
|
|
|
};
|
2022-08-31 13:20:59 +02:00
|
|
|
|
self.error_on_forbidden_inner_attr(
|
|
|
|
|
last.span,
|
|
|
|
|
super::attr::InnerAttrPolicy::Forbidden(Some(
|
|
|
|
|
InnerAttrForbiddenReason::InCodeBlock,
|
|
|
|
|
)),
|
2024-08-02 00:28:57 +08:00
|
|
|
|
suggest_to_outer,
|
2022-08-31 13:20:59 +02:00
|
|
|
|
);
|
2019-12-03 08:20:19 +01:00
|
|
|
|
}
|
2020-03-05 11:42:56 +01:00
|
|
|
|
Ok(block)
|
2019-12-03 08:20:19 +01:00
|
|
|
|
}
|
|
|
|
|
|
2024-02-23 10:20:45 +11:00
|
|
|
|
/// Builds (but does not emit) the "expected `{`" diagnostic for a missing
/// block opening brace, and attaches recovery suggestions by speculatively
/// parsing the statement that follows the current token.
fn error_block_no_opening_brace_msg(&mut self, msg: Cow<'static, str>) -> Diag<'a> {
    let prev = self.prev_token.span;
    let sp = self.token.span;
    let mut e = self.dcx().struct_span_err(sp, msg);
    self.label_expected_raw_ref(&mut e);

    // Contexts where a "wrap in a block" help would be noise: inside a
    // `for ... in`, at a `:`, or right after `raw`.
    let do_not_suggest_help = self.token.is_keyword(kw::In)
        || self.token == token::Colon
        || self.prev_token.is_keyword(kw::Raw);

    // Check to see if the user has written something like
    //
    // if (cond)
    // bar;
    //
    // which is valid in other languages, but not Rust.
    match self.parse_stmt_without_recovery(false, ForceCollect::No, false) {
        // If the next token is an open brace, e.g., we have:
        //
        // if expr other_expr {
        // ^ ^ ^- lookahead(1) is a brace
        // | |- current token is not "else"
        // |- (statement we just parsed)
        //
        // the place-inside-a-block suggestion would be more likely wrong than right.
        //
        // FIXME(compiler-errors): this should probably parse an arbitrary expr and not
        // just lookahead one token, so we can see if there's a brace after _that_,
        // since we want to protect against:
        // `if 1 1 + 1 {` being suggested as `if { 1 } 1 + 1 {`
        // + +
        Ok(Some(_))
            if (!self.token.is_keyword(kw::Else)
                && self.look_ahead(1, |t| t == &token::OpenBrace))
                || do_not_suggest_help => {}
        // Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836).
        Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
        Ok(Some(stmt)) => {
            // The statement parsed; decide how much of it the suggestion
            // should cover (include a trailing `;` only when the statement
            // sits on its own line).
            let stmt_own_line = self.psess.source_map().is_line_before_span_empty(sp);
            let stmt_span = if stmt_own_line && self.eat(exp!(Semi)) {
                // Expand the span to include the semicolon.
                stmt.span.with_hi(self.prev_token.span.hi())
            } else {
                stmt.span
            };
            self.suggest_fixes_misparsed_for_loop_head(
                &mut e,
                prev.between(sp),
                stmt_span,
                &stmt.kind,
            );
        }
        Err(e) => {
            // Speculative parse failed entirely; discard it and skip ahead so
            // the primary error below still points somewhere sensible.
            self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
            e.cancel();
        }
        _ => {}
    }
    e.span_label(sp, "expected `{`");
    e
}
|
|
|
|
|
|
2024-11-16 20:02:27 +00:00
|
|
|
|
fn suggest_fixes_misparsed_for_loop_head(
|
|
|
|
|
&self,
|
|
|
|
|
e: &mut Diag<'_>,
|
|
|
|
|
between: Span,
|
|
|
|
|
stmt_span: Span,
|
|
|
|
|
stmt_kind: &StmtKind,
|
|
|
|
|
) {
|
|
|
|
|
match (&self.token.kind, &stmt_kind) {
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
(token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Call(..) = expr.kind => {
|
2024-11-16 20:02:27 +00:00
|
|
|
|
// for _ in x y() {}
|
|
|
|
|
e.span_suggestion_verbose(
|
|
|
|
|
between,
|
|
|
|
|
"you might have meant to write a method call",
|
|
|
|
|
".".to_string(),
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
}
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
(token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Field(..) = expr.kind => {
|
2024-11-16 20:02:27 +00:00
|
|
|
|
// for _ in x y.z {}
|
|
|
|
|
e.span_suggestion_verbose(
|
|
|
|
|
between,
|
|
|
|
|
"you might have meant to write a field access",
|
|
|
|
|
".".to_string(),
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
}
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
(token::CloseBrace, StmtKind::Expr(expr))
|
2024-11-16 20:02:27 +00:00
|
|
|
|
if let ExprKind::Struct(expr) = &expr.kind
|
|
|
|
|
&& let None = expr.qself
|
|
|
|
|
&& expr.path.segments.len() == 1 =>
|
|
|
|
|
{
|
|
|
|
|
// This is specific to "mistyped `if` condition followed by empty body"
|
|
|
|
|
//
|
|
|
|
|
// for _ in x y {}
|
|
|
|
|
e.span_suggestion_verbose(
|
|
|
|
|
between,
|
|
|
|
|
"you might have meant to write a field access",
|
|
|
|
|
".".to_string(),
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
}
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
(token::OpenBrace, StmtKind::Expr(expr))
|
2024-11-16 20:02:27 +00:00
|
|
|
|
if let ExprKind::Lit(lit) = expr.kind
|
|
|
|
|
&& let None = lit.suffix
|
|
|
|
|
&& let token::LitKind::Integer | token::LitKind::Float = lit.kind =>
|
|
|
|
|
{
|
|
|
|
|
// for _ in x 0 {}
|
|
|
|
|
// for _ in x 0.0 {}
|
|
|
|
|
e.span_suggestion_verbose(
|
|
|
|
|
between,
|
|
|
|
|
format!("you might have meant to write a field access"),
|
|
|
|
|
".".to_string(),
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
}
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
(token::OpenBrace, StmtKind::Expr(expr))
|
2024-11-16 20:02:27 +00:00
|
|
|
|
if let ExprKind::Loop(..)
|
|
|
|
|
| ExprKind::If(..)
|
|
|
|
|
| ExprKind::While(..)
|
|
|
|
|
| ExprKind::Match(..)
|
|
|
|
|
| ExprKind::ForLoop { .. }
|
|
|
|
|
| ExprKind::TryBlock(..)
|
|
|
|
|
| ExprKind::Ret(..)
|
|
|
|
|
| ExprKind::Closure(..)
|
|
|
|
|
| ExprKind::Struct(..)
|
|
|
|
|
| ExprKind::Try(..) = expr.kind =>
|
|
|
|
|
{
|
|
|
|
|
// These are more likely to have been meant as a block body.
|
|
|
|
|
e.multipart_suggestion(
|
|
|
|
|
"you might have meant to write this as part of a block",
|
|
|
|
|
vec![
|
|
|
|
|
(stmt_span.shrink_to_lo(), "{ ".to_string()),
|
|
|
|
|
(stmt_span.shrink_to_hi(), " }".to_string()),
|
|
|
|
|
],
|
|
|
|
|
// Speculative; has been misleading in the past (#46836).
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
}
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
(token::OpenBrace, _) => {}
|
2024-11-16 20:02:27 +00:00
|
|
|
|
(_, _) => {
|
|
|
|
|
e.multipart_suggestion(
|
|
|
|
|
"you might have meant to write this as part of a block",
|
|
|
|
|
vec![
|
|
|
|
|
(stmt_span.shrink_to_lo(), "{ ".to_string()),
|
|
|
|
|
(stmt_span.shrink_to_hi(), " }".to_string()),
|
|
|
|
|
],
|
|
|
|
|
// Speculative; has been misleading in the past (#46836).
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-10-17 07:02:52 +02:00
|
|
|
|
fn error_block_no_opening_brace<T>(&mut self) -> PResult<'a, T> {
|
|
|
|
|
let tok = super::token_descr(&self.token);
|
2023-07-25 22:00:13 +02:00
|
|
|
|
let msg = format!("expected `{{`, found {tok}");
|
Use `Cow` in `{D,Subd}iagnosticMessage`.
Each of `{D,Subd}iagnosticMessage::{Str,Eager}` has a comment:
```
// FIXME(davidtwco): can a `Cow<'static, str>` be used here?
```
This commit answers that question in the affirmative. It's not the most
compelling change ever, but it might be worth merging.
This requires changing the `impl<'a> From<&'a str>` impls to `impl
From<&'static str>`, which involves a bunch of knock-on changes that
require/result in call sites being a little more precise about exactly
what kind of string they use to create errors, and not just `&str`. This
will result in fewer unnecessary allocations, though this will not have
any notable perf effects given that these are error paths.
Note that I was lazy within Clippy, using `to_string` in a few places to
preserve the existing string imprecision. I could have used `impl
Into<{D,Subd}iagnosticMessage>` in various places as is done in the
compiler, but that would have required changes to *many* call sites
(mostly changing `&format("...")` to `format!("...")`) which didn't seem
worthwhile.
2023-05-04 10:55:21 +10:00
|
|
|
|
Err(self.error_block_no_opening_brace_msg(Cow::from(msg)))
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
|
2025-03-17 00:25:15 -05:00
|
|
|
|
/// Parses a block. Inner attributes are allowed, block labels are not.
|
|
|
|
|
///
|
|
|
|
|
/// If `loop_header` is `Some` and an unexpected block label is encountered,
|
|
|
|
|
/// it is suggested to be moved just before `loop_header`, else it is suggested to be removed.
|
|
|
|
|
pub(super) fn parse_inner_attrs_and_block(
|
|
|
|
|
&mut self,
|
|
|
|
|
loop_header: Option<Span>,
|
|
|
|
|
) -> PResult<'a, (AttrVec, P<Block>)> {
|
2025-03-24 00:12:53 +01:00
|
|
|
|
self.parse_block_common(self.token.span, BlockCheckMode::Default, loop_header)
|
2020-03-05 05:49:30 +01:00
|
|
|
|
}
|
2019-08-11 20:32:29 +02:00
|
|
|
|
|
2025-03-17 00:25:15 -05:00
|
|
|
|
/// Parses a block. Inner attributes are allowed, block labels are not.
|
|
|
|
|
///
|
|
|
|
|
/// If `loop_header` is `Some` and an unexpected block label is encountered,
|
|
|
|
|
/// it is suggested to be moved just before `loop_header`, else it is suggested to be removed.
|
2020-03-05 05:49:30 +01:00
|
|
|
|
pub(super) fn parse_block_common(
|
|
|
|
|
&mut self,
|
|
|
|
|
lo: Span,
|
|
|
|
|
blk_mode: BlockCheckMode,
|
2025-03-17 00:25:15 -05:00
|
|
|
|
loop_header: Option<Span>,
|
2022-08-17 12:34:33 +10:00
|
|
|
|
) -> PResult<'a, (AttrVec, P<Block>)> {
|
2024-04-18 20:18:13 +10:00
|
|
|
|
if let Some(block) = self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) {
|
|
|
|
|
return Ok((AttrVec::new(), block));
|
|
|
|
|
}
|
2020-03-05 02:46:13 +01:00
|
|
|
|
|
2024-05-16 09:22:37 +10:00
|
|
|
|
let maybe_ident = self.prev_token;
|
2025-03-17 00:25:15 -05:00
|
|
|
|
self.maybe_recover_unexpected_block_label(loop_header);
|
2024-12-04 15:55:06 +11:00
|
|
|
|
if !self.eat(exp!(OpenBrace)) {
|
2020-03-05 02:46:13 +01:00
|
|
|
|
return self.error_block_no_opening_brace();
|
|
|
|
|
}
|
|
|
|
|
|
2020-08-12 15:39:15 -07:00
|
|
|
|
let attrs = self.parse_inner_attributes()?;
|
2025-03-24 00:12:53 +01:00
|
|
|
|
let tail = match self.maybe_suggest_struct_literal(lo, blk_mode, maybe_ident) {
|
2022-01-12 20:43:24 +00:00
|
|
|
|
Some(tail) => tail?,
|
|
|
|
|
None => self.parse_block_tail(lo, blk_mode, AttemptLocalParseRecovery::Yes)?,
|
2020-08-12 15:39:15 -07:00
|
|
|
|
};
|
|
|
|
|
Ok((attrs, tail))
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Parses the rest of a block expression or function body.
/// Precondition: already parsed the '{'.
///
/// Statements are accumulated until the matching `}` (or EOF). When `recover`
/// is `Yes`, a statement that fails to parse is emitted as an error and
/// replaced with an error statement so the rest of the block can continue.
pub(crate) fn parse_block_tail(
    &mut self,
    lo: Span,
    s: BlockCheckMode,
    recover: AttemptLocalParseRecovery,
) -> PResult<'a, P<Block>> {
    let mut stmts = ThinVec::new();
    // Set only when a `<<<<<<<` VCS conflict marker was seen; consulted on a
    // later parse error to improve the diagnostic.
    let mut snapshot = None;
    while !self.eat(exp!(CloseBrace)) {
        if self.token == token::Eof {
            break;
        }
        if self.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
            // Account for `<<<<<<<` diff markers. We can't proactively error here because
            // that can be a valid path start, so we snapshot and reparse only we've
            // encountered another parse error.
            snapshot = Some(self.create_snapshot_for_diagnostic());
        }
        let stmt = match self.parse_full_stmt(recover) {
            Err(mut err) if recover.yes() => {
                if let Some(ref mut snapshot) = snapshot {
                    snapshot.recover_vcs_conflict_marker();
                }
                if self.token == token::Colon {
                    // if a previous and next token of the current one is
                    // integer literal (e.g. `1:42`), it's likely a range
                    // expression for Pythonistas and we can suggest so.
                    if self.prev_token.is_integer_lit()
                        && self.may_recover()
                        && self.look_ahead(1, |token| token.is_integer_lit())
                    {
                        // FIXME(hkmatsumoto): Might be better to trigger
                        // this only when parsing an index expression.
                        err.span_suggestion_verbose(
                            self.token.span,
                            "you might have meant a range expression",
                            "..",
                            Applicability::MaybeIncorrect,
                        );
                    } else {
                        // if next token is following a colon, it's likely a path
                        // and we can suggest a path separator
                        self.bump();
                        // Only suggest `::` when the two tokens are adjacent,
                        // i.e. the `:` directly touches what follows.
                        if self.token.span.lo() == self.prev_token.span.hi() {
                            err.span_suggestion_verbose(
                                self.prev_token.span,
                                "maybe write a path separator here",
                                "::",
                                Applicability::MaybeIncorrect,
                            );
                        }
                    }
                }

                let guar = err.emit();
                // Skip past the broken statement, then stand in an error
                // statement so the block still has a node here.
                self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                Some(self.mk_stmt_err(self.token.span, guar))
            }
            Ok(stmt) => stmt,
            Err(err) => return Err(err),
        };
        if let Some(stmt) = stmt {
            stmts.push(stmt);
        } else {
            // Found only `;` or `}`.
            continue;
        };
    }
    Ok(self.mk_block(stmts, s, lo.to(self.prev_token.span)))
}
|
|
|
|
|
|
Detect missing `.` in method chain in let bindings and statements
On parse errors where an ident is found where one wasn't expected, see if the next elements might have been meant as method call or field access.
```
error: expected one of `.`, `;`, `?`, `else`, or an operator, found `map`
--> $DIR/missing-dot-on-statement-expression.rs:7:29
|
LL | let _ = [1, 2, 3].iter()map(|x| x);
| ^^^ expected one of `.`, `;`, `?`, `else`, or an operator
|
help: you might have meant to write a method call
|
LL | let _ = [1, 2, 3].iter().map(|x| x);
| +
```
2024-11-16 00:07:58 +00:00
|
|
|
|
/// On a parse error where an identifier was found unexpectedly, checks
/// whether the code looks like a method chain or field access with a missing
/// `.` (e.g. `iter()map(...)` or `foo bar;`) and, if so, attaches a
/// "you might have meant ..." suggestion to `err`.
fn recover_missing_dot(&mut self, err: &mut Diag<'_>) {
    let Some((ident, _)) = self.token.ident() else {
        return;
    };
    // An uppercase-initial ident is more likely a type/variant than a
    // method or field name, so don't suggest a `.` for it.
    if let Some(c) = ident.name.as_str().chars().next()
        && c.is_uppercase()
    {
        return;
    }
    // Reserved idents other than `await` can't be methods/fields.
    if self.token.is_reserved_ident() && !self.token.is_ident_named(kw::Await) {
        return;
    }
    // Only a few shapes of previous token plausibly precede a missing `.`;
    // anything else bails out.
    if self.prev_token.is_reserved_ident() && self.prev_token.is_ident_named(kw::Await) {
        // Likely `foo.await bar`
    } else if !self.prev_token.is_reserved_ident() && self.prev_token.is_ident() {
        // Likely `foo bar`
    } else if self.prev_token.kind == token::Question {
        // `foo? bar`
    } else if self.prev_token.kind == token::CloseParen {
        // `foo() bar`
    } else {
        return;
    }
    if self.token.span == self.prev_token.span {
        // Account for syntax errors in proc-macros.
        return;
    }
    // `ident` followed by `;`/`?`/`.` reads as a field access...
    if self.look_ahead(1, |t| [token::Semi, token::Question, token::Dot].contains(&t.kind)) {
        err.span_suggestion_verbose(
            self.prev_token.span.between(self.token.span),
            "you might have meant to write a field access",
            ".".to_string(),
            Applicability::MaybeIncorrect,
        );
    }
    // ...while `ident(` reads as a method call.
    if self.look_ahead(1, |t| t.kind == token::OpenParen) {
        err.span_suggestion_verbose(
            self.prev_token.span.between(self.token.span),
            "you might have meant to write a method call",
            ".".to_string(),
            Applicability::MaybeIncorrect,
        );
    }
}
|
|
|
|
|
|
2019-08-11 20:32:29 +02:00
|
|
|
|
/// Parses a statement, including the trailing semicolon.
|
2020-08-12 15:39:15 -07:00
|
|
|
|
pub fn parse_full_stmt(
|
|
|
|
|
&mut self,
|
|
|
|
|
recover: AttemptLocalParseRecovery,
|
|
|
|
|
) -> PResult<'a, Option<Stmt>> {
|
2024-04-18 14:53:52 +10:00
|
|
|
|
// Skip looking for a trailing semicolon when we have a metavar seq.
|
|
|
|
|
if let Some(stmt) = self.eat_metavar_seq(MetaVarKind::Stmt, |this| {
|
|
|
|
|
// Why pass `true` for `force_full_expr`? Statement expressions are less expressive
|
|
|
|
|
// than "full" expressions, due to the `STMT_EXPR` restriction, and sometimes need
|
|
|
|
|
// parentheses. E.g. the "full" expression `match paren_around_match {} | true` when
|
|
|
|
|
// used in statement context must be written `(match paren_around_match {} | true)`.
|
|
|
|
|
// However, if the expression we are parsing in this statement context was pasted by a
|
|
|
|
|
// declarative macro, it may have come from a "full" expression context, and lack
|
|
|
|
|
// these parentheses. So we lift the `STMT_EXPR` restriction to ensure the statement
|
|
|
|
|
// will reparse successfully.
|
|
|
|
|
this.parse_stmt_without_recovery(false, ForceCollect::No, true)
|
|
|
|
|
}) {
|
|
|
|
|
let stmt = stmt.expect("an actual statement");
|
|
|
|
|
return Ok(Some(stmt));
|
|
|
|
|
}
|
2019-08-11 20:32:29 +02:00
|
|
|
|
|
2024-04-18 14:53:52 +10:00
|
|
|
|
let Some(mut stmt) = self.parse_stmt_without_recovery(true, ForceCollect::No, false)?
|
|
|
|
|
else {
|
2022-02-19 00:48:49 +01:00
|
|
|
|
return Ok(None);
|
2019-08-11 20:32:29 +02:00
|
|
|
|
};
|
|
|
|
|
|
2019-10-20 14:35:46 -07:00
|
|
|
|
let mut eat_semi = true;
|
2022-11-16 21:46:06 +01:00
|
|
|
|
let mut add_semi_to_stmt = false;
|
|
|
|
|
|
2022-11-22 09:42:01 +00:00
|
|
|
|
match &mut stmt.kind {
|
Handle attempts to have multiple `cfg`d tail expressions
When encountering code that seems like it might be trying to have
multiple tail expressions depending on `cfg` information, suggest
alternatives that will successfully parse.
```rust
fn foo() -> String {
#[cfg(feature = "validation")]
[1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
#[cfg(not(feature = "validation"))]
String::new()
}
```
```
error: expected `;`, found `#`
--> $DIR/multiple-tail-expr-behind-cfg.rs:5:64
|
LL | #[cfg(feature = "validation")]
| ------------------------------ only `;` terminated statements or tail expressions are allowed after this attribute
LL | [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
| ^ expected `;` here
LL | #[cfg(not(feature = "validation"))]
| - unexpected token
|
help: add `;` here
|
LL | [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>();
| +
help: alternatively, consider surrounding the expression with a block
|
LL | { [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>() }
| + +
help: it seems like you are trying to provide different expressions depending on `cfg`, consider using `if cfg!(..)`
|
LL ~ if cfg!(feature = "validation") {
LL ~ [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
LL ~ } else if cfg!(not(feature = "validation")) {
LL ~ String::new()
LL + }
|
```
Fix #106020.
2023-11-16 21:21:26 +00:00
|
|
|
|
// Expression without semicolon.
|
|
|
|
|
StmtKind::Expr(expr)
|
|
|
|
|
if classify::expr_requires_semi_to_be_stmt(expr)
|
|
|
|
|
&& !expr.attrs.is_empty()
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
&& !matches!(self.token.kind, token::Eof | token::Semi | token::CloseBrace) =>
|
Handle attempts to have multiple `cfg`d tail expressions
When encountering code that seems like it might be trying to have
multiple tail expressions depending on `cfg` information, suggest
alternatives that will successfully parse.
```rust
fn foo() -> String {
#[cfg(feature = "validation")]
[1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
#[cfg(not(feature = "validation"))]
String::new()
}
```
```
error: expected `;`, found `#`
--> $DIR/multiple-tail-expr-behind-cfg.rs:5:64
|
LL | #[cfg(feature = "validation")]
| ------------------------------ only `;` terminated statements or tail expressions are allowed after this attribute
LL | [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
| ^ expected `;` here
LL | #[cfg(not(feature = "validation"))]
| - unexpected token
|
help: add `;` here
|
LL | [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>();
| +
help: alternatively, consider surrounding the expression with a block
|
LL | { [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>() }
| + +
help: it seems like you are trying to provide different expressions depending on `cfg`, consider using `if cfg!(..)`
|
LL ~ if cfg!(feature = "validation") {
LL ~ [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
LL ~ } else if cfg!(not(feature = "validation")) {
LL ~ String::new()
LL + }
|
```
Fix #106020.
2023-11-16 21:21:26 +00:00
|
|
|
|
{
|
|
|
|
|
// The user has written `#[attr] expr` which is unsupported. (#106020)
|
2024-02-25 22:22:11 +01:00
|
|
|
|
let guar = self.attr_on_non_tail_expr(&expr);
|
Handle attempts to have multiple `cfg`d tail expressions
When encountering code that seems like it might be trying to have
multiple tail expressions depending on `cfg` information, suggest
alternatives that will successfully parse.
```rust
fn foo() -> String {
#[cfg(feature = "validation")]
[1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
#[cfg(not(feature = "validation"))]
String::new()
}
```
```
error: expected `;`, found `#`
--> $DIR/multiple-tail-expr-behind-cfg.rs:5:64
|
LL | #[cfg(feature = "validation")]
| ------------------------------ only `;` terminated statements or tail expressions are allowed after this attribute
LL | [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
| ^ expected `;` here
LL | #[cfg(not(feature = "validation"))]
| - unexpected token
|
help: add `;` here
|
LL | [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>();
| +
help: alternatively, consider surrounding the expression with a block
|
LL | { [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>() }
| + +
help: it seems like you are trying to provide different expressions depending on `cfg`, consider using `if cfg!(..)`
|
LL ~ if cfg!(feature = "validation") {
LL ~ [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
LL ~ } else if cfg!(not(feature = "validation")) {
LL ~ String::new()
LL + }
|
```
Fix #106020.
2023-11-16 21:21:26 +00:00
|
|
|
|
// We already emitted an error, so don't emit another type error
|
|
|
|
|
let sp = expr.span.to(self.prev_token.span);
|
2024-02-25 22:22:11 +01:00
|
|
|
|
*expr = self.mk_expr_err(sp, guar);
|
Handle attempts to have multiple `cfg`d tail expressions
When encountering code that seems like it might be trying to have
multiple tail expressions depending on `cfg` information, suggest
alternatives that will successfully parse.
```rust
fn foo() -> String {
#[cfg(feature = "validation")]
[1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
#[cfg(not(feature = "validation"))]
String::new()
}
```
```
error: expected `;`, found `#`
--> $DIR/multiple-tail-expr-behind-cfg.rs:5:64
|
LL | #[cfg(feature = "validation")]
| ------------------------------ only `;` terminated statements or tail expressions are allowed after this attribute
LL | [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
| ^ expected `;` here
LL | #[cfg(not(feature = "validation"))]
| - unexpected token
|
help: add `;` here
|
LL | [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>();
| +
help: alternatively, consider surrounding the expression with a block
|
LL | { [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>() }
| + +
help: it seems like you are trying to provide different expressions depending on `cfg`, consider using `if cfg!(..)`
|
LL ~ if cfg!(feature = "validation") {
LL ~ [1, 2, 3].iter().map(|c| c.to_string()).collect::<String>()
LL ~ } else if cfg!(not(feature = "validation")) {
LL ~ String::new()
LL + }
|
```
Fix #106020.
2023-11-16 21:21:26 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-02-25 00:59:39 +01:00
|
|
|
|
// Expression without semicolon.
|
2022-11-22 09:42:01 +00:00
|
|
|
|
StmtKind::Expr(expr)
|
2022-10-22 07:56:26 +08:00
|
|
|
|
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
|
|
|
|
|
{
|
2020-02-25 00:59:39 +01:00
|
|
|
|
// Just check for errors and recover; do not eat semicolon yet.
|
2022-11-16 21:46:06 +01:00
|
|
|
|
|
2024-12-04 15:55:06 +11:00
|
|
|
|
let expect_result = self.expect_one_of(&[], &[exp!(Semi), exp!(CloseBrace)]);
|
2022-11-16 21:46:06 +01:00
|
|
|
|
|
2024-02-25 22:22:11 +01:00
|
|
|
|
// Try to both emit a better diagnostic, and avoid further errors by replacing
|
|
|
|
|
// the `expr` with `ExprKind::Err`.
|
2022-11-16 21:46:06 +01:00
|
|
|
|
let replace_with_err = 'break_recover: {
|
|
|
|
|
match expect_result {
|
2024-02-25 22:22:11 +01:00
|
|
|
|
Ok(Recovered::No) => None,
|
2024-05-09 18:44:40 +10:00
|
|
|
|
Ok(Recovered::Yes(guar)) => {
|
2024-02-25 22:22:11 +01:00
|
|
|
|
// Skip type error to avoid extra errors.
|
|
|
|
|
Some(guar)
|
|
|
|
|
}
|
Make `DiagnosticBuilder::emit` consuming.
This works for most of its call sites. This is nice, because `emit` very
much makes sense as a consuming operation -- indeed,
`DiagnosticBuilderState` exists to ensure no diagnostic is emitted
twice, but it uses runtime checks.
For the small number of call sites where a consuming emit doesn't work,
the commit adds `DiagnosticBuilder::emit_without_consuming`. (This will
be removed in subsequent commits.)
Likewise, `emit_unless` becomes consuming. And `delay_as_bug` becomes
consuming, while `delay_as_bug_without_consuming` is added (which will
also be removed in subsequent commits.)
All this requires significant changes to `DiagnosticBuilder`'s chaining
methods. Currently `DiagnosticBuilder` method chaining uses a
non-consuming `&mut self -> &mut Self` style, which allows chaining to
be used when the chain ends in `emit()`, like so:
```
struct_err(msg).span(span).emit();
```
But it doesn't work when producing a `DiagnosticBuilder` value,
requiring this:
```
let mut err = self.struct_err(msg);
err.span(span);
err
```
This style of chaining won't work with consuming `emit` though. For
that, we need to use to a `self -> Self` style. That also would allow
`DiagnosticBuilder` production to be chained, e.g.:
```
self.struct_err(msg).span(span)
```
However, removing the `&mut self -> &mut Self` style would require that
individual modifications of a `DiagnosticBuilder` go from this:
```
err.span(span);
```
to this:
```
err = err.span(span);
```
There are *many* such places. I have a high tolerance for tedious
refactorings, but even I gave up after a long time trying to convert
them all.
Instead, this commit has it both ways: the existing `&mut self -> &mut Self`
chaining methods are kept, and new `self -> Self` chaining methods are
added, all of which have a `_mv` suffix (short for "move"). Changes to
the existing `forward!` macro let this happen with very little
additional boilerplate code. I chose to add the suffix to the new
chaining methods rather than the existing ones, because the number of
changes required is much smaller that way.
This doubled chaining is a bit clumsy, but I think it is worthwhile
because it allows a *lot* of good things to subsequently happen. In this
commit, there are many `mut` qualifiers removed in places where
diagnostics are emitted without being modified. In subsequent commits:
- chaining can be used more, making the code more concise;
- more use of chaining also permits the removal of redundant diagnostic
APIs like `struct_err_with_code`, which can be replaced easily with
`struct_err` + `code_mv`;
- `emit_without_diagnostic` can be removed, which simplifies a lot of
machinery, removing the need for `DiagnosticBuilderState`.
2024-01-03 12:17:35 +11:00
|
|
|
|
Err(e) => {
|
2022-11-16 21:46:06 +01:00
|
|
|
|
if self.recover_colon_as_semi() {
|
|
|
|
|
// recover_colon_as_semi has already emitted a nicer error.
|
2023-04-28 09:55:38 +08:00
|
|
|
|
e.delay_as_bug();
|
2022-11-16 21:46:06 +01:00
|
|
|
|
add_semi_to_stmt = true;
|
|
|
|
|
eat_semi = false;
|
|
|
|
|
|
2024-02-25 22:22:11 +01:00
|
|
|
|
break 'break_recover None;
|
2022-11-16 21:46:06 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match &expr.kind {
|
|
|
|
|
ExprKind::Path(None, ast::Path { segments, .. })
|
2024-08-07 12:41:49 +02:00
|
|
|
|
if let [segment] = segments.as_slice() =>
|
2022-11-16 21:46:06 +01:00
|
|
|
|
{
|
|
|
|
|
if self.token == token::Colon
|
|
|
|
|
&& self.look_ahead(1, |token| {
|
2024-04-18 20:18:13 +10:00
|
|
|
|
token.is_metavar_block()
|
2022-11-16 21:46:06 +01:00
|
|
|
|
|| matches!(
|
|
|
|
|
token.kind,
|
|
|
|
|
token::Ident(
|
|
|
|
|
kw::For | kw::Loop | kw::While,
|
2024-02-13 23:28:27 +00:00
|
|
|
|
token::IdentIsRaw::No
|
Remove `token::{Open,Close}Delim`.
By replacing them with `{Open,Close}{Param,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
2025-04-16 16:13:50 +10:00
|
|
|
|
) | token::OpenBrace
|
2022-11-16 21:46:06 +01:00
|
|
|
|
)
|
|
|
|
|
})
|
|
|
|
|
{
|
|
|
|
|
let snapshot = self.create_snapshot_for_diagnostic();
|
|
|
|
|
let label = Label {
|
|
|
|
|
ident: Ident::from_str_and_span(
|
2024-08-07 12:41:49 +02:00
|
|
|
|
&format!("'{}", segment.ident),
|
|
|
|
|
segment.ident.span,
|
2022-11-16 21:46:06 +01:00
|
|
|
|
),
|
|
|
|
|
};
|
|
|
|
|
match self.parse_expr_labeled(label, false) {
|
|
|
|
|
Ok(labeled_expr) => {
|
2024-02-25 22:22:11 +01:00
|
|
|
|
e.cancel();
|
2023-12-18 21:14:02 +11:00
|
|
|
|
self.dcx().emit_err(MalformedLoopLabel {
|
2022-11-16 21:46:06 +01:00
|
|
|
|
span: label.ident.span,
|
2024-07-06 03:07:46 +00:00
|
|
|
|
suggestion: label.ident.span.shrink_to_lo(),
|
2022-11-16 21:46:06 +01:00
|
|
|
|
});
|
|
|
|
|
*expr = labeled_expr;
|
2024-02-25 22:22:11 +01:00
|
|
|
|
break 'break_recover None;
|
2022-11-16 21:46:06 +01:00
|
|
|
|
}
|
|
|
|
|
Err(err) => {
|
|
|
|
|
err.cancel();
|
|
|
|
|
self.restore_snapshot(snapshot);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
_ => {}
|
|
|
|
|
}
|
2022-10-22 07:56:26 +08:00
|
|
|
|
|
2024-02-25 22:22:11 +01:00
|
|
|
|
let res =
|
|
|
|
|
self.check_mistyped_turbofish_with_multiple_type_params(e, expr);
|
|
|
|
|
|
|
|
|
|
Some(if recover.no() {
|
|
|
|
|
res?
|
|
|
|
|
} else {
|
Detect missing `.` in method chain in let bindings and statements
On parse errors where an ident is found where one wasn't expected, see if the next elements might have been meant as a method call or field access.
```
error: expected one of `.`, `;`, `?`, `else`, or an operator, found `map`
--> $DIR/missing-dot-on-statement-expression.rs:7:29
|
LL | let _ = [1, 2, 3].iter()map(|x| x);
| ^^^ expected one of `.`, `;`, `?`, `else`, or an operator
|
help: you might have meant to write a method call
|
LL | let _ = [1, 2, 3].iter().map(|x| x);
| +
```
2024-11-16 00:07:58 +00:00
|
|
|
|
res.unwrap_or_else(|mut e| {
|
|
|
|
|
self.recover_missing_dot(&mut e);
|
2024-02-25 22:22:11 +01:00
|
|
|
|
let guar = e.emit();
|
|
|
|
|
self.recover_stmt();
|
|
|
|
|
guar
|
|
|
|
|
})
|
|
|
|
|
})
|
2020-08-12 15:39:15 -07:00
|
|
|
|
}
|
2020-08-31 10:24:37 -07:00
|
|
|
|
}
|
2022-10-22 07:56:26 +08:00
|
|
|
|
};
|
2022-11-16 21:46:06 +01:00
|
|
|
|
|
2024-02-25 22:22:11 +01:00
|
|
|
|
if let Some(guar) = replace_with_err {
|
2022-10-22 07:56:26 +08:00
|
|
|
|
// We already emitted an error, so don't emit another type error
|
2020-02-29 14:56:15 +03:00
|
|
|
|
let sp = expr.span.to(self.prev_token.span);
|
2024-02-25 22:22:11 +01:00
|
|
|
|
*expr = self.mk_expr_err(sp, guar);
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
2020-11-17 14:27:44 -05:00
|
|
|
|
StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
|
2024-03-14 11:25:05 +01:00
|
|
|
|
StmtKind::Let(local) if let Err(mut e) = self.expect_semi() => {
|
2021-08-16 17:29:49 +02:00
|
|
|
|
// We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
|
2021-06-22 13:00:58 -05:00
|
|
|
|
match &mut local.kind {
|
|
|
|
|
LocalKind::Init(expr) | LocalKind::InitElse(expr, _) => {
|
Detect missing `.` in method chain in let bindings and statements
On parse errors where an ident is found where one wasn't expected, see if the next elements might have been meant as a method call or field access.
```
error: expected one of `.`, `;`, `?`, `else`, or an operator, found `map`
--> $DIR/missing-dot-on-statement-expression.rs:7:29
|
LL | let _ = [1, 2, 3].iter()map(|x| x);
| ^^^ expected one of `.`, `;`, `?`, `else`, or an operator
|
help: you might have meant to write a method call
|
LL | let _ = [1, 2, 3].iter().map(|x| x);
| +
```
2024-11-16 00:07:58 +00:00
|
|
|
|
self.check_mistyped_turbofish_with_multiple_type_params(e, expr).map_err(
|
|
|
|
|
|mut e| {
|
|
|
|
|
self.recover_missing_dot(&mut e);
|
|
|
|
|
e
|
|
|
|
|
},
|
|
|
|
|
)?;
|
2022-01-12 20:43:24 +00:00
|
|
|
|
// We found `foo<bar, baz>`, have we fully recovered?
|
|
|
|
|
self.expect_semi()?;
|
|
|
|
|
}
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
around and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
LocalKind::Decl => {
|
|
|
|
|
if let Some(colon_sp) = local.colon_sp {
|
|
|
|
|
e.span_label(
|
|
|
|
|
colon_sp,
|
|
|
|
|
format!(
|
|
|
|
|
"while parsing the type for {}",
|
|
|
|
|
local.pat.descr().map_or_else(
|
|
|
|
|
|| "the binding".to_string(),
|
|
|
|
|
|n| format!("`{n}`")
|
|
|
|
|
)
|
|
|
|
|
),
|
|
|
|
|
);
|
2024-08-09 17:44:47 +10:00
|
|
|
|
let suggest_eq = if self.token == token::Dot
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
around and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
&& let _ = self.bump()
|
|
|
|
|
&& let mut snapshot = self.create_snapshot_for_diagnostic()
|
2024-03-06 21:59:03 +00:00
|
|
|
|
&& let Ok(_) = snapshot
|
|
|
|
|
.parse_dot_suffix_expr(
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
around and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
colon_sp,
|
2024-03-06 21:59:03 +00:00
|
|
|
|
self.mk_expr_err(
|
|
|
|
|
colon_sp,
|
|
|
|
|
self.dcx()
|
|
|
|
|
.delayed_bug("error during `:` -> `=` recovery"),
|
|
|
|
|
),
|
|
|
|
|
)
|
|
|
|
|
.map_err(Diag::cancel)
|
|
|
|
|
{
|
Detect more cases of `=` to `:` typo
When a `Local` is fully parsed, but not followed by a `;`, keep the `:` span
around and mention it. If the type could continue being parsed as an
expression, suggest replacing the `:` with a `=`.
```
error: expected one of `!`, `+`, `->`, `::`, `;`, or `=`, found `.`
--> file.rs:2:32
|
2 | let _: std::env::temp_dir().join("foo");
| - ^ expected one of `!`, `+`, `->`, `::`, `;`, or `=`
| |
| while parsing the type for `_`
| help: use `=` if you meant to assign
```
Fix #119665.
2024-02-27 00:48:32 +00:00
|
|
|
|
true
|
|
|
|
|
} else if let Some(op) = self.check_assoc_op()
|
|
|
|
|
&& op.node.can_continue_expr_unambiguously()
|
|
|
|
|
{
|
|
|
|
|
true
|
|
|
|
|
} else {
|
|
|
|
|
false
|
|
|
|
|
};
|
|
|
|
|
if suggest_eq {
|
|
|
|
|
e.span_suggestion_short(
|
|
|
|
|
colon_sp,
|
|
|
|
|
"use `=` if you meant to assign",
|
|
|
|
|
"=",
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return Err(e);
|
|
|
|
|
}
|
2020-08-31 10:24:37 -07:00
|
|
|
|
}
|
2019-12-03 18:47:44 +01:00
|
|
|
|
eat_semi = false;
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
2024-03-14 11:25:05 +01:00
|
|
|
|
StmtKind::Empty | StmtKind::Item(_) | StmtKind::Let(_) | StmtKind::Semi(_) => {
|
2021-08-16 17:29:49 +02:00
|
|
|
|
eat_semi = false
|
2023-10-13 08:58:33 +00:00
|
|
|
|
}
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|
|
|
|
|
|
2024-12-04 15:55:06 +11:00
|
|
|
|
if add_semi_to_stmt || (eat_semi && self.eat(exp!(Semi))) {
|
2019-08-11 20:32:29 +02:00
|
|
|
|
stmt = stmt.add_trailing_semicolon();
|
|
|
|
|
}
|
2022-11-16 21:46:06 +01:00
|
|
|
|
|
2020-02-29 14:56:15 +03:00
|
|
|
|
stmt.span = stmt.span.to(self.prev_token.span);
|
2019-08-11 20:32:29 +02:00
|
|
|
|
Ok(Some(stmt))
|
|
|
|
|
}
|
|
|
|
|
|
2023-01-30 14:13:27 +11:00
|
|
|
|
pub(super) fn mk_block(
|
|
|
|
|
&self,
|
|
|
|
|
stmts: ThinVec<Stmt>,
|
|
|
|
|
rules: BlockCheckMode,
|
|
|
|
|
span: Span,
|
|
|
|
|
) -> P<Block> {
|
2025-03-23 22:00:39 +01:00
|
|
|
|
P(Block { stmts, id: DUMMY_NODE_ID, rules, span, tokens: None })
|
2019-12-06 23:23:30 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub(super) fn mk_stmt(&self, span: Span, kind: StmtKind) -> Stmt {
|
2020-11-17 14:27:44 -05:00
|
|
|
|
Stmt { id: DUMMY_NODE_ID, kind, span }
|
2019-12-02 09:32:54 +01:00
|
|
|
|
}
|
2019-12-31 01:57:42 +01:00
|
|
|
|
|
2024-02-25 22:22:11 +01:00
|
|
|
|
pub(super) fn mk_stmt_err(&self, span: Span, guar: ErrorGuaranteed) -> Stmt {
|
|
|
|
|
self.mk_stmt(span, StmtKind::Expr(self.mk_expr_err(span, guar)))
|
2019-12-31 01:57:42 +01:00
|
|
|
|
}
|
|
|
|
|
|
2024-02-25 22:22:11 +01:00
|
|
|
|
pub(super) fn mk_block_err(&self, span: Span, guar: ErrorGuaranteed) -> P<Block> {
|
|
|
|
|
self.mk_block(thin_vec![self.mk_stmt_err(span, guar)], BlockCheckMode::Default, span)
|
2019-12-31 01:57:42 +01:00
|
|
|
|
}
|
2019-08-11 20:32:29 +02:00
|
|
|
|
}
|