// rust/compiler/rustc_parse/src/parser/item.rs
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, TrailingToken,
};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::fluent_generated as fluent;
use crate::maybe_whole;
use ast::token::IdentIsRaw;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::util::case::Case;
use rustc_ast::{self as ast};
use rustc_ast_pretty::pprust;
use rustc_errors::{codes::*, struct_span_code_err, Applicability, PResult, StashKey};
use rustc_span::edit_distance::edit_distance;
use rustc_span::edition::Edition;
use rustc_span::source_map;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use std::fmt::Write;
use std::mem;
use thin_vec::{thin_vec, ThinVec};
use tracing::debug;
impl<'a> Parser<'a> {
/// Parses a source module as a crate. This is the main entry point for the parser.
pub fn parse_crate_mod(&mut self) -> PResult<'a, ast::Crate> {
let (attrs, items, spans) = self.parse_mod(&token::Eof)?;
Ok(ast::Crate { attrs, items, spans, id: DUMMY_NODE_ID, is_placeholder: false })
}
/// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
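///
/// For example (illustrative):
///
/// ```ignore (illustrative)
/// mod foo;
/// mod bar { /* items */ }
/// ```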
fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> {
let safety = self.parse_safety(Case::Sensitive);
self.expect_keyword(kw::Mod)?;
let id = self.parse_ident()?;
let mod_kind = if self.eat(&token::Semi) {
ModKind::Unloaded
} else {
self.expect(&token::OpenDelim(Delimiter::Brace))?;
let (inner_attrs, items, inner_span) =
self.parse_mod(&token::CloseDelim(Delimiter::Brace))?;
attrs.extend(inner_attrs);
ModKind::Loaded(items, Inline::Yes, inner_span)
};
Ok((id, ItemKind::Mod(safety, mod_kind)))
}
/// Parses the contents of a module (inner attributes followed by module items).
/// We exit once we hit `term`.
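///
/// For example (illustrative), the contents being parsed here look like:
///
/// ```ignore (illustrative)
/// #![allow(dead_code)] // inner attributes come first
/// fn an_item() {}
/// struct AnotherItem;
/// ```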
pub fn parse_mod(
&mut self,
term: &TokenKind,
) -> PResult<'a, (AttrVec, ThinVec<P<Item>>, ModSpans)> {
let lo = self.token.span;
let attrs = self.parse_inner_attributes()?;
let post_attr_lo = self.token.span;
let mut items: ThinVec<P<_>> = ThinVec::new();
// There shouldn't be any stray semicolons before or after items.
// `parse_item` consumes the appropriate semicolons so any leftover is an error.
loop {
while self.maybe_consume_incorrect_semicolon(items.last().map(|x| &**x)) {} // Eat all bad semicolons
let Some(item) = self.parse_item(ForceCollect::No)? else {
break;
};
items.push(item);
}
if !self.eat(term) {
let token_str = super::token_descr(&self.token);
if !self.maybe_consume_incorrect_semicolon(items.last().map(|x| &**x)) {
let msg = format!("expected item, found {token_str}");
let mut err = self.dcx().struct_span_err(self.token.span, msg);
let span = self.token.span;
if self.is_kw_followed_by_ident(kw::Let) {
err.span_label(
span,
"consider using `const` or `static` instead of `let` for global variables",
);
} else {
err.span_label(span, "expected item")
.note("for a full list of items that can appear in modules, see <https://doc.rust-lang.org/reference/items.html>");
};
return Err(err);
}
}
let inject_use_span = post_attr_lo.data().with_hi(post_attr_lo.lo());
let mod_spans = ModSpans { inner_span: lo.to(self.prev_token.span), inject_use_span };
Ok((attrs, items, mod_spans))
}
}
pub(super) type ItemInfo = (Ident, ItemKind);
impl<'a> Parser<'a> {
pub fn parse_item(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<P<Item>>> {
let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
self.parse_item_(fn_parse_mode, force_collect).map(|i| i.map(P))
2019-08-11 18:34:42 +02:00
}
fn parse_item_(
&mut self,
fn_parse_mode: FnParseMode,
force_collect: ForceCollect,
) -> PResult<'a, Option<Item>> {
self.recover_vcs_conflict_marker();
let attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
self.parse_item_common(attrs, true, false, fn_parse_mode, force_collect)
}
pub(super) fn parse_item_common(
&mut self,
attrs: AttrWrapper,
mac_allowed: bool,
attrs_allowed: bool,
fn_parse_mode: FnParseMode,
force_collect: ForceCollect,
) -> PResult<'a, Option<Item>> {
maybe_whole!(self, NtItem, |item| {
attrs.prepend_to_nt_inner(&mut item.attrs);
Some(item.into_inner())
});
let item =
self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
let item =
this.parse_item_common_(attrs, mac_allowed, attrs_allowed, fn_parse_mode);
Ok((item?, TrailingToken::None))
})?;
Ok(item)
}
fn parse_item_common_(
&mut self,
mut attrs: AttrVec,
mac_allowed: bool,
attrs_allowed: bool,
fn_parse_mode: FnParseMode,
) -> PResult<'a, Option<Item>> {
let lo = self.token.span;
let vis = self.parse_visibility(FollowedByType::No)?;
let mut def = self.parse_defaultness();
let kind = self.parse_item_kind(
&mut attrs,
mac_allowed,
lo,
&vis,
&mut def,
fn_parse_mode,
Case::Sensitive,
)?;
if let Some((ident, kind)) = kind {
self.error_on_unconsumed_default(def, &kind);
let span = lo.to(self.prev_token.span);
let id = DUMMY_NODE_ID;
let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
return Ok(Some(item));
}
// At this point, we have failed to parse an item.
if !matches!(vis.kind, VisibilityKind::Inherited) {
self.dcx().emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
}
if let Defaultness::Default(span) = def {
self.dcx().emit_err(errors::DefaultNotFollowedByItem { span });
}
if !attrs_allowed {
self.recover_attrs_no_item(&attrs)?;
}
Ok(None)
}
/// Error in case `default` was parsed in an inappropriate context.
fn error_on_unconsumed_default(&self, def: Defaultness, kind: &ItemKind) {
if let Defaultness::Default(span) = def {
self.dcx().emit_err(errors::InappropriateDefault {
span,
article: kind.article(),
descr: kind.descr(),
});
}
}
/// Parses one of the items allowed by the flags.
fn parse_item_kind(
&mut self,
attrs: &mut AttrVec,
macros_allowed: bool,
lo: Span,
vis: &Visibility,
def: &mut Defaultness,
fn_parse_mode: FnParseMode,
case: Case,
) -> PResult<'a, Option<ItemInfo>> {
let check_pub = def == &Defaultness::Final;
let mut def_ = || mem::replace(def, Defaultness::Final);
let info = if self.eat_keyword_case(kw::Use, case) {
self.parse_use_item()?
} else if self.check_fn_front_matter(check_pub, case) {
// FUNCTION ITEM
let (ident, sig, generics, body) =
self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
(ident, ItemKind::Fn(Box::new(Fn { defaultness: def_(), sig, generics, body })))
} else if self.eat_keyword(kw::Extern) {
if self.eat_keyword(kw::Crate) {
// EXTERN CRATE
self.parse_item_extern_crate()?
} else {
// EXTERN BLOCK
self.parse_item_foreign_mod(attrs, Safety::Default)?
}
} else if self.is_unsafe_foreign_mod() {
// EXTERN BLOCK
let safety = self.parse_safety(Case::Sensitive);
self.expect_keyword(kw::Extern)?;
self.parse_item_foreign_mod(attrs, safety)?
} else if self.is_static_global() {
let safety = self.parse_safety(Case::Sensitive);
// STATIC ITEM
self.bump(); // `static`
let mutability = self.parse_mutability();
let (ident, item) = self.parse_static_item(safety, mutability)?;
(ident, ItemKind::Static(Box::new(item)))
} else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) {
// CONST ITEM
if self.token.is_keyword(kw::Impl) {
// recover from `const impl`, suggest `impl const`
self.recover_const_impl(const_span, attrs, def_())?
} else {
self.recover_const_mut(const_span);
let (ident, generics, ty, expr) = self.parse_const_item()?;
(
ident,
ItemKind::Const(Box::new(ConstItem {
defaultness: def_(),
generics,
ty,
expr,
})),
)
}
} else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
// TRAIT ITEM
self.parse_item_trait(attrs, lo)?
} else if self.check_keyword(kw::Impl)
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Impl])
{
// IMPL ITEM
self.parse_item_impl(attrs, def_())?
} else if self.is_reuse_path_item() {
self.parse_item_delegation()?
} else if self.check_keyword(kw::Mod)
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Mod])
{
// MODULE ITEM
self.parse_item_mod(attrs)?
} else if self.eat_keyword(kw::Type) {
// TYPE ITEM
self.parse_type_alias(def_())?
} else if self.eat_keyword(kw::Enum) {
// ENUM ITEM
self.parse_item_enum()?
} else if self.eat_keyword(kw::Struct) {
// STRUCT ITEM
self.parse_item_struct()?
} else if self.is_kw_followed_by_ident(kw::Union) {
// UNION ITEM
self.bump(); // `union`
self.parse_item_union()?
} else if self.is_builtin() {
// BUILTIN# ITEM
return self.parse_item_builtin();
} else if self.eat_keyword(kw::Macro) {
// MACROS 2.0 ITEM
self.parse_item_decl_macro(lo)?
} else if let IsMacroRulesItem::Yes { has_bang } = self.is_macro_rules_item() {
// MACRO_RULES ITEM
self.parse_item_macro_rules(vis, has_bang)?
} else if self.isnt_macro_invocation()
&& (self.token.is_ident_named(sym::import)
|| self.token.is_ident_named(sym::using)
|| self.token.is_ident_named(sym::include)
|| self.token.is_ident_named(sym::require))
{
return self.recover_import_as_use();
} else if self.isnt_macro_invocation() && vis.kind.is_pub() {
self.recover_missing_kw_before_item()?;
return Ok(None);
} else if self.isnt_macro_invocation() && case == Case::Sensitive {
_ = def_;
// Recover wrong cased keywords
return self.parse_item_kind(
attrs,
macros_allowed,
lo,
vis,
def,
fn_parse_mode,
Case::Insensitive,
);
} else if macros_allowed && self.check_path() {
// MACRO INVOCATION ITEM
(Ident::empty(), ItemKind::MacCall(P(self.parse_item_macro(vis)?)))
} else {
return Ok(None);
};
Ok(Some(info))
}
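/// Recovers a foreign-language import keyword used instead of `use`, e.g. (illustrative)
/// `import std::io;`, `using std::io;`, `include std::io;`, or `require std::io;`, by
/// reparsing it as a `use` item and emitting an error; on failure the snapshot is restored.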
fn recover_import_as_use(&mut self) -> PResult<'a, Option<ItemInfo>> {
let span = self.token.span;
let token_name = super::token_descr(&self.token);
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
match self.parse_use_item() {
Ok(u) => {
self.dcx().emit_err(errors::RecoverImportAsUse { span, token_name });
Ok(Some(u))
}
Err(e) => {
e.cancel();
self.restore_snapshot(snapshot);
Ok(None)
}
}
}
fn parse_use_item(&mut self) -> PResult<'a, ItemInfo> {
let tree = self.parse_use_tree()?;
if let Err(mut e) = self.expect_semi() {
match tree.kind {
UseTreeKind::Glob => {
e.note("the wildcard token must be last on the path");
}
UseTreeKind::Nested { .. } => {
e.note("glob-like brace syntax must be last on the path");
}
_ => (),
}
return Err(e);
}
Ok((Ident::empty(), ItemKind::Use(tree)))
}
/// When parsing a statement, would the start of a path be an item?
pub(super) fn is_path_start_item(&mut self) -> bool {
self.is_kw_followed_by_ident(kw::Union) // no: `union::b`, yes: `union U { .. }`
|| self.is_reuse_path_item()
|| self.check_auto_or_unsafe_trait_item() // no: `auto::b`, yes: `auto trait X { .. }`
|| self.is_async_fn() // no(2015): `async::b`, yes: `async fn`
|| matches!(self.is_macro_rules_item(), IsMacroRulesItem::Yes{..}) // no: `macro_rules::b`, yes: `macro_rules! mac`
}
fn is_reuse_path_item(&mut self) -> bool {
// no: `reuse ::path` for compatibility reasons with macro invocations
self.token.is_keyword(kw::Reuse)
&& self.look_ahead(1, |t| t.is_path_start() && t.kind != token::PathSep)
}
/// Are we sure this could not possibly be a macro invocation?
fn isnt_macro_invocation(&mut self) -> bool {
self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep)
}
/// Recover on encountering a struct or method definition where the user
/// forgot to add the `struct` or `fn` keyword after writing `pub`: `pub S {}`.
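///
/// Further illustrative inputs this recovers from:
///
/// ```ignore (illustrative)
/// pub S {}                                       // missing `struct`
/// pub S(String);                                 // missing `struct` (tuple struct)
/// pub foo(s: &str) -> String { s.to_string() }   // missing `fn`
/// ```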
fn recover_missing_kw_before_item(&mut self) -> PResult<'a, ()> {
// Space between `pub` keyword and the identifier
//
// pub S {}
// ^^^ `sp` points here
let sp = self.prev_token.span.between(self.token.span);
let full_sp = self.prev_token.span.to(self.token.span);
let ident_sp = self.token.span;
let ident = if self.look_ahead(1, |t| {
[
token::Lt,
token::OpenDelim(Delimiter::Brace),
token::OpenDelim(Delimiter::Parenthesis),
]
.contains(&t.kind)
}) {
self.parse_ident().unwrap()
} else {
return Ok(());
};
let mut found_generics = false;
if self.check(&token::Lt) {
found_generics = true;
self.eat_to_tokens(&[&token::Gt]);
self.bump(); // `>`
}
let err = if self.check(&token::OpenDelim(Delimiter::Brace)) {
// possible public struct definition where `struct` was forgotten
Some(errors::MissingKeywordForItemDefinition::Struct { span: sp, ident })
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// possible public function or tuple struct definition where `fn`/`struct` was
// forgotten
self.bump(); // `(`
let is_method = self.recover_self_param();
self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
let err =
if self.check(&token::RArrow) || self.check(&token::OpenDelim(Delimiter::Brace)) {
self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
self.bump(); // `{`
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
if is_method {
errors::MissingKeywordForItemDefinition::Method { span: sp, ident }
} else {
errors::MissingKeywordForItemDefinition::Function { span: sp, ident }
}
} else if self.check(&token::Semi) {
errors::MissingKeywordForItemDefinition::Struct { span: sp, ident }
} else {
errors::MissingKeywordForItemDefinition::Ambiguous {
span: sp,
subdiag: if found_generics {
None
} else if let Ok(snippet) = self.span_to_snippet(ident_sp) {
Some(errors::AmbiguousMissingKwForItemSub::SuggestMacro {
span: full_sp,
snippet,
})
} else {
Some(errors::AmbiguousMissingKwForItemSub::HelpMacro)
},
}
};
Some(err)
} else if found_generics {
Some(errors::MissingKeywordForItemDefinition::Ambiguous { span: sp, subdiag: None })
} else {
None
};
if let Some(err) = err { Err(self.dcx().create_err(err)) } else { Ok(()) }
}
fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> {
// To be expanded
return Ok(None);
}
/// Parses an item macro, e.g., `item!();`.
fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
self.expect(&token::Not)?; // `!`
match self.parse_delim_args() {
// `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
Ok(args) => {
self.eat_semi_for_macro_if_needed(&args);
self.complain_if_pub_macro(vis, false);
Ok(MacCall { path, args })
}
Err(mut err) => {
// Maybe the user misspelled `macro_rules` (issue #91227)
if self.token.is_ident()
&& path.segments.len() == 1
&& edit_distance("macro_rules", &path.segments[0].ident.to_string(), 2)
.is_some()
{
err.span_suggestion(
path.span,
"perhaps you meant to define a macro",
"macro_rules",
Applicability::MachineApplicable,
);
}
Err(err)
}
}
}
/// Recover if we parsed attributes and expected an item but there was none.
fn recover_attrs_no_item(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> {
let ([start @ end] | [start, .., end]) = attrs else {
return Ok(());
};
let msg = if end.is_doc_comment() {
"expected item after doc comment"
} else {
"expected item after attributes"
};
let mut err = self.dcx().struct_span_err(end.span, msg);
if end.is_doc_comment() {
err.span_label(end.span, "this doc comment doesn't document anything");
} else if self.token.kind == TokenKind::Semi {
err.span_suggestion_verbose(
self.token.span,
"consider removing this semicolon",
"",
Applicability::MaybeIncorrect,
);
}
if let [.., penultimate, _] = attrs {
err.span_label(start.span.to(penultimate.span), "other attributes here");
}
Err(err)
}
fn is_async_fn(&self) -> bool {
self.token.is_keyword(kw::Async) && self.is_keyword_ahead(1, &[kw::Fn])
}
fn parse_polarity(&mut self) -> ast::ImplPolarity {
// Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) {
self.bump(); // `!`
ast::ImplPolarity::Negative(self.prev_token.span)
} else {
ast::ImplPolarity::Positive
}
}
/// Parses an implementation item.
///
/// ```ignore (illustrative)
/// impl<'a, T> TYPE { /* impl items */ }
/// impl<'a, T> TRAIT for TYPE { /* impl items */ }
/// impl<'a, T> !TRAIT for TYPE { /* impl items */ }
/// impl<'a, T> const TRAIT for TYPE { /* impl items */ }
/// ```
///
/// We actually parse a slightly more relaxed grammar for better error reporting and recovery.
/// ```ebnf
/// "impl" GENERICS "const"? "!"? TYPE "for"? (TYPE | "..") ("where" PREDICATES)? "{" BODY "}"
/// "impl" GENERICS "const"? "!"? TYPE ("where" PREDICATES)? "{" BODY "}"
/// ```
fn parse_item_impl(
&mut self,
attrs: &mut AttrVec,
defaultness: Defaultness,
) -> PResult<'a, ItemInfo> {
let safety = self.parse_safety(Case::Sensitive);
self.expect_keyword(kw::Impl)?;
// First, parse generic parameters if necessary.
let mut generics = if self.choose_generics_over_qpath(0) {
self.parse_generics()?
} else {
let mut generics = Generics::default();
// impl A for B {}
// /\ this is where `generics.span` should point when there are no type params.
generics.span = self.prev_token.span.shrink_to_hi();
generics
};
let constness = self.parse_constness(Case::Sensitive);
if let Const::Yes(span) = constness {
self.psess.gated_spans.gate(sym::const_trait_impl, span);
}
// Parse stray `impl async Trait`
if (self.token.uninterpolated_span().at_least_rust_2018()
&& self.token.is_keyword(kw::Async))
|| self.is_kw_followed_by_ident(kw::Async)
{
self.bump();
self.dcx().emit_err(errors::AsyncImpl { span: self.prev_token.span });
}
let polarity = self.parse_polarity();
// Parse both types and traits as a type, then reinterpret if necessary.
let err_path = |span| ast::Path::from_ident(Ident::new(kw::Empty, span));
let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
{
let span = self.prev_token.span.between(self.token.span);
self.dcx().emit_err(errors::MissingTraitInTraitImpl {
span,
for_span: span.to(self.token.span),
});
P(Ty {
kind: TyKind::Path(None, err_path(span)),
span,
id: DUMMY_NODE_ID,
tokens: None,
})
} else {
self.parse_ty_with_generics_recovery(&generics)?
};
// If `for` is missing we try to recover.
let has_for = self.eat_keyword(kw::For);
let missing_for_span = self.prev_token.span.between(self.token.span);
let ty_second = if self.token == token::DotDot {
// We need to report this error after `cfg` expansion for compatibility reasons
self.bump(); // `..`, do not add it to expected tokens
// AST validation later detects this `TyKind::Dummy` and emits an
// error. (#121072 will hopefully remove all this special handling
// of the obsolete `impl Trait for ..` and then this can go away.)
Some(self.mk_ty(self.prev_token.span, TyKind::Dummy))
} else if has_for || self.token.can_begin_type() {
Some(self.parse_ty()?)
} else {
None
};
generics.where_clause = self.parse_where_clause()?;
let impl_items = self.parse_item_list(attrs, |p| p.parse_impl_item(ForceCollect::No))?;
let item_kind = match ty_second {
Some(ty_second) => {
// impl Trait for Type
if !has_for {
self.dcx().emit_err(errors::MissingForInTraitImpl { span: missing_for_span });
}
let ty_first = ty_first.into_inner();
let path = match ty_first.kind {
// This notably includes paths passed through `ty` macro fragments (#46438).
TyKind::Path(None, path) => path,
other => {
if let TyKind::ImplTrait(_, bounds) = other
&& let [bound] = bounds.as_slice()
{
// Suggest removing extra `impl` keyword:
// `impl<T: Default> impl Default for Wrapper<T>`
// ^^^^^
let extra_impl_kw = ty_first.span.until(bound.span());
self.dcx().emit_err(errors::ExtraImplKeywordInTraitImpl {
extra_impl_kw,
impl_trait_span: ty_first.span,
});
} else {
self.dcx().emit_err(errors::ExpectedTraitInTraitImplFoundType {
span: ty_first.span,
});
}
err_path(ty_first.span)
}
};
let trait_ref = TraitRef { path, ref_id: ty_first.id };
ItemKind::Impl(Box::new(Impl {
safety,
polarity,
defaultness,
constness,
generics,
of_trait: Some(trait_ref),
self_ty: ty_second,
items: impl_items,
}))
}
None => {
// impl Type
ItemKind::Impl(Box::new(Impl {
safety,
polarity,
defaultness,
constness,
generics,
of_trait: None,
self_ty: ty_first,
items: impl_items,
}))
}
};
Ok((Ident::empty(), item_kind))
}
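/// Parses a delegation item (the unstable `fn_delegation` feature), which reuses another
/// function as the body of a new one. Illustrative forms accepted by this parser:
///
/// ```ignore (illustrative)
/// reuse to_reuse::foo;
/// reuse to_reuse::foo as bar;
/// reuse to_reuse::foo { self.0.foo() }
/// reuse prefix::{a, b as c};
/// reuse prefix::*;
/// ```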
fn parse_item_delegation(&mut self) -> PResult<'a, ItemInfo> {
let span = self.token.span;
self.expect_keyword(kw::Reuse)?;
let (qself, path) = if self.eat_lt() {
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
(Some(qself), path)
} else {
(None, self.parse_path(PathStyle::Expr)?)
};
let rename = |this: &mut Self| {
Ok(if this.eat_keyword(kw::As) { Some(this.parse_ident()?) } else { None })
};
let body = |this: &mut Self| {
Ok(if this.check(&token::OpenDelim(Delimiter::Brace)) {
Some(this.parse_block()?)
} else {
this.expect(&token::Semi)?;
None
})
};
let (ident, item_kind) = if self.eat(&token::PathSep) {
let suffixes = if self.eat(&token::BinOp(token::Star)) {
None
} else {
let parse_suffix = |p: &mut Self| Ok((p.parse_path_segment_ident()?, rename(p)?));
Some(self.parse_delim_comma_seq(Delimiter::Brace, parse_suffix)?.0)
};
let deleg = DelegationMac { qself, prefix: path, suffixes, body: body(self)? };
(Ident::empty(), ItemKind::DelegationMac(Box::new(deleg)))
} else {
let rename = rename(self)?;
let ident = rename.unwrap_or_else(|| path.segments.last().unwrap().ident);
let deleg = Delegation {
id: DUMMY_NODE_ID,
qself,
path,
rename,
body: body(self)?,
from_glob: false,
};
(ident, ItemKind::Delegation(Box::new(deleg)))
};
let span = span.to(self.prev_token.span);
self.psess.gated_spans.gate(sym::fn_delegation, span);
Ok((ident, item_kind))
}
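/// Parses the items inside the braces of an `impl`, `trait`, or `extern` block,
/// collecting inner attributes and recovering `impl Ty;` as if it were `impl Ty {}`.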
fn parse_item_list<T>(
&mut self,
attrs: &mut AttrVec,
mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
) -> PResult<'a, ThinVec<T>> {
let open_brace_span = self.token.span;
// Recover `impl Ty;` instead of `impl Ty {}`
if self.token == TokenKind::Semi {
self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
self.bump();
return Ok(ThinVec::new());
}
self.expect(&token::OpenDelim(Delimiter::Brace))?;
attrs.extend(self.parse_inner_attributes()?);
let mut items = ThinVec::new();
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
if self.recover_doc_comment_before_brace() {
continue;
}
self.recover_vcs_conflict_marker();
match parse_item(self) {
Ok(None) => {
let mut is_unnecessary_semicolon = !items.is_empty()
// When the close delim is `)` in a case like the following, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
// but the actual `token.kind` is `token::CloseDelim(Delimiter::Brace)`.
// This is because the `token.kind` of the close delim is treated as the same as
// that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different.
// Therefore, `token.kind` should not be compared here.
//
// issue-60075.rs
// ```
// trait T {
// fn qux() -> Option<usize> {
// let _ = if true {
// });
// ^ this close delim
// Some(4)
// }
// ```
&& self
.span_to_snippet(self.prev_token.span)
.is_ok_and(|snippet| snippet == "}")
&& self.token.kind == token::Semi;
let mut semicolon_span = self.token.span;
if !is_unnecessary_semicolon {
// #105369, Detect spurious `;` before assoc fn body
is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace)
&& self.prev_token.kind == token::Semi;
semicolon_span = self.prev_token.span;
}
// We have to bail or we'll potentially never make progress.
let non_item_span = self.token.span;
let is_let = self.token.is_keyword(kw::Let);
let mut err =
self.dcx().struct_span_err(non_item_span, "non-item in item list");
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
if is_let {
err.span_suggestion(
non_item_span,
"consider using `const` instead of `let` for associated const",
"const",
Applicability::MachineApplicable,
);
} else {
err.span_label(open_brace_span, "item list starts here")
.span_label(non_item_span, "non-item starts here")
.span_label(self.prev_token.span, "item list ends here");
}
if is_unnecessary_semicolon {
err.span_suggestion(
semicolon_span,
"consider removing this semicolon",
"",
Applicability::MaybeIncorrect,
);
}
err.emit();
break;
}
Ok(Some(item)) => items.extend(item),
Err(err) => {
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
err.with_span_label(
open_brace_span,
"while parsing this item list starting here",
)
.with_span_label(self.prev_token.span, "the item list ends here")
.emit();
break;
}
}
}
Ok(items)
}
/// Recover on a doc comment before `}`.
fn recover_doc_comment_before_brace(&mut self) -> bool {
if let token::DocComment(..) = self.token.kind {
if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
// FIXME: merge with `DocCommentDoesNotDocumentAnything` (E0585)
struct_span_code_err!(
self.dcx(),
self.token.span,
E0584,
"found a documentation comment that doesn't document anything",
)
.with_span_label(self.token.span, "this doc comment doesn't document anything")
.with_help(
"doc comments must come before what they document, if a comment was \
intended use `//`",
)
.emit();
self.bump();
return true;
}
}
false
}
/// Parses defaultness (i.e., `default` or nothing).
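///
/// For example (illustrative), in a specialization impl:
///
/// ```ignore (illustrative)
/// default fn foo() {}
/// default type Assoc = i32;
/// ```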
fn parse_defaultness(&mut self) -> Defaultness {
// We are interested in `default` followed by another identifier.
// However, we must avoid keywords that occur as binary operators.
// Currently, the only applicable keyword is `as` (`default as Ty`).
if self.check_keyword(kw::Default)
&& self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As))
{
self.bump(); // `default`
Defaultness::Default(self.prev_token.uninterpolated_span())
} else {
Defaultness::Final
}
}
/// Is this an `(unsafe auto? | auto) trait` item?
fn check_auto_or_unsafe_trait_item(&mut self) -> bool {
// auto trait
self.check_keyword(kw::Auto) && self.is_keyword_ahead(1, &[kw::Trait])
// unsafe auto trait
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
}
/// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
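///
/// For example (illustrative):
///
/// ```ignore (illustrative)
/// unsafe auto trait Scary {}
/// trait Foo: Bar { fn baz(&self); }
/// trait ThreadSafe = Send + Sync;
/// ```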
fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
let safety = self.parse_safety(Case::Sensitive);
// Parse optional `auto` prefix.
let is_auto = if self.eat_keyword(kw::Auto) {
self.psess.gated_spans.gate(sym::auto_traits, self.prev_token.span);
IsAuto::Yes
} else {
IsAuto::No
};
self.expect_keyword(kw::Trait)?;
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
// Parse optional colon and supertrait bounds.
let had_colon = self.eat(&token::Colon);
let span_at_colon = self.prev_token.span;
let bounds = if had_colon { self.parse_generic_bounds()? } else { Vec::new() };
let span_before_eq = self.prev_token.span;
if self.eat(&token::Eq) {
// It's a trait alias.
if had_colon {
let span = span_at_colon.to(span_before_eq);
self.dcx().emit_err(errors::BoundsNotAllowedOnTraitAliases { span });
}
let bounds = self.parse_generic_bounds()?;
generics.where_clause = self.parse_where_clause()?;
self.expect_semi()?;
let whole_span = lo.to(self.prev_token.span);
if is_auto == IsAuto::Yes {
self.dcx().emit_err(errors::TraitAliasCannotBeAuto { span: whole_span });
}
if let Safety::Unsafe(_) = safety {
self.dcx().emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span });
}
self.psess.gated_spans.gate(sym::trait_alias, whole_span);
Ok((ident, ItemKind::TraitAlias(generics, bounds)))
} else {
// It's a normal trait.
generics.where_clause = self.parse_where_clause()?;
let items = self.parse_item_list(attrs, |p| p.parse_trait_item(ForceCollect::No))?;
Ok((
ident,
2024-05-17 14:17:48 -03:00
ItemKind::Trait(Box::new(Trait { is_auto, safety, generics, bounds, items })),
))
}
}
pub fn parse_impl_item(
&mut self,
force_collect: ForceCollect,
) -> PResult<'a, Option<Option<P<AssocItem>>>> {
let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
self.parse_assoc_item(fn_parse_mode, force_collect)
}
pub fn parse_trait_item(
&mut self,
force_collect: ForceCollect,
) -> PResult<'a, Option<Option<P<AssocItem>>>> {
let fn_parse_mode =
FnParseMode { req_name: |edition| edition >= Edition::Edition2018, req_body: false };
self.parse_assoc_item(fn_parse_mode, force_collect)
}
/// Parses associated items.
fn parse_assoc_item(
&mut self,
fn_parse_mode: FnParseMode,
force_collect: ForceCollect,
) -> PResult<'a, Option<Option<P<AssocItem>>>> {
Ok(self.parse_item_(fn_parse_mode, force_collect)?.map(
|Item { attrs, id, span, vis, ident, kind, tokens }| {
let kind = match AssocItemKind::try_from(kind) {
Ok(kind) => kind,
Err(kind) => match kind {
ItemKind::Static(box StaticItem { ty, safety: _, mutability: _, expr }) => {
self.dcx().emit_err(errors::AssociatedStaticItemNotAllowed { span });
AssocItemKind::Const(Box::new(ConstItem {
defaultness: Defaultness::Final,
generics: Generics::default(),
ty,
expr,
}))
}
_ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"),
},
};
Some(P(Item { attrs, id, span, vis, ident, kind, tokens }))
},
))
}
/// Parses a `type` alias with the following grammar:
/// ```ebnf
/// TypeAlias = "type" Ident Generics (":" GenericBounds)? WhereClause ("=" Ty)? WhereClause ";" ;
/// ```
/// The `"type"` has already been eaten.
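///
/// For example (illustrative):
///
/// ```ignore (illustrative)
/// type Result<T> = std::result::Result<T, Error>;
/// type Assoc: Clone;   // in a trait: bounds, no default
/// ```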
fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
// Parse optional colon and param bounds.
let bounds =
if self.eat(&token::Colon) { self.parse_generic_bounds()? } else { Vec::new() };
let before_where_clause = self.parse_where_clause()?;
let ty = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
let after_where_clause = self.parse_where_clause()?;
let where_clauses = TyAliasWhereClauses {
before: TyAliasWhereClause {
has_where_token: before_where_clause.has_where_token,
span: before_where_clause.span,
},
after: TyAliasWhereClause {
has_where_token: after_where_clause.has_where_token,
span: after_where_clause.span,
},
split: before_where_clause.predicates.len(),
};
let mut predicates = before_where_clause.predicates;
predicates.extend(after_where_clause.predicates);
let where_clause = WhereClause {
has_where_token: before_where_clause.has_where_token
|| after_where_clause.has_where_token,
predicates,
span: DUMMY_SP,
};
generics.where_clause = where_clause;
self.expect_semi()?;
Ok((
ident,
ItemKind::TyAlias(Box::new(TyAlias {
defaultness,
generics,
where_clauses,
bounds,
ty,
})),
))
}
/// Parses a `UseTree`.
///
/// ```text
/// USE_TREE = [`::`] `*` |
/// [`::`] `{` USE_TREE_LIST `}` |
/// PATH `::` `*` |
/// PATH `::` `{` USE_TREE_LIST `}` |
/// PATH [`as` IDENT]
/// ```
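///
/// For example (illustrative):
///
/// ```ignore (illustrative)
/// use std::collections::HashMap;
/// use std::io::{self, Read as _};
/// use crate::prelude::*;
/// ```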
fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
let lo = self.token.span;
let mut prefix =
ast::Path { segments: ThinVec::new(), span: lo.shrink_to_lo(), tokens: None };
let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
|| self.check(&token::BinOp(token::Star))
|| self.is_import_coupler()
{
// `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::PathSep) {
prefix
.segments
.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
self.parse_use_tree_glob_or_nested()?
} else {
// `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
prefix = self.parse_path(PathStyle::Mod)?;
if self.eat(&token::PathSep) {
self.parse_use_tree_glob_or_nested()?
} else {
// Recover from using a colon as path separator.
while self.eat_noexpect(&token::Colon) {
self.dcx()
.emit_err(errors::SingleColonImportPath { span: self.prev_token.span });
// We parse the rest of the path and append it to the original prefix.
self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
prefix.span = lo.to(self.prev_token.span);
}
UseTreeKind::Simple(self.parse_rename()?)
}
};
Ok(UseTree { prefix, kind, span: lo.to(self.prev_token.span) })
}
/// Parses `*` or `{...}`.
fn parse_use_tree_glob_or_nested(&mut self) -> PResult<'a, UseTreeKind> {
Ok(if self.eat(&token::BinOp(token::Star)) {
UseTreeKind::Glob
} else {
let lo = self.token.span;
UseTreeKind::Nested {
items: self.parse_use_tree_list()?,
span: lo.to(self.prev_token.span),
}
})
}
/// Parses a `UseTreeKind::Nested(list)`.
///
/// ```text
/// USE_TREE_LIST = ∅ | (USE_TREE `,`)* USE_TREE [`,`]
/// ```
fn parse_use_tree_list(&mut self) -> PResult<'a, ThinVec<(UseTree, ast::NodeId)>> {
self.parse_delim_comma_seq(Delimiter::Brace, |p| {
p.recover_vcs_conflict_marker();
Ok((p.parse_use_tree()?, DUMMY_NODE_ID))
})
.map(|(r, _)| r)
}
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
if self.eat_keyword(kw::As) { self.parse_ident_or_underscore().map(Some) } else { Ok(None) }
}
fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => {
self.bump();
Ok(ident)
}
_ => self.parse_ident(),
}
}
/// Parses `extern crate` links.
///
/// # Examples
///
/// ```ignore (illustrative)
/// extern crate foo;
/// extern crate bar as foo;
/// ```
fn parse_item_extern_crate(&mut self) -> PResult<'a, ItemInfo> {
// Accept `extern crate name-like-this` for better diagnostics
let orig_name = self.parse_crate_name_with_dashes()?;
let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? {
(rename, Some(orig_name.name))
} else {
(orig_name, None)
};
self.expect_semi()?;
Ok((item_name, ItemKind::ExternCrate(orig_name)))
}
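/// Parses the crate name in `extern crate`, also accepting dashes for better diagnostics:
/// e.g. (illustrative) `extern crate foo-bar;` is recovered as `foo_bar`, with an error
/// suggesting underscores.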
fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, Ident> {
let ident = if self.token.is_keyword(kw::SelfLower) {
self.parse_path_segment_ident()
} else {
self.parse_ident()
}?;
let dash = token::BinOp(token::BinOpToken::Minus);
if self.token != dash {
return Ok(ident);
}
// Accept `extern crate name-like-this` for better diagnostics.
let mut dashes = vec![];
let mut idents = vec![];
while self.eat(&dash) {
dashes.push(self.prev_token.span);
idents.push(self.parse_ident()?);
}
let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
let mut fixed_name = ident.name.to_string();
for part in idents {
write!(fixed_name, "_{}", part.name).unwrap();
}
self.dcx().emit_err(errors::ExternCrateNameWithDashes {
span: fixed_name_sp,
sugg: errors::ExternCrateNameWithDashesSugg { dashes },
});
Ok(Ident::from_str_and_span(&fixed_name, fixed_name_sp))
}
/// Parses an `extern` foreign module (a block of items with an optional ABI).
///
/// `extern` is expected to have been consumed before calling this method.
///
/// # Examples
///
/// ```ignore (only-for-syntax-highlight)
/// extern "C" {}
/// extern {}
/// ```
fn parse_item_foreign_mod(
&mut self,
attrs: &mut AttrVec,
mut safety: Safety,
) -> PResult<'a, ItemInfo> {
let abi = self.parse_abi(); // ABI?
if safety == Safety::Default
&& self.token.is_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Brace))
{
self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit();
safety = Safety::Unsafe(self.token.span);
self.eat_keyword(kw::Unsafe);
}
let module = ast::ForeignMod {
safety,
abi,
items: self.parse_item_list(attrs, |p| p.parse_foreign_item(ForceCollect::No))?,
};
Ok((Ident::empty(), ItemKind::ForeignMod(module)))
}
/// Parses a foreign item (one in an `extern { ... }` block).
pub fn parse_foreign_item(
&mut self,
force_collect: ForceCollect,
) -> PResult<'a, Option<Option<P<ForeignItem>>>> {
let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: false };
Ok(self.parse_item_(fn_parse_mode, force_collect)?.map(
|Item { attrs, id, span, vis, ident, kind, tokens }| {
let kind = match ForeignItemKind::try_from(kind) {
Ok(kind) => kind,
Err(kind) => match kind {
ItemKind::Const(box ConstItem { ty, expr, .. }) => {
let const_span = Some(span.with_hi(ident.span.lo()))
.filter(|span| span.can_be_used_for_suggestions());
self.dcx().emit_err(errors::ExternItemCannotBeConst {
ident_span: ident.span,
const_span,
});
ForeignItemKind::Static(Box::new(StaticItem {
ty,
mutability: Mutability::Not,
expr,
safety: Safety::Default,
}))
}
_ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"),
},
};
Some(P(Item { attrs, id, span, vis, ident, kind, tokens }))
},
))
}
fn error_bad_item_kind<T>(&self, span: Span, kind: &ItemKind, ctx: &'static str) -> Option<T> {
// FIXME(#100717): needs variant for each `ItemKind` (instead of using `ItemKind::descr()`)
let span = self.psess.source_map().guess_head_span(span);
let descr = kind.descr();
let help = match kind {
ItemKind::DelegationMac(deleg) if deleg.suffixes.is_none() => None,
_ => Some(()),
};
self.dcx().emit_err(errors::BadItemKind { span, descr, ctx, help });
None
}
fn is_unsafe_foreign_mod(&self) -> bool {
self.token.is_keyword(kw::Unsafe)
&& self.is_keyword_ahead(1, &[kw::Extern])
&& self.look_ahead(
2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize),
|t| t.kind == token::OpenDelim(Delimiter::Brace),
)
}
fn is_static_global(&mut self) -> bool {
if self.check_keyword(kw::Static) {
// Check if this could be a closure.
!self.look_ahead(1, |token| {
if token.is_keyword(kw::Move) {
return true;
}
matches!(token.kind, token::BinOp(token::Or) | token::OrOr)
})
} else {
let quals: &[Symbol] = &[kw::Unsafe, kw::Safe];
// `$qual static`
quals.iter().any(|&kw| self.check_keyword(kw))
&& self.look_ahead(1, |t| t.is_keyword(kw::Static))
}
}
/// Recover on `const mut` with `const` already eaten.
fn recover_const_mut(&mut self, const_span: Span) {
if self.eat_keyword(kw::Mut) {
let span = self.prev_token.span;
self.dcx()
.emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
} else if self.eat_keyword(kw::Let) {
let span = self.prev_token.span;
self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
}
}
/// Recover on `const impl` with `const` already eaten.
fn recover_const_impl(
&mut self,
const_span: Span,
attrs: &mut AttrVec,
defaultness: Defaultness,
) -> PResult<'a, ItemInfo> {
let impl_span = self.token.span;
let err = self.expected_ident_found_err();
// Only try to recover if this is implementing a trait for a type
let mut impl_info = match self.parse_item_impl(attrs, defaultness) {
Ok(impl_info) => impl_info,
Err(recovery_error) => {
// Recovery failed, raise the "expected identifier" error
recovery_error.cancel();
return Err(err);
}
};
match &mut impl_info.1 {
ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. }) => {
*constness = Const::Yes(const_span);
let before_trait = trai.path.span.shrink_to_lo();
let const_up_to_impl = const_span.with_hi(impl_span.lo());
err.with_multipart_suggestion(
"you might have meant to write a const trait impl",
vec![(const_up_to_impl, "".to_owned()), (before_trait, "const ".to_owned())],
Applicability::MaybeIncorrect,
)
.emit();
}
ItemKind::Impl { .. } => return Err(err),
_ => unreachable!(),
}
Ok(impl_info)
}
/// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in `mutability`.
///
/// ```ebnf
/// Static = "static" "mut"? $ident ":" $ty (= $expr)? ";" ;
/// ```
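///
/// For illustration, declarations matching this grammar include:
///
/// ```ignore (illustrative)
/// static LIMIT: usize = 1024;
/// static mut COUNTER: u32 = 0;
/// ```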
fn parse_static_item(
&mut self,
safety: Safety,
mutability: Mutability,
) -> PResult<'a, (Ident, StaticItem)> {
let ident = self.parse_ident()?;
if self.token.kind == TokenKind::Lt && self.may_recover() {
let generics = self.parse_generics()?;
self.dcx().emit_err(errors::StaticWithGenerics { span: generics.span });
}
// Parse the type of a static item. That is, the `":" $ty` fragment.
// FIXME: This could maybe benefit from `.may_recover()`?
let ty = match (self.eat(&token::Colon), self.check(&token::Eq) | self.check(&token::Semi))
{
(true, false) => self.parse_ty()?,
// If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing type.
(colon, _) => self.recover_missing_global_item_type(colon, Some(mutability)),
};
let expr = if self.eat(&token::Eq) { Some(self.parse_expr()?) } else { None };
self.expect_semi()?;
Ok((ident, StaticItem { ty, safety, mutability, expr }))
}
/// Parse a constant item with the prefix `"const"` already parsed.
///
/// ```ebnf
/// Const = "const" ($ident | "_") Generics ":" $ty (= $expr)? WhereClause ";" ;
/// ```
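///
/// For illustration, items matching this grammar include (the generic form is
/// gated behind `generic_const_items`):
///
/// ```ignore (illustrative)
/// const LEN: usize = 16;
/// const NONE<T>: Option<T> = None;
/// ```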
fn parse_const_item(&mut self) -> PResult<'a, (Ident, Generics, P<Ty>, Option<P<ast::Expr>>)> {
let ident = self.parse_ident_or_underscore()?;
let mut generics = self.parse_generics()?;
// Check the span for emptiness instead of the list of parameters in order to correctly
// recognize and subsequently flag empty parameter lists (`<>`) as unstable.
if !generics.span.is_empty() {
self.psess.gated_spans.gate(sym::generic_const_items, generics.span);
}
// Parse the type of a constant item. That is, the `":" $ty` fragment.
// FIXME: This could maybe benefit from `.may_recover()`?
let ty = match (
self.eat(&token::Colon),
self.check(&token::Eq) | self.check(&token::Semi) | self.check_keyword(kw::Where),
) {
(true, false) => self.parse_ty()?,
// If there wasn't a `:` or the colon was followed by a `=`, `;` or `where`, recover a missing type.
(colon, _) => self.recover_missing_global_item_type(colon, None),
};
// Proactively parse a where-clause to be able to provide a good error message in case we
// encounter the item body following it.
let before_where_clause =
if self.may_recover() { self.parse_where_clause()? } else { WhereClause::default() };
let expr = if self.eat(&token::Eq) { Some(self.parse_expr()?) } else { None };
let after_where_clause = self.parse_where_clause()?;
// Provide a nice error message if the user placed a where-clause before the item body.
// Users may be tempted to write such code if they are still used to the deprecated
// where-clause location on type aliases and associated types. See also #89122.
if before_where_clause.has_where_token
&& let Some(expr) = &expr
{
self.dcx().emit_err(errors::WhereClauseBeforeConstBody {
span: before_where_clause.span,
name: ident.span,
body: expr.span,
sugg: if !after_where_clause.has_where_token {
self.psess.source_map().span_to_snippet(expr.span).ok().map(|body| {
errors::WhereClauseBeforeConstBodySugg {
left: before_where_clause.span.shrink_to_lo(),
snippet: body,
right: before_where_clause.span.shrink_to_hi().to(expr.span),
}
})
} else {
// FIXME(generic_const_items): Provide a structured suggestion to merge the first
// where-clause into the second one.
None
},
});
}
// Merge the predicates of both where-clauses since either one can be relevant.
// If we didn't parse a body (which is valid for associated consts in traits) and we were
// allowed to recover, `before_where_clause` contains the predicates, otherwise they are
// in `after_where_clause`. Further, both of them might contain predicates iff two
// where-clauses were provided, which is syntactically ill-formed, but we want to recover from
// it and treat them as one large where-clause.
let mut predicates = before_where_clause.predicates;
predicates.extend(after_where_clause.predicates);
let where_clause = WhereClause {
has_where_token: before_where_clause.has_where_token
|| after_where_clause.has_where_token,
predicates,
span: if after_where_clause.has_where_token {
after_where_clause.span
} else {
before_where_clause.span
},
};
if where_clause.has_where_token {
self.psess.gated_spans.gate(sym::generic_const_items, where_clause.span);
}
generics.where_clause = where_clause;
self.expect_semi()?;
Ok((ident, generics, ty, expr))
}
/// We were supposed to parse `":" $ty` but the `:` or the type itself was missing,
/// so recover by treating the type as inferred.
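///
/// For illustration, recovery treats the missing type as inferred:
///
/// ```ignore (illustrative)
/// const A = 42;  // recovered as `const A: _ = 42;`
/// static B = 0;  // recovered as `static B: _ = 0;`
/// ```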
fn recover_missing_global_item_type(
&mut self,
colon_present: bool,
m: Option<Mutability>,
) -> P<Ty> {
// Construct the error and stash it away with the hope
// that typeck will later enrich the error with a type.
let kind = match m {
Some(Mutability::Mut) => "static mut",
Some(Mutability::Not) => "static",
None => "const",
};
let colon = match colon_present {
true => "",
false => ":",
};
let span = self.prev_token.span.shrink_to_hi();
let err = self.dcx().create_err(errors::MissingConstType { span, colon, kind });
err.stash(span, StashKey::ItemNoType);
// The user intended that the type be inferred,
// so treat this as if the user wrote e.g. `const A: _ = expr;`.
P(Ty { kind: TyKind::Infer, span, id: ast::DUMMY_NODE_ID, tokens: None })
}
/// Parses an enum declaration.
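///
/// For illustration, this accepts unit, tuple, and struct variants alike:
///
/// ```ignore (illustrative)
/// enum Shape {
///     Point,
///     Square(f64),
///     Circle { radius: f64 },
/// }
/// ```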
fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
if self.token.is_keyword(kw::Struct) {
let span = self.prev_token.span.to(self.token.span);
let err = errors::EnumStructMutuallyExclusive { span };
if self.look_ahead(1, |t| t.is_ident()) {
self.bump();
self.dcx().emit_err(err);
} else {
return Err(self.dcx().create_err(err));
}
}
let prev_span = self.prev_token.span;
let id = self.parse_ident()?;
let mut generics = self.parse_generics()?;
generics.where_clause = self.parse_where_clause()?;
// Possibly recover `enum Foo;` instead of `enum Foo {}`
let (variants, _) = if self.token == TokenKind::Semi {
self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
self.bump();
(thin_vec![], Trailing::No)
} else {
self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
.map_err(|mut err| {
err.span_label(id.span, "while parsing this enum");
if self.token == token::Colon {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
match self.parse_ty() {
Ok(_) => {
err.span_suggestion_verbose(
prev_span,
"perhaps you meant to use `struct` here",
"struct",
Applicability::MaybeIncorrect,
);
}
Err(e) => {
e.cancel();
}
}
self.restore_snapshot(snapshot);
}
self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
self.bump(); // }
err
})?
};
let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
Ok((id, ItemKind::Enum(enum_definition, generics)))
}
fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
self.recover_vcs_conflict_marker();
let variant_attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
let help = "enum variants can be `Variant`, `Variant = <integer>`, \
`Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
self.collect_tokens_trailing_token(
variant_attrs,
ForceCollect::No,
|this, variant_attrs| {
let vlo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
if !this.recover_nested_adt_item(kw::Enum)? {
return Ok((None, TrailingToken::None));
}
let ident = this.parse_field_ident("enum", vlo)?;
if this.token == token::Not {
if let Err(err) = this.unexpected() {
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
}
this.bump();
this.parse_delim_args()?;
return Ok((None, TrailingToken::MaybeComma));
}
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
let (fields, recovered) =
match this.parse_record_struct_body("struct", ident.span, false) {
Ok((fields, recovered)) => (fields, recovered),
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
this.bump(); // }
err.span_label(span, "while parsing this enum");
err.help(help);
let guar = err.emit();
(thin_vec![], Recovered::Yes(guar))
}
};
VariantData::Struct { fields, recovered: recovered.into() }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
this.bump(); // )
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
thin_vec![]
}
};
VariantData::Tuple(body, DUMMY_NODE_ID)
} else {
VariantData::Unit(DUMMY_NODE_ID)
};
let disr_expr =
if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
let vr = ast::Variant {
ident,
vis,
id: DUMMY_NODE_ID,
attrs: variant_attrs,
data: struct_def,
disr_expr,
span: vlo.to(this.prev_token.span),
is_placeholder: false,
};
Ok((Some(vr), TrailingToken::MaybeComma))
},
)
.map_err(|mut err| {
err.help(help);
err
})
}
/// Parses `struct Foo { ... }`.
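///
/// For illustration, the record, tuple (with trailing `where`), and unit forms
/// handled below:
///
/// ```ignore (illustrative)
/// struct Record<T> where T: Copy { field: T }
/// struct Tuple<T>(T) where T: Copy;
/// struct Unit;
/// ```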
fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
let mut generics = self.parse_generics()?;
// There is a special case worth noting here, as reported in issue #17904.
// If we are parsing a tuple struct it is the case that the where clause
// should follow the field list. Like so:
//
// struct Foo<T>(T) where T: Copy;
//
// If we are parsing a normal record-style struct it is the case
// that the where clause comes before the body, and after the generics.
// So if we look ahead and see a brace or a where-clause we begin
// parsing a record style struct.
//
// Otherwise if we look ahead and see a paren we parse a tuple-style
// struct.
let vdata = if self.token.is_keyword(kw::Where) {
let tuple_struct_body;
(generics.where_clause, tuple_struct_body) =
self.parse_struct_where_clause(class_name, generics.span)?;
if let Some(body) = tuple_struct_body {
// If we see a misplaced tuple struct body: `struct Foo<T> where T: Copy, (T);`
let body = VariantData::Tuple(body, DUMMY_NODE_ID);
self.expect_semi()?;
body
} else if self.eat(&token::Semi) {
// If we see a `struct Foo<T> where T: Copy;` style decl.
VariantData::Unit(DUMMY_NODE_ID)
} else {
// If we see: `struct Foo<T> where T: Copy { ... }`
let (fields, recovered) = self.parse_record_struct_body(
"struct",
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered: recovered.into() }
}
// No `where` so: `struct Foo<T>;`
} else if self.eat(&token::Semi) {
VariantData::Unit(DUMMY_NODE_ID)
// Record-style struct definition
} else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) = self.parse_record_struct_body(
"struct",
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered: recovered.into() }
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
generics.where_clause = self.parse_where_clause()?;
self.expect_semi()?;
body
} else {
let err =
errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
return Err(self.dcx().create_err(err));
};
Ok((class_name, ItemKind::Struct(vdata, generics)))
}
/// Parses `union Foo { ... }`.
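///
/// For illustration:
///
/// ```ignore (illustrative)
/// union Value {
///     int: i64,
///     float: f64,
/// }
/// ```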
fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
let mut generics = self.parse_generics()?;
let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
let (fields, recovered) = self.parse_record_struct_body(
"union",
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered: recovered.into() }
} else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) = self.parse_record_struct_body(
"union",
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered: recovered.into() }
} else {
let token_str = super::token_descr(&self.token);
let msg = format!("expected `where` or `{{` after union name, found {token_str}");
let mut err = self.dcx().struct_span_err(self.token.span, msg);
err.span_label(self.token.span, "expected `where` or `{` after union name");
return Err(err);
};
Ok((class_name, ItemKind::Union(vdata, generics)))
2019-08-11 18:34:42 +02:00
}
/// This function parses the fields of record structs:
///
/// - `struct S { ... }`
/// - `enum E { Variant { ... } }`
pub(crate) fn parse_record_struct_body(
&mut self,
adt_ty: &str,
ident_span: Span,
parsed_where: bool,
) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> {
let mut fields = ThinVec::new();
let mut recovered = Recovered::No;
if self.eat(&token::OpenDelim(Delimiter::Brace)) {
while self.token != token::CloseDelim(Delimiter::Brace) {
match self.parse_field_def(adt_ty) {
Ok(field) => {
fields.push(field);
}
Err(mut err) => {
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
err.span_label(ident_span, format!("while parsing this {adt_ty}"));
let guar = err.emit();
recovered = Recovered::Yes(guar);
break;
}
}
}
self.eat(&token::CloseDelim(Delimiter::Brace));
} else {
let token_str = super::token_descr(&self.token);
let where_str = if parsed_where { "" } else { "`where`, or " };
let msg = format!("expected {where_str}`{{` after struct name, found {token_str}");
let mut err = self.dcx().struct_span_err(self.token.span, msg);
err.span_label(self.token.span, format!("expected {where_str}`{{` after struct name",));
return Err(err);
}
Ok((fields, recovered))
}
pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;`
// Unit-like structs are handled in the `parse_item_struct` function.
self.parse_paren_comma_seq(|p| {
let attrs = p.parse_outer_attributes()?;
p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
let mut snapshot = None;
if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
// Account for `<<<<<<<` diff markers. We can't proactively error here because
// that can be a valid type start, so we snapshot and reparse only when we've
// encountered another parse error.
snapshot = Some(p.create_snapshot_for_diagnostic());
}
let lo = p.token.span;
let vis = match p.parse_visibility(FollowedByType::Yes) {
Ok(vis) => vis,
Err(err) => {
if let Some(ref mut snapshot) = snapshot {
snapshot.recover_vcs_conflict_marker();
}
return Err(err);
}
};
let ty = match p.parse_ty() {
Ok(ty) => ty,
Err(err) => {
if let Some(ref mut snapshot) = snapshot {
snapshot.recover_vcs_conflict_marker();
}
return Err(err);
}
};
Ok((
FieldDef {
span: lo.to(ty.span),
vis,
ident: None,
id: DUMMY_NODE_ID,
ty,
attrs,
is_placeholder: false,
},
TrailingToken::MaybeComma,
))
})
})
.map(|(r, _)| r)
}
/// Parses an element of a struct declaration.
fn parse_field_def(&mut self, adt_ty: &str) -> PResult<'a, FieldDef> {
self.recover_vcs_conflict_marker();
let attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
this.parse_single_struct_field(adt_ty, lo, vis, attrs)
.map(|field| (field, TrailingToken::None))
})
}
/// Parses a structure field declaration.
fn parse_single_struct_field(
&mut self,
adt_ty: &str,
lo: Span,
vis: Visibility,
attrs: AttrVec,
) -> PResult<'a, FieldDef> {
let mut seen_comma: bool = false;
let a_var = self.parse_name_and_ty(adt_ty, lo, vis, attrs)?;
if self.token == token::Comma {
seen_comma = true;
}
if self.eat(&token::Semi) {
let sp = self.prev_token.span;
let mut err =
self.dcx().struct_span_err(sp, format!("{adt_ty} fields are separated by `,`"));
err.span_suggestion_short(
sp,
"replace `;` with `,`",
",",
Applicability::MachineApplicable,
);
return Err(err);
}
match self.token.kind {
token::Comma => {
self.bump();
}
token::CloseDelim(Delimiter::Brace) => {}
token::DocComment(..) => {
let previous_span = self.prev_token.span;
let mut err = errors::DocCommentDoesNotDocumentAnything {
span: self.token.span,
missing_comma: None,
};
self.bump(); // consume the doc comment
let comma_after_doc_seen = self.eat(&token::Comma);
// `seen_comma` is always false, because we are inside a doc comment block;
// the condition is here only to make the code more readable.
if !seen_comma && comma_after_doc_seen {
seen_comma = true;
}
if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
self.dcx().emit_err(err);
} else {
if !seen_comma {
let sp = previous_span.shrink_to_hi();
err.missing_comma = Some(sp);
}
return Err(self.dcx().create_err(err));
}
}
_ => {
let sp = self.prev_token.span.shrink_to_hi();
let msg =
format!("expected `,`, or `}}`, found {}", super::token_descr(&self.token));
// Try to recover extra trailing angle brackets
if let TyKind::Path(_, Path { segments, .. }) = &a_var.ty.kind {
if let Some(last_segment) = segments.last() {
let guar = self.check_trailing_angle_brackets(
last_segment,
&[&token::Comma, &token::CloseDelim(Delimiter::Brace)],
);
if let Some(_guar) = guar {
// Handle a case like `Vec<u8>>,` where we can continue parsing fields
// after the comma
self.eat(&token::Comma);
// `check_trailing_angle_brackets` already emitted a nicer error, as
// proven by the presence of `_guar`. We can continue parsing.
return Ok(a_var);
}
}
}
let mut err = self.dcx().struct_span_err(sp, msg);
if self.token.is_ident()
|| (self.token.kind == TokenKind::Pound
&& (self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Bracket))))
{
// This is likely another field, TokenKind::Pound is used for `#[..]`
// attribute for next field. Emit the diagnostic and continue parsing.
err.span_suggestion(
sp,
"try adding a comma",
",",
Applicability::MachineApplicable,
);
err.emit();
} else {
return Err(err);
}
}
}
Ok(a_var)
}
fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
if let Err(err) = self.expect(&token::Colon) {
let sm = self.psess.source_map();
let eq_typo = self.token.kind == token::Eq && self.look_ahead(1, |t| t.is_path_start());
let semi_typo = self.token.kind == token::Semi
&& self.look_ahead(1, |t| {
t.is_path_start()
// We check that we are in a situation like `foo; bar` to avoid bad suggestions
// when there's no type and `;` was used instead of a comma.
&& match (sm.lookup_line(self.token.span.hi()), sm.lookup_line(t.span.lo())) {
(Ok(l), Ok(r)) => l.line == r.line,
_ => true,
}
});
if eq_typo || semi_typo {
self.bump();
// Gracefully handle small typos.
err.with_span_suggestion_short(
self.prev_token.span,
"field names and their types are separated with `:`",
":",
Applicability::MachineApplicable,
)
.emit();
} else {
return Err(err);
}
}
Ok(())
}
/// Parses a structure field.
fn parse_name_and_ty(
&mut self,
adt_ty: &str,
lo: Span,
vis: Visibility,
attrs: AttrVec,
) -> PResult<'a, FieldDef> {
let name = self.parse_field_ident(adt_ty, lo)?;
if self.token.kind == token::Not {
if let Err(mut err) = self.unexpected() {
// Encounter the macro invocation
err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
return Err(err);
}
}
self.expect_field_ty_separator()?;
let ty = self.parse_ty_for_field_def()?;
if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) {
self.dcx().emit_err(errors::SingleColonStructType { span: self.token.span });
}
if self.token.kind == token::Eq {
self.bump();
let const_expr = self.parse_expr_anon_const()?;
let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
self.dcx().emit_err(errors::EqualsStructDefault { span: sp });
}
Ok(FieldDef {
span: lo.to(self.prev_token.span),
ident: Some(name),
vis,
id: DUMMY_NODE_ID,
ty,
attrs,
is_placeholder: false,
})
}
/// Parses a field identifier. Specialized version of `parse_ident_common`
/// for better diagnostics and suggestions.
fn parse_field_ident(&mut self, adt_ty: &str, lo: Span) -> PResult<'a, Ident> {
let (ident, is_raw) = self.ident_or_err(true)?;
if ident.name == kw::Underscore {
self.psess.gated_spans.gate(sym::unnamed_fields, lo);
} else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
let snapshot = self.create_snapshot_for_diagnostic();
let err = if self.check_fn_front_matter(false, Case::Sensitive) {
let inherited_vis =
Visibility { span: DUMMY_SP, kind: VisibilityKind::Inherited, tokens: None };
// We use `parse_fn` to get a span for the function
let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
match self.parse_fn(
&mut AttrVec::new(),
fn_parse_mode,
lo,
&inherited_vis,
Case::Insensitive,
) {
Ok(_) => {
self.dcx().struct_span_err(
lo.to(self.prev_token.span),
format!("functions are not allowed in {adt_ty} definitions"),
)
.with_help(
"unlike in C++, Java, and C#, functions are declared in `impl` blocks",
)
.with_help("see https://doc.rust-lang.org/book/ch05-03-method-syntax.html for more information")
}
Err(err) => {
err.cancel();
self.restore_snapshot(snapshot);
self.expected_ident_found_err()
}
}
} else if self.eat_keyword(kw::Struct) {
match self.parse_item_struct() {
Ok((ident, _)) => self
.dcx()
.struct_span_err(
lo.with_hi(ident.span.hi()),
format!("structs are not allowed in {adt_ty} definitions"),
)
.with_help(
"consider creating a new `struct` definition instead of nesting",
),
Err(err) => {
err.cancel();
self.restore_snapshot(snapshot);
self.expected_ident_found_err()
}
}
} else {
let mut err = self.expected_ident_found_err();
if self.eat_keyword_noexpect(kw::Let)
&& let removal_span = self.prev_token.span.until(self.token.span)
&& let Ok(ident) = self
.parse_ident_common(false)
// Cancel this error, we don't need it.
.map_err(|err| err.cancel())
&& self.token.kind == TokenKind::Colon
{
err.span_suggestion(
removal_span,
"remove this `let` keyword",
String::new(),
Applicability::MachineApplicable,
);
err.note("the `let` keyword is not allowed in `struct` fields");
err.note("see <https://doc.rust-lang.org/book/ch05-01-defining-structs.html> for more information");
err.emit();
return Ok(ident);
} else {
self.restore_snapshot(snapshot);
}
err
};
return Err(err);
}
self.bump();
Ok(ident)
}
/// Parses a declarative macro 2.0 definition.
/// The `macro` keyword has already been parsed.
/// ```ebnf
/// MacBody = "{" TOKEN_STREAM "}" ;
/// MacParams = "(" TOKEN_STREAM ")" ;
/// DeclMac = "macro" Ident MacParams? MacBody ;
/// ```
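///
/// For illustration, both grammar forms above are accepted (gated behind
/// `decl_macro`):
///
/// ```ignore (illustrative)
/// macro id($e:expr) { $e }
/// macro m { () => {} }
/// ```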
fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_delim_args()? // `MacBody`
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let params = self.parse_token_tree(); // `MacParams`
let pspan = params.span();
if !self.check(&token::OpenDelim(Delimiter::Brace)) {
self.unexpected()?;
}
let body = self.parse_token_tree(); // `MacBody`
// Convert `MacParams MacBody` into `{ MacParams => MacBody }`.
let bspan = body.span();
let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>`
let tokens = TokenStream::new(vec![params, arrow, body]);
let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
P(DelimArgs { dspan, delim: Delimiter::Brace, tokens })
} else {
self.unexpected_any()?
};
self.psess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span));
Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: false })))
}
/// Is this a possibly malformed start of a `macro_rules! foo` item definition?
fn is_macro_rules_item(&mut self) -> IsMacroRulesItem {
if self.check_keyword(kw::MacroRules) {
let macro_rules_span = self.token.span;
if self.look_ahead(1, |t| *t == token::Not) && self.look_ahead(2, |t| t.is_ident()) {
return IsMacroRulesItem::Yes { has_bang: true };
} else if self.look_ahead(1, |t| (t.is_ident())) {
// macro_rules foo
self.dcx().emit_err(errors::MacroRulesMissingBang {
span: macro_rules_span,
hi: macro_rules_span.shrink_to_hi(),
});
return IsMacroRulesItem::Yes { has_bang: false };
}
}
IsMacroRulesItem::No
}
/// Parses a `macro_rules! foo { ... }` declarative macro.
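///
/// For illustration:
///
/// ```ignore (illustrative)
/// macro_rules! square {
///     ($e:expr) => { $e * $e };
/// }
/// ```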
fn parse_item_macro_rules(
&mut self,
vis: &Visibility,
has_bang: bool,
) -> PResult<'a, ItemInfo> {
self.expect_keyword(kw::MacroRules)?; // `macro_rules`
if has_bang {
self.expect(&token::Not)?; // `!`
}
let ident = self.parse_ident()?;
if self.eat(&token::Not) {
// Handle macro_rules! foo!
let span = self.prev_token.span;
self.dcx().emit_err(errors::MacroNameRemoveBang { span });
}
let body = self.parse_delim_args()?;
self.eat_semi_for_macro_if_needed(&body);
self.complain_if_pub_macro(vis, true);
Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: true })))
}
/// Item macro invocations or `macro_rules!` definitions need inherited visibility.
/// If that's not the case, emit an error.
fn complain_if_pub_macro(&self, vis: &Visibility, macro_rules: bool) {
if let VisibilityKind::Inherited = vis.kind {
return;
}
let vstr = pprust::vis_to_string(vis);
let vstr = vstr.trim_end();
if macro_rules {
self.dcx().emit_err(errors::MacroRulesVisibility { span: vis.span, vis: vstr });
} else {
self.dcx().emit_err(errors::MacroInvocationVisibility { span: vis.span, vis: vstr });
}
}
fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) {
if args.need_semicolon() && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item(args);
}
}
fn report_invalid_macro_expansion_item(&self, args: &DelimArgs) {
let span = args.dspan.entire();
let mut err = self.dcx().struct_span_err(
span,
"macros that expand to items must be delimited with braces or followed by a semicolon",
);
// FIXME: This will make us not emit the help even for declarative
// macros within the same crate (that we can fix), which is sad.
if !span.from_expansion() {
let DelimSpan { open, close } = args.dspan;
err.multipart_suggestion(
"change the delimiters to curly braces",
vec![(open, "{".to_string()), (close, '}'.to_string())],
Applicability::MaybeIncorrect,
);
err.span_suggestion(
span.with_neighbor(self.token.span).shrink_to_hi(),
"add a semicolon",
';',
Applicability::MaybeIncorrect,
);
}
err.emit();
}
/// Checks if the current token is one of the tokens that cannot be nested, like `kw::Enum`. In case
/// it is, we try to parse the item and report an error about the nested item.
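///
/// For illustration, the kind of input this recovers from:
///
/// ```ignore (illustrative)
/// enum Outer {
///     enum Inner { A } // error: the nested `enum` is parsed and reported here
/// }
/// ```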
fn recover_nested_adt_item(&mut self, keyword: Symbol) -> PResult<'a, bool> {
if (self.token.is_keyword(kw::Enum)
|| self.token.is_keyword(kw::Struct)
|| self.token.is_keyword(kw::Union))
&& self.look_ahead(1, |t| t.is_ident())
{
let kw_token = self.token.clone();
let kw_str = pprust::token_to_string(&kw_token);
let item = self.parse_item(ForceCollect::No)?;
self.dcx().emit_err(errors::NestedAdt {
span: kw_token.span,
item: item.unwrap().span,
kw_str,
keyword: keyword.as_str(),
});
// We successfully parsed the item but we must inform the caller about nested problem.
return Ok(false);
}
Ok(true)
}
}
/// The parsing configuration used to parse a parameter list (see `parse_fn_params`).
///
/// The function decides if, per-parameter `p`, `p` must have a pattern or just a type.
///
/// This function pointer accepts an edition, because in edition 2015, trait declarations
/// were allowed to omit parameter names. In 2018, they became required.
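///
/// For illustration, the 2015-edition trait form this permits:
///
/// ```ignore (illustrative)
/// trait Trait {
///     fn method(u8); // parameter name omitted; only valid in edition 2015
/// }
/// ```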
type ReqName = fn(Edition) -> bool;
/// Parsing configuration for functions.
///
/// The syntax of function items is slightly different within trait definitions,
/// impl blocks, and modules. It is still parsed using the same code, just with
/// different flags set, so that even when the input is wrong and produces a parse
/// error, it still gets into the AST and the rest of the parser and
/// type checker can run.
#[derive(Clone, Copy)]
pub(crate) struct FnParseMode {
/// A function pointer that decides if, per-parameter `p`, `p` must have a
/// pattern or just a type. This field affects parsing of the parameters list.
///
/// ```text
/// fn foo(alef: A) -> X { X::new() }
/// -----^^ affects parsing this part of the function signature
/// |
/// if req_name returns false, then this name is optional
///
/// fn bar(A) -> X;
/// ^
/// |
/// if req_name returns true, this is an error
/// ```
///
/// Calling this function pointer should only return false if:
///
/// * The item is being parsed inside of a trait definition.
/// Within an impl block or a module, it should always evaluate
/// to true.
/// * The span is from Edition 2015. In particular, you can get a
/// 2015 span inside a 2021 crate using macros.
pub(super) req_name: ReqName,
/// If this flag is set to `true`, then plain, semicolon-terminated function
/// prototypes are not allowed here.
///
/// ```text
/// fn foo(alef: A) -> X { X::new() }
/// ^^^^^^^^^^^^
/// |
/// this is always allowed
///
/// fn bar(alef: A, bet: B) -> X;
/// ^
/// |
/// if req_body is set to true, this is an error
/// ```
///
/// This field should only be set to false if the item is inside of a trait
/// definition or extern block. Within an impl block or a module, it should
/// always be set to true.
pub(super) req_body: bool,
}
/// Parsing of functions and methods.
impl<'a> Parser<'a> {
/// Parse a function starting from the front matter (`const ...`) to the body `{ ... }` or `;`.
fn parse_fn(
&mut self,
attrs: &mut AttrVec,
fn_parse_mode: FnParseMode,
sig_lo: Span,
vis: &Visibility,
case: Case,
) -> PResult<'a, (Ident, FnSig, Generics, Option<P<Block>>)> {
let fn_span = self.token.span;
let header = self.parse_fn_front_matter(vis, case)?; // `const ... fn`
let ident = self.parse_ident()?; // `foo`
let mut generics = self.parse_generics()?; // `<'a, T, ...>`
let decl = match self.parse_fn_decl(
fn_parse_mode.req_name,
AllowPlus::Yes,
RecoverReturnSign::Yes,
) {
Ok(decl) => decl,
Err(old_err) => {
// If we see `for Ty ...` then user probably meant `impl` item.
if self.token.is_keyword(kw::For) {
old_err.cancel();
return Err(self.dcx().create_err(errors::FnTypoWithImpl { fn_span }));
} else {
return Err(old_err);
}
}
};
generics.where_clause = self.parse_where_clause()?; // `where T: Ord`
let mut sig_hi = self.prev_token.span;
let body = self.parse_fn_body(attrs, &ident, &mut sig_hi, fn_parse_mode.req_body)?; // `;` or `{ ... }`.
let fn_sig_span = sig_lo.to(sig_hi);
Ok((ident, FnSig { header, decl, span: fn_sig_span }, generics, body))
}
/// Parse the "body" of a function.
/// This can either be `;` when there's no body,
/// or e.g. a block when the function is a provided one.
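///
/// For illustration:
///
/// ```ignore (illustrative)
/// fn required(&self) -> u8;      // `;`: no body (e.g. in a trait or `extern` block)
/// fn provided(&self) -> u8 { 0 } // block body
/// ```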
fn parse_fn_body(
&mut self,
attrs: &mut AttrVec,
ident: &Ident,
sig_hi: &mut Span,
req_body: bool,
) -> PResult<'a, Option<P<Block>>> {
let has_semi = if req_body {
self.token.kind == TokenKind::Semi
} else {
// Only include `;` in list of expected tokens if body is not required
self.check(&TokenKind::Semi)
};
let (inner_attrs, body) = if has_semi {
// Include the trailing semicolon in the span of the signature
self.expect_semi()?;
*sig_hi = self.prev_token.span;
(AttrVec::new(), None)
} else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
.map(|(attrs, body)| (attrs, Some(body)))?
} else if self.token.kind == token::Eq {
// Recover `fn foo() = $expr;`.
self.bump(); // `=`
let eq_sp = self.prev_token.span;
let _ = self.parse_expr()?;
self.expect_semi()?; // `;`
let span = eq_sp.to(self.prev_token.span);
let guar = self.dcx().emit_err(errors::FunctionBodyEqualsExpr {
span,
sugg: errors::FunctionBodyEqualsExprSugg { eq: eq_sp, semi: self.prev_token.span },
});
(AttrVec::new(), Some(self.mk_block_err(span, guar)))
} else {
let expected = if req_body {
&[token::OpenDelim(Delimiter::Brace)][..]
} else {
&[token::Semi, token::OpenDelim(Delimiter::Brace)]
};
if let Err(mut err) = self.expected_one_of_not_found(&[], expected) {
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
// The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
// the AST for typechecking.
err.span_label(ident.span, "while parsing this `fn`");
err.emit();
} else {
// check for typo'd Fn* trait bounds such as
// fn foo<F>() where F: FnOnce -> () {}
if self.token.kind == token::RArrow {
let machine_applicable = [sym::FnOnce, sym::FnMut, sym::Fn]
.into_iter()
.any(|s| self.prev_token.is_ident_named(s));
err.subdiagnostic(errors::FnTraitMissingParen {
span: self.prev_token.span,
machine_applicable,
});
}
return Err(err);
}
}
(AttrVec::new(), None)
};
attrs.extend(inner_attrs);
Ok(body)
}
/// Is the current token the start of an `FnHeader`, i.e. could this be a valid `fn` parse?
///
/// `check_pub` additionally adds `pub` to the checked keywords, in case users place it
/// incorrectly; this can be used to ensure `pub` never comes after `default`.
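///
/// A sketch of token sequences this check accepts (illustrative, not exhaustive):
///
/// ```ignore (illustrative)
/// const fn f() {}          // `$qual fn`
/// async unsafe fn g() {}   // `$qual $qual`
/// extern "C" fn h() {}     // `extern ABI fn`
/// ```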
pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
// We use an over-approximation here.
// `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
// `pub` is added in case users got the ordering wrong, e.g. `async pub fn`,
// but only if it wasn't preceded by `default`, since `default pub` is invalid.
let quals: &[Symbol] = if check_pub {
&[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern]
} else {
&[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern]
};
self.check_keyword_case(kw::Fn, case) // Definitely an `fn`.
// `$qual fn` or `$qual $qual`:
|| quals.iter().any(|&kw| self.check_keyword_case(kw, case))
&& self.look_ahead(1, |t| {
// `$qual fn`, e.g. `const fn` or `async fn`.
t.is_keyword_case(kw::Fn, case)
// Two qualifiers `$qual $qual` is enough, e.g. `async unsafe`.
|| (
(
t.is_non_raw_ident_where(|i|
quals.contains(&i.name)
// Rule out 2015 `const async: T = val`.
&& i.is_reserved()
)
|| case == Case::Insensitive
&& t.is_non_raw_ident_where(|i| quals.iter().any(|qual| qual.as_str() == i.name.as_str().to_lowercase()))
)
// Rule out `unsafe extern {`.
&& !self.is_unsafe_foreign_mod()
// Rule out `async gen {` and `async gen move {`
&& !self.is_async_gen_block())
})
// `extern ABI fn`
|| self.check_keyword_case(kw::Extern, case)
&& self.look_ahead(1, |t| t.can_begin_string_literal())
&& (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
// this branch exists only to provide a better diagnostic later; `pub` is not allowed here
(self.may_recover()
&& self.look_ahead(2, |t| t.is_keyword(kw::Pub))
&& self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case))))
}
/// Parses all the "front matter" (or "qualifiers") for a `fn` declaration,
/// up to and including the `fn` keyword. The formal grammar is:
///
/// ```text
/// Extern = "extern" StringLit? ;
/// FnQual = "const"? ( "async" | "gen" | "async" "gen" )? "unsafe"? Extern? ;
/// FnFrontMatter = FnQual "fn" ;
/// ```
///
/// `vis` represents the visibility that was already parsed, if any. Use
/// `Visibility::Inherited` when no visibility is known.
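///
/// For example (illustrative), the front matter of
/// `const unsafe extern "C" fn qualified()` is everything up to and including the
/// `fn` keyword; any visibility has already been consumed by the caller.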
pub(super) fn parse_fn_front_matter(
&mut self,
orig_vis: &Visibility,
case: Case,
) -> PResult<'a, FnHeader> {
let sp_start = self.token.span;
let constness = self.parse_constness(case);
let async_start_sp = self.token.span;
let coroutine_kind = self.parse_coroutine_kind(case);
let unsafe_start_sp = self.token.span;
let safety = self.parse_safety(case);
let ext_start_sp = self.token.span;
let ext = self.parse_extern(case);
if let Some(CoroutineKind::Async { span, .. }) = coroutine_kind {
if span.is_rust_2015() {
self.dcx().emit_err(errors::AsyncFnIn2015 {
span,
help: errors::HelpUseLatestEdition::new(),
});
}
}
match coroutine_kind {
Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
self.psess.gated_spans.gate(sym::gen_blocks, span);
}
Some(CoroutineKind::Async { .. }) | None => {}
}
if !self.eat_keyword_case(kw::Fn, case) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
match self.expect_one_of(&[], &[]) {
Ok(Recovered::Yes(_)) => {}
Ok(Recovered::No) => unreachable!(),
Err(mut err) => {
// Qualifier keywords ordering check
enum WrongKw {
Duplicated(Span),
Misplaced(Span),
}
// We may be able to recover
let mut recover_constness = constness;
let mut recover_coroutine_kind = coroutine_kind;
let mut recover_safety = safety;
// This lets the machine-applicable fix either place the keyword in the correct position
// or indicate that the keyword is already present and the second instance should be removed.
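// For example (illustrative): `async const fn f() {}` gets a machine-applicable
// suggestion to move `const` before `async`, while `const const fn f() {}` gets a
// suggestion to remove the second `const`.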
let wrong_kw = if self.check_keyword(kw::Const) {
match constness {
Const::Yes(sp) => Some(WrongKw::Duplicated(sp)),
Const::No => {
recover_constness = Const::Yes(self.token.span);
Some(WrongKw::Misplaced(async_start_sp))
}
}
} else if self.check_keyword(kw::Async) {
match coroutine_kind {
Some(CoroutineKind::Async { span, .. }) => {
Some(WrongKw::Duplicated(span))
}
Some(CoroutineKind::AsyncGen { span, .. }) => {
Some(WrongKw::Duplicated(span))
}
Some(CoroutineKind::Gen { .. }) => {
recover_coroutine_kind = Some(CoroutineKind::AsyncGen {
span: self.token.span,
closure_id: DUMMY_NODE_ID,
return_impl_trait_id: DUMMY_NODE_ID,
});
// FIXME(gen_blocks): This span is wrong, didn't want to think about it.
Some(WrongKw::Misplaced(unsafe_start_sp))
}
None => {
recover_coroutine_kind = Some(CoroutineKind::Async {
span: self.token.span,
closure_id: DUMMY_NODE_ID,
return_impl_trait_id: DUMMY_NODE_ID,
});
Some(WrongKw::Misplaced(unsafe_start_sp))
}
}
} else if self.check_keyword(kw::Unsafe) {
match safety {
Safety::Unsafe(sp) => Some(WrongKw::Duplicated(sp)),
Safety::Safe(sp) => {
recover_safety = Safety::Unsafe(self.token.span);
Some(WrongKw::Misplaced(sp))
}
Safety::Default => {
recover_safety = Safety::Unsafe(self.token.span);
Some(WrongKw::Misplaced(ext_start_sp))
}
}
} else if self.check_keyword(kw::Safe) {
match safety {
Safety::Safe(sp) => Some(WrongKw::Duplicated(sp)),
Safety::Unsafe(sp) => {
recover_safety = Safety::Safe(self.token.span);
Some(WrongKw::Misplaced(sp))
}
Safety::Default => {
recover_safety = Safety::Safe(self.token.span);
Some(WrongKw::Misplaced(ext_start_sp))
}
}
} else {
None
};
// The keyword is already present, suggest removal of the second instance
if let Some(WrongKw::Duplicated(original_sp)) = wrong_kw {
let original_kw = self
.span_to_snippet(original_sp)
.expect("Span extracted directly from keyword should always work");
err.span_suggestion(
self.token.uninterpolated_span(),
format!("`{original_kw}` already used earlier, remove this one"),
"",
Applicability::MachineApplicable,
)
.span_note(original_sp, format!("`{original_kw}` first seen here"));
}
// The keyword has not been seen yet, suggest correct placement in the function front matter
else if let Some(WrongKw::Misplaced(correct_pos_sp)) = wrong_kw {
let correct_pos_sp = correct_pos_sp.to(self.prev_token.span);
if let Ok(current_qual) = self.span_to_snippet(correct_pos_sp) {
let misplaced_qual_sp = self.token.uninterpolated_span();
let misplaced_qual = self.span_to_snippet(misplaced_qual_sp).unwrap();
err.span_suggestion(
correct_pos_sp.to(misplaced_qual_sp),
format!("`{misplaced_qual}` must come before `{current_qual}`"),
format!("{misplaced_qual} {current_qual}"),
Applicability::MachineApplicable,
).note("keyword order for function declarations is `pub`, `default`, `const`, `async`, `unsafe`, `extern`");
}
}
// Recover incorrect visibility order such as `async pub`
else if self.check_keyword(kw::Pub) {
let sp = sp_start.to(self.prev_token.span);
if let Ok(snippet) = self.span_to_snippet(sp) {
let current_vis = match self.parse_visibility(FollowedByType::No) {
Ok(v) => v,
Err(d) => {
d.cancel();
return Err(err);
}
};
let vs = pprust::vis_to_string(&current_vis);
let vs = vs.trim_end();
// There was no explicit visibility
if matches!(orig_vis.kind, VisibilityKind::Inherited) {
err.span_suggestion(
sp_start.to(self.prev_token.span),
format!("visibility `{vs}` must come before `{snippet}`"),
format!("{vs} {snippet}"),
Applicability::MachineApplicable,
);
}
// There was an explicit visibility
else {
err.span_suggestion(
current_vis.span,
"there is already a visibility modifier, remove one",
"",
Applicability::MachineApplicable,
)
.span_note(orig_vis.span, "explicit visibility first seen here");
}
}
}
// FIXME(gen_blocks): add keyword recovery logic for genness
if wrong_kw.is_some()
&& self.may_recover()
&& self.look_ahead(1, |tok| tok.is_keyword_case(kw::Fn, case))
{
// Advance past the misplaced keyword and `fn`
self.bump();
self.bump();
err.emit();
return Ok(FnHeader {
constness: recover_constness,
safety: recover_safety,
coroutine_kind: recover_coroutine_kind,
ext,
});
}
return Err(err);
}
}
}
Ok(FnHeader { constness, safety, coroutine_kind, ext })
}
/// Parses the parameter list and result type of a function declaration.
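///
/// For `fn add(a: i32, b: i32) -> i32`, this covers `(a: i32, b: i32) -> i32`
/// (illustrative).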
pub(super) fn parse_fn_decl(
&mut self,
req_name: ReqName,
ret_allow_plus: AllowPlus,
recover_return_sign: RecoverReturnSign,
) -> PResult<'a, P<FnDecl>> {
Ok(P(FnDecl {
inputs: self.parse_fn_params(req_name)?,
output: self.parse_ret_ty(ret_allow_plus, RecoverQPath::Yes, recover_return_sign)?,
}))
}
/// Parses the parameter list of a function, including the `(` and `)` delimiters.
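///
/// For `fn f(a: u8, b: String) {}` this parses `(a: u8, b: String)` into two
/// `Param`s (illustrative). A `self` parameter is only accepted in the first
/// position; see `parse_param_general`.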
pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
let mut first_param = true;
// Parse the arguments, starting out with `self` being allowed...
if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis)
// might be typo'd trait impl, handled elsewhere
&& !self.token.is_keyword(kw::For)
{
// recover from missing argument list, e.g. `fn main -> () {}`
self.dcx()
.emit_err(errors::MissingFnParams { span: self.prev_token.span.shrink_to_hi() });
return Ok(ThinVec::new());
}
let (mut params, _) = self.parse_paren_comma_seq(|p| {
p.recover_vcs_conflict_marker();
let snapshot = p.create_snapshot_for_diagnostic();
let param = p.parse_param_general(req_name, first_param).or_else(|e| {
let guar = e.emit();
let lo = p.prev_token.span;
p.restore_snapshot(snapshot);
// Skip every token until next possible arg or end.
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
// Create a placeholder argument for proper arg count (issue #34264).
Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)), guar))
});
// ...now that we've parsed the first argument, `self` is no longer allowed.
first_param = false;
param
})?;
// Replace duplicated recovered params with `_` pattern to avoid unnecessary errors.
self.deduplicate_recovered_params_names(&mut params);
Ok(params)
}
/// Parses a single function parameter.
///
/// - `self` is syntactically allowed when `first_param` holds.
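///
/// For example (illustrative), in `fn method(&self, x: u32)` the `&self` parameter is
/// accepted only because it comes first; a `self` in any later position is recovered
/// as an error via `recover_bad_self_param`.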
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
let lo = self.token.span;
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
if let Some(mut param) = this.parse_self_param()? {
param.attrs = attrs;
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
return Ok((res?, TrailingToken::None));
}
let is_name_required = match this.token.kind {
token::DotDotDot => false,
_ => req_name(this.token.span.with_neighbor(this.prev_token.span).edition()),
};
let (pat, ty) = if is_name_required || this.is_named_param() {
debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
let (pat, colon) = this.parse_fn_param_pat_colon()?;
if !colon {
let mut err = this.unexpected().unwrap_err();
return if let Some(ident) =
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
{
let guar = err.emit();
Ok((dummy_arg(ident, guar), TrailingToken::None))
} else {
Err(err)
};
}
this.eat_incorrect_doc_comment_for_param_type();
(pat, this.parse_ty_for_param()?)
} else {
debug!("parse_param_general ident_to_pat");
let parser_snapshot_before_ty = this.create_snapshot_for_diagnostic();
this.eat_incorrect_doc_comment_for_param_type();
let mut ty = this.parse_ty_for_param();
if ty.is_ok()
&& this.token != token::Comma
&& this.token != token::CloseDelim(Delimiter::Parenthesis)
{
// This wasn't actually a type, but a pattern looking like a type,
// so we are going to rollback and re-parse for recovery.
ty = this.unexpected_any();
}
match ty {
Ok(ty) => {
let ident = Ident::new(kw::Empty, this.prev_token.span);
let bm = BindingMode::NONE;
let pat = this.mk_pat_ident(ty.span, bm, ident);
(pat, ty)
}
// If this is a C-variadic argument and we hit an error, return the error.
Err(err) if this.token == token::DotDotDot => return Err(err),
// Recover from attempting to parse the argument as a type without pattern.
Err(err) => {
err.cancel();
this.restore_snapshot(parser_snapshot_before_ty);
this.recover_arg_parse()?
}
}
};
let span = lo.to(this.prev_token.span);
Ok((
Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
TrailingToken::None,
))
})
}
/// Returns the parsed optional `self` parameter of a method, if present.
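///
/// Recognized forms include (a sketch based on the match below; assume these are
/// associated functions inside an `impl` block):
///
/// ```ignore (illustrative)
/// fn a(self) {}
/// fn b(mut self) {}
/// fn c(&self) {}
/// fn d(&mut self) {}
/// fn e<'a>(&'a self) {}
/// fn f(self: Box<Self>) {}
/// ```
///
/// `*self`, `*const self`, and `*mut self` are also parsed here but rejected with an error.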
fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
// Extract an identifier *after* having confirmed that the token is one.
let expect_self_ident = |this: &mut Self| match this.token.ident() {
Some((ident, IdentIsRaw::No)) => {
this.bump();
ident
}
_ => unreachable!(),
};
// Is `self` `n` tokens ahead?
let is_isolated_self = |this: &Self, n| {
this.is_keyword_ahead(n, &[kw::SelfLower])
&& this.look_ahead(n + 1, |t| t != &token::PathSep)
};
// Is `mut self` `n` tokens ahead?
let is_isolated_mut_self =
|this: &Self, n| this.is_keyword_ahead(n, &[kw::Mut]) && is_isolated_self(this, n + 1);
// Parse `self` or `self: TYPE`. We already know the current token is `self`.
let parse_self_possibly_typed = |this: &mut Self, m| {
let eself_ident = expect_self_ident(this);
let eself_hi = this.prev_token.span;
let eself = if this.eat(&token::Colon) {
SelfKind::Explicit(this.parse_ty()?, m)
} else {
SelfKind::Value(m)
};
Ok((eself, eself_ident, eself_hi))
};
// Recover for the grammar `*self`, `*const self`, and `*mut self`.
let recover_self_ptr = |this: &mut Self| {
this.dcx().emit_err(errors::SelfArgumentPointer { span: this.token.span });
Ok((SelfKind::Value(Mutability::Not), expect_self_ident(this), this.prev_token.span))
};
// Parse optional `self` parameter of a method.
// Only a limited set of initial token sequences is considered `self` parameters; anything
// else is parsed as a normal function parameter list, so some lookahead is required.
let eself_lo = self.token.span;
let (eself, eself_ident, eself_hi) = match self.token.uninterpolate().kind {
token::BinOp(token::And) => {
let eself = if is_isolated_self(self, 1) {
// `&self`
self.bump();
SelfKind::Region(None, Mutability::Not)
} else if is_isolated_mut_self(self, 1) {
// `&mut self`
self.bump();
self.bump();
SelfKind::Region(None, Mutability::Mut)
} else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) {
// `&'lt self`
self.bump();
let lt = self.expect_lifetime();
SelfKind::Region(Some(lt), Mutability::Not)
} else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) {
// `&'lt mut self`
self.bump();
let lt = self.expect_lifetime();
self.bump();
SelfKind::Region(Some(lt), Mutability::Mut)
} else {
// `&not_self`
return Ok(None);
};
(eself, expect_self_ident(self), self.prev_token.span)
}
// `*self`
token::BinOp(token::Star) if is_isolated_self(self, 1) => {
self.bump();
recover_self_ptr(self)?
}
// `*mut self` and `*const self`
token::BinOp(token::Star)
if self.look_ahead(1, |t| t.is_mutability()) && is_isolated_self(self, 2) =>
{
self.bump();
self.bump();
recover_self_ptr(self)?
}
// `self` and `self: TYPE`
token::Ident(..) if is_isolated_self(self, 0) => {
parse_self_possibly_typed(self, Mutability::Not)?
}
// `mut self` and `mut self: TYPE`
token::Ident(..) if is_isolated_mut_self(self, 0) => {
self.bump();
parse_self_possibly_typed(self, Mutability::Mut)?
}
_ => return Ok(None),
};
let eself = source_map::respan(eself_lo.to(eself_hi), eself);
Ok(Some(Param::from_self(AttrVec::default(), eself, eself_ident)))
}
fn is_named_param(&self) -> bool {
let offset = match &self.token.kind {
token::Interpolated(nt) => match &**nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0,
},
token::BinOp(token::And) | token::AndAnd => 1,
_ if self.token.is_keyword(kw::Mut) => 1,
_ => 0,
};
self.look_ahead(offset, |t| t.is_ident())
&& self.look_ahead(offset + 1, |t| t == &token::Colon)
}
fn recover_self_param(&mut self) -> bool {
matches!(
self.parse_outer_attributes()
.and_then(|_| self.parse_self_param())
.map_err(|e| e.cancel()),
Ok(Some(_))
)
}
}
enum IsMacroRulesItem {
Yes { has_bang: bool },
No,
}