Auto merge of #114292 - estebank:issue-71039, r=b-naber
More detail when expecting expression but encountering bad macro argument

On nested macro invocations where the same macro fragment changes fragment type from one invocation to the next, point at the chain of invocations and at the macro fragment definitions, explaining that the fragment type has changed. Fix #71039.

```
error: expected expression, found pattern `1 + 1`
  --> $DIR/trace_faulty_macros.rs:49:37
   |
LL | (let $p:pat = $e:expr) => {test!(($p,$e))};
   |               -------                -- this is interpreted as expression, but it is expected to be pattern
   |               |
   |               this macro fragment matcher is expression
...
LL | (($p:pat, $e:pat)) => {let $p = $e;};
   |           ------                ^^ expected expression
   |           |
   |           this macro fragment matcher is pattern
...
LL | test!(let x = 1+1);
   | ------------------
   | |             |
   | |             this is expected to be expression
   | in this macro invocation
   |
   = note: when forwarding a matched fragment to another macro-by-example, matchers in the second macro will see an opaque AST of the fragment type, not the underlying tokens
   = note: this error originates in the macro `test` (in Nightly builds, run with -Z macro-backtrace for more info)
```
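For reference, a minimal reproducer in the spirit of the test the diagnostic above comes from (a sketch reconstructed from the macro arms shown in the error output, not the exact test file):

```rust
// Sketch only: reconstructed from the diagnostic above.
macro_rules! test {
    // `$e` is first matched as an `expr` fragment...
    (let $p:pat = $e:expr) => { test!(($p, $e)) };
    // ...then the forwarded fragment is re-matched by a `pat` matcher, so by the
    // time `let $p = $e;` is expanded, `$e` is an opaque fragment that can no
    // longer be parsed as the expression the expansion expects.
    (($p:pat, $e:pat)) => { let $p = $e; };
}

fn main() {
    test!(let x = 1 + 1); // error: expected expression, found pattern `1 + 1`
}
```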
commit 2831701757 · 27 changed files with 200 additions and 67 deletions
@@ -249,7 +249,7 @@ impl<'a> Parser<'a> {
     /// The delimiters or `=` are still put into the resulting token stream.
     pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
         let item = match &self.token.kind {
-            token::Interpolated(nt) => match &**nt {
+            token::Interpolated(nt) => match &nt.0 {
                 Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
                 _ => None,
             },

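Most of the mechanical churn in the hunks below is the same substitution: `&**nt` becomes `&nt.0`, and a matcher span becomes available as `nt.1`. A self-contained sketch of the payload shape this implies, with stand-in types inferred from the diff rather than the literal `rustc_ast` definitions:

```rust
use std::sync::Arc;

struct Nonterminal; // stand-in for rustc_ast::token::Nonterminal
#[derive(Clone, Copy)]
struct Span; // stand-in for rustc_span::Span

enum TokenKind {
    // Previously (roughly) `Interpolated(Arc<Nonterminal>)`, accessed via `&**nt`;
    // the diff suggests the payload now also carries the fragment matcher's span,
    // so the nonterminal lives at `nt.0` and the span at `nt.1`.
    Interpolated(Arc<(Nonterminal, Span)>),
    Other, // stand-in for every other token kind
}

fn matcher_span(kind: &TokenKind) -> Option<Span> {
    match kind {
        TokenKind::Interpolated(nt) => Some(nt.1),
        TokenKind::Other => None,
    }
}

fn main() {
    assert!(matcher_span(&TokenKind::Other).is_none());
}
```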
@@ -369,7 +369,7 @@ impl<'a> Parser<'a> {
     /// ```
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match &self.token.kind {
-            token::Interpolated(nt) => match &**nt {
+            token::Interpolated(nt) => match &nt.0 {
                 token::NtMeta(e) => Some(e.clone()),
                 _ => None,
             },

@@ -24,11 +24,12 @@ use crate::parser;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
+use rustc_ast::tokenstream::AttrTokenTree;
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
     AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingAnnotation, Block,
-    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Param, Pat, PatKind,
-    Path, PathSegment, QSelf, Ty, TyKind,
+    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
+    PatKind, Path, PathSegment, QSelf, Ty, TyKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;

@@ -2252,6 +2253,59 @@ impl<'a> Parser<'a> {
             err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
         }
         err.span_label(span, "expected expression");
+
+        // Walk the chain of macro expansions for the current token to point at how the original
+        // code was interpreted. This helps the user realize when a macro argument of one type is
+        // later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
+        // in a subsequent macro invocation (#71039).
+        let mut tok = self.token.clone();
+        let mut labels = vec![];
+        while let TokenKind::Interpolated(node) = &tok.kind {
+            let tokens = node.0.tokens();
+            labels.push(node.clone());
+            if let Some(tokens) = tokens
+                && let tokens = tokens.to_attr_token_stream()
+                && let tokens = tokens.0.deref()
+                && let [AttrTokenTree::Token(token, _)] = &tokens[..]
+            {
+                tok = token.clone();
+            } else {
+                break;
+            }
+        }
+        let mut iter = labels.into_iter().peekable();
+        let mut show_link = false;
+        while let Some(node) = iter.next() {
+            let descr = node.0.descr();
+            if let Some(next) = iter.peek() {
+                let next_descr = next.0.descr();
+                if next_descr != descr {
+                    err.span_label(next.1, format!("this macro fragment matcher is {next_descr}"));
+                    err.span_label(node.1, format!("this macro fragment matcher is {descr}"));
+                    err.span_label(
+                        next.0.use_span(),
+                        format!("this is expected to be {next_descr}"),
+                    );
+                    err.span_label(
+                        node.0.use_span(),
+                        format!(
+                            "this is interpreted as {}, but it is expected to be {}",
+                            next_descr, descr,
+                        ),
+                    );
+                    show_link = true;
+                } else {
+                    err.span_label(node.1, "");
+                }
+            }
+        }
+        if show_link {
+            err.note(
+                "when forwarding a matched fragment to another macro-by-example, matchers in the \
+                 second macro will see an opaque AST of the fragment type, not the underlying \
+                 tokens",
+            );
+        }
         err
     }

@@ -46,7 +46,7 @@ use thin_vec::{thin_vec, ThinVec};
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
         if let token::Interpolated(nt) = &$p.token.kind {
-            match &**nt {
+            match &nt.0 {
                 token::NtExpr(e) | token::NtLiteral(e) => {
                     let e = e.clone();
                     $p.bump();

@@ -1952,7 +1952,7 @@ impl<'a> Parser<'a> {
         mk_lit_char: impl FnOnce(Symbol, Span) -> L,
     ) -> PResult<'a, L> {
         if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtExpr(e) | token::NtLiteral(e) = &**nt
+            && let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
             && matches!(e.kind, ExprKind::Err)
         {
             let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }

@@ -123,7 +123,7 @@ impl<'a> Parser<'a> {
         // Don't use `maybe_whole` so that we have precise control
         // over when we bump the parser
         if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtItem(item) = &**nt
+            && let token::NtItem(item) = &nt.0
         {
             let mut item = item.clone();
             self.bump();

@@ -2750,7 +2750,7 @@ impl<'a> Parser<'a> {
 
     fn is_named_param(&self) -> bool {
         let offset = match &self.token.kind {
-            token::Interpolated(nt) => match **nt {
+            token::Interpolated(nt) => match &nt.0 {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             },

@@ -93,7 +93,7 @@ pub enum TrailingToken {
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
         if let token::Interpolated(nt) = &$p.token.kind {
-            if let token::$constructor(x) = &**nt {
+            if let token::$constructor(x) = &nt.0 {
                 let $x = x.clone();
                 $p.bump();
                 return Ok($e);

@@ -110,7 +110,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
             && $self.may_recover()
             && $self.look_ahead(1, |t| t == &token::ModSep)
             && let token::Interpolated(nt) = &$self.token.kind
-            && let token::NtTy(ty) = &**nt
+            && let token::NtTy(ty) = &nt.0
         {
             let ty = ty.clone();
             $self.bump();

@@ -367,12 +367,14 @@ impl TokenDescription {
 pub(super) fn token_descr(token: &Token) -> String {
     let name = pprust::token_to_string(token).to_string();
 
-    let kind = TokenDescription::from_token(token).map(|kind| match kind {
-        TokenDescription::ReservedIdentifier => "reserved identifier",
-        TokenDescription::Keyword => "keyword",
-        TokenDescription::ReservedKeyword => "reserved keyword",
-        TokenDescription::DocComment => "doc comment",
-    });
+    let kind = match (TokenDescription::from_token(token), &token.kind) {
+        (Some(TokenDescription::ReservedIdentifier), _) => Some("reserved identifier"),
+        (Some(TokenDescription::Keyword), _) => Some("keyword"),
+        (Some(TokenDescription::ReservedKeyword), _) => Some("reserved keyword"),
+        (Some(TokenDescription::DocComment), _) => Some("doc comment"),
+        (None, TokenKind::Interpolated(node)) => Some(node.0.descr()),
+        (None, _) => None,
+    };
 
     if let Some(kind) = kind { format!("{kind} `{name}`") } else { format!("`{name}`") }
 }

@@ -662,7 +664,7 @@ impl<'a> Parser<'a> {
     fn check_inline_const(&self, dist: usize) -> bool {
         self.is_keyword_ahead(dist, &[kw::Const])
             && self.look_ahead(dist + 1, |t| match &t.kind {
-                token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
+                token::Interpolated(nt) => matches!(&nt.0, token::NtBlock(..)),
                 token::OpenDelim(Delimiter::Brace) => true,
                 _ => false,
             })

@@ -50,12 +50,12 @@ impl<'a> Parser<'a> {
             NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
             NonterminalKind::Vis => match token.kind {
                 // The follow-set of :vis + "priv" keyword + interpolated
-                token::Comma | token::Ident(..) | token::Interpolated(..) => true,
+                token::Comma | token::Ident(..) | token::Interpolated(_) => true,
                 _ => token.can_begin_type(),
             },
             NonterminalKind::Block => match &token.kind {
                 token::OpenDelim(Delimiter::Brace) => true,
-                token::Interpolated(nt) => match **nt {
+                token::Interpolated(nt) => match &nt.0 {
                     NtBlock(_) | NtLifetime(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
                     NtItem(_) | NtPat(_) | NtTy(_) | NtIdent(..) | NtMeta(_) | NtPath(_)
                     | NtVis(_) => false,

@@ -64,7 +64,7 @@ impl<'a> Parser<'a> {
             },
             NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
                 token::ModSep | token::Ident(..) => true,
-                token::Interpolated(nt) => may_be_ident(nt),
+                token::Interpolated(nt) => may_be_ident(&nt.0),
                 _ => false,
             },
             NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {

@@ -75,7 +75,7 @@ impl<'a> Parser<'a> {
                     token::BinOp(token::And) | // reference
                     token::BinOp(token::Minus) | // negative literal
                     token::AndAnd | // double reference
-                    token::Literal(..) | // literal
+                    token::Literal(_) | // literal
                     token::DotDot | // range pattern (future compat)
                     token::DotDotDot | // range pattern (future compat)
                     token::ModSep | // path

@@ -83,14 +83,14 @@ impl<'a> Parser<'a> {
                     token::BinOp(token::Shl) => true, // path (double UFCS)
                     // leading vert `|` or-pattern
                     token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
-                    token::Interpolated(nt) => may_be_ident(nt),
+                    token::Interpolated(nt) => may_be_ident(&nt.0),
                     _ => false,
                 }
             }
             NonterminalKind::Lifetime => match &token.kind {
                 token::Lifetime(_) => true,
                 token::Interpolated(nt) => {
-                    matches!(**nt, NtLifetime(_))
+                    matches!(&nt.0, NtLifetime(_))
                 }
                 _ => false,
             },

@@ -191,7 +191,7 @@ impl<'a> Parser<'a> {
                 panic!(
                     "Missing tokens for nt {:?} at {:?}: {:?}",
                     nt,
-                    nt.span(),
+                    nt.use_span(),
                     pprust::nonterminal_to_string(&nt)
                 );
             }

@@ -592,7 +592,7 @@ impl<'a> Parser<'a> {
 
         // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
         if let token::Interpolated(nt) = &self.token.kind {
-            if let token::NtPat(_) = **nt {
+            if let token::NtPat(..) = &nt.0 {
                 self.expected_ident_found_err().emit();
             }
         }

@@ -185,7 +185,7 @@ impl<'a> Parser<'a> {
         });
 
         if let token::Interpolated(nt) = &self.token.kind {
-            if let token::NtTy(ty) = &**nt {
+            if let token::NtTy(ty) = &nt.0 {
                 if let ast::TyKind::Path(None, path) = &ty.kind {
                     let path = path.clone();
                     self.bump();

@@ -53,7 +53,7 @@ impl<'a> Parser<'a> {
         // Don't use `maybe_whole` so that we have precise control
         // over when we bump the parser
        if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtStmt(stmt) = &**nt
+            && let token::NtStmt(stmt) = &nt.0
         {
             let mut stmt = stmt.clone();
             self.bump();