1
Fork 0

Remove a Span from TokenKind::Interpolated.

This span records the declaration of the metavariable in the LHS of the macro.
It's used in a couple of error messages. Unfortunately, it gets in the way of
the long-term goal of removing `TokenKind::Interpolated`. So this commit
removes it, which degrades a couple of (obscure) error messages but makes
things simpler and enables the next commit.
This commit is contained in:
Nicholas Nethercote 2024-04-22 16:29:27 +10:00
parent 852a78ea8d
commit 9a63a42cb7
19 changed files with 62 additions and 97 deletions

View file

@@ -345,7 +345,7 @@ impl MetaItem {
let span = span.with_hi(segments.last().unwrap().ident.span.hi()); let span = span.with_hi(segments.last().unwrap().ident.span.hi());
Path { span, segments, tokens: None } Path { span, segments, tokens: None }
} }
Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &nt.0 { Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &**nt {
token::Nonterminal::NtMeta(item) => return item.meta(item.path.span), token::Nonterminal::NtMeta(item) => return item.meta(item.path.span),
token::Nonterminal::NtPath(path) => (**path).clone(), token::Nonterminal::NtPath(path) => (**path).clone(),
_ => return None, _ => return None,

View file

@@ -783,8 +783,6 @@ pub fn visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
} }
token::Interpolated(nt) => { token::Interpolated(nt) => {
let nt = Lrc::make_mut(nt); let nt = Lrc::make_mut(nt);
let (nt, sp) = (&mut nt.0, &mut nt.1);
vis.visit_span(sp);
visit_nonterminal(nt, vis); visit_nonterminal(nt, vis);
} }
_ => {} _ => {}

View file

@@ -111,7 +111,7 @@ impl Lit {
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
Literal(token_lit) => Some(token_lit), Literal(token_lit) => Some(token_lit),
Interpolated(ref nt) Interpolated(ref nt)
if let NtExpr(expr) | NtLiteral(expr) = &nt.0 if let NtExpr(expr) | NtLiteral(expr) = &**nt
&& let ast::ExprKind::Lit(token_lit) = expr.kind => && let ast::ExprKind::Lit(token_lit) = expr.kind =>
{ {
Some(token_lit) Some(token_lit)
@@ -333,7 +333,11 @@ pub enum TokenKind {
/// - It prevents `Token` from implementing `Copy`. /// - It prevents `Token` from implementing `Copy`.
/// It adds complexity and likely slows things down. Please don't add new /// It adds complexity and likely slows things down. Please don't add new
/// occurrences of this token kind! /// occurrences of this token kind!
Interpolated(Lrc<(Nonterminal, Span)>), ///
/// The span in the surrounding `Token` is that of the metavariable in the
/// macro's RHS. The span within the Nonterminal is that of the fragment
/// passed to the macro at the call site.
Interpolated(Lrc<Nonterminal>),
/// A doc comment token. /// A doc comment token.
/// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc) /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
@@ -441,7 +445,7 @@ impl Token {
/// if they keep spans or perform edition checks. /// if they keep spans or perform edition checks.
pub fn uninterpolated_span(&self) -> Span { pub fn uninterpolated_span(&self) -> Span {
match &self.kind { match &self.kind {
Interpolated(nt) => nt.0.use_span(), Interpolated(nt) => nt.use_span(),
_ => self.span, _ => self.span,
} }
} }
@@ -486,7 +490,7 @@ impl Token {
PathSep | // global path PathSep | // global path
Lifetime(..) | // labeled loop Lifetime(..) | // labeled loop
Pound => true, // expression attributes Pound => true, // expression attributes
Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) | Interpolated(ref nt) => matches!(&**nt, NtLiteral(..) |
NtExpr(..) | NtExpr(..) |
NtBlock(..) | NtBlock(..) |
NtPath(..)), NtPath(..)),
@@ -510,7 +514,7 @@ impl Token {
| DotDot | DotDotDot | DotDotEq // ranges | DotDot | DotDotDot | DotDotEq // ranges
| Lt | BinOp(Shl) // associated path | Lt | BinOp(Shl) // associated path
| PathSep => true, // global path | PathSep => true, // global path
Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) | Interpolated(ref nt) => matches!(&**nt, NtLiteral(..) |
NtPat(..) | NtPat(..) |
NtBlock(..) | NtBlock(..) |
NtPath(..)), NtPath(..)),
@@ -533,7 +537,7 @@ impl Token {
Lifetime(..) | // lifetime bound in trait object Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path Lt | BinOp(Shl) | // associated path
PathSep => true, // global path PathSep => true, // global path
Interpolated(ref nt) => matches!(&nt.0, NtTy(..) | NtPath(..)), Interpolated(ref nt) => matches!(&**nt, NtTy(..) | NtPath(..)),
// For anonymous structs or unions, which only appear in specific positions // For anonymous structs or unions, which only appear in specific positions
// (type of struct fields or union fields), we don't consider them as regular types // (type of struct fields or union fields), we don't consider them as regular types
_ => false, _ => false,
@@ -544,7 +548,7 @@ impl Token {
pub fn can_begin_const_arg(&self) -> bool { pub fn can_begin_const_arg(&self) -> bool {
match self.kind { match self.kind {
OpenDelim(Delimiter::Brace) => true, OpenDelim(Delimiter::Brace) => true,
Interpolated(ref nt) => matches!(&nt.0, NtExpr(..) | NtBlock(..) | NtLiteral(..)), Interpolated(ref nt) => matches!(&**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
_ => self.can_begin_literal_maybe_minus(), _ => self.can_begin_literal_maybe_minus(),
} }
} }
@@ -589,7 +593,7 @@ impl Token {
match self.uninterpolate().kind { match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true, Literal(..) | BinOp(Minus) => true,
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true, Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
Interpolated(ref nt) => match &nt.0 { Interpolated(ref nt) => match &**nt {
NtLiteral(_) => true, NtLiteral(_) => true,
NtExpr(e) => match &e.kind { NtExpr(e) => match &e.kind {
ast::ExprKind::Lit(_) => true, ast::ExprKind::Lit(_) => true,
@@ -610,7 +614,7 @@ impl Token {
/// otherwise returns the original token. /// otherwise returns the original token.
pub fn uninterpolate(&self) -> Cow<'_, Token> { pub fn uninterpolate(&self) -> Cow<'_, Token> {
match &self.kind { match &self.kind {
Interpolated(nt) => match &nt.0 { Interpolated(nt) => match &**nt {
NtIdent(ident, is_raw) => { NtIdent(ident, is_raw) => {
Cow::Owned(Token::new(Ident(ident.name, *is_raw), ident.span)) Cow::Owned(Token::new(Ident(ident.name, *is_raw), ident.span))
} }
@@ -627,7 +631,7 @@ impl Token {
// We avoid using `Token::uninterpolate` here because it's slow. // We avoid using `Token::uninterpolate` here because it's slow.
match &self.kind { match &self.kind {
&Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)), &Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)),
Interpolated(nt) => match &nt.0 { Interpolated(nt) => match &**nt {
NtIdent(ident, is_raw) => Some((*ident, *is_raw)), NtIdent(ident, is_raw) => Some((*ident, *is_raw)),
_ => None, _ => None,
}, },
@@ -641,7 +645,7 @@ impl Token {
// We avoid using `Token::uninterpolate` here because it's slow. // We avoid using `Token::uninterpolate` here because it's slow.
match &self.kind { match &self.kind {
&Lifetime(name) => Some(Ident::new(name, self.span)), &Lifetime(name) => Some(Ident::new(name, self.span)),
Interpolated(nt) => match &nt.0 { Interpolated(nt) => match &**nt {
NtLifetime(ident) => Some(*ident), NtLifetime(ident) => Some(*ident),
_ => None, _ => None,
}, },
@@ -668,7 +672,7 @@ impl Token {
/// Returns `true` if the token is an interpolated path. /// Returns `true` if the token is an interpolated path.
fn is_whole_path(&self) -> bool { fn is_whole_path(&self) -> bool {
if let Interpolated(nt) = &self.kind if let Interpolated(nt) = &self.kind
&& let NtPath(..) = &nt.0 && let NtPath(..) = &**nt
{ {
return true; return true;
} }
@@ -681,7 +685,7 @@ impl Token {
/// (which happens while parsing the result of macro expansion)? /// (which happens while parsing the result of macro expansion)?
pub fn is_whole_expr(&self) -> bool { pub fn is_whole_expr(&self) -> bool {
if let Interpolated(nt) = &self.kind if let Interpolated(nt) = &self.kind
&& let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = &nt.0 && let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = &**nt
{ {
return true; return true;
} }
@@ -692,7 +696,7 @@ impl Token {
/// Is the token an interpolated block (`$b:block`)? /// Is the token an interpolated block (`$b:block`)?
pub fn is_whole_block(&self) -> bool { pub fn is_whole_block(&self) -> bool {
if let Interpolated(nt) = &self.kind if let Interpolated(nt) = &self.kind
&& let NtBlock(..) = &nt.0 && let NtBlock(..) = &**nt
{ {
return true; return true;
} }
@@ -857,6 +861,7 @@ pub enum Nonterminal {
NtPat(P<ast::Pat>), NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>), NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>), NtTy(P<ast::Ty>),
/// The span is for the identifier argument passed to the macro.
NtIdent(Ident, IdentIsRaw), NtIdent(Ident, IdentIsRaw),
NtLifetime(Ident), NtLifetime(Ident),
NtLiteral(P<ast::Expr>), NtLiteral(P<ast::Expr>),

View file

@@ -490,14 +490,14 @@ impl TokenStream {
fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree { fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
match &token.kind { match &token.kind {
token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = nt.0 => { token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = &**nt => {
TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing) TokenTree::Token(Token::new(token::Ident(ident.name, *is_raw), ident.span), spacing)
} }
token::Interpolated(nt) => TokenTree::Delimited( token::Interpolated(nt) => TokenTree::Delimited(
DelimSpan::from_single(token.span), DelimSpan::from_single(token.span),
DelimSpacing::new(Spacing::JointHidden, spacing), DelimSpacing::new(Spacing::JointHidden, spacing),
Delimiter::Invisible, Delimiter::Invisible,
TokenStream::from_nonterminal_ast(&nt.0).flattened(), TokenStream::from_nonterminal_ast(&nt).flattened(),
), ),
_ => TokenTree::Token(token.clone(), spacing), _ => TokenTree::Token(token.clone(), spacing),
} }

View file

@@ -926,7 +926,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
} }
token::Eof => "<eof>".into(), token::Eof => "<eof>".into(),
token::Interpolated(ref nt) => self.nonterminal_to_string(&nt.0).into(), token::Interpolated(ref nt) => self.nonterminal_to_string(&nt).into(),
} }
} }

View file

@@ -73,12 +73,6 @@ pub(super) fn failed_to_match_macro<'cx>(
&& (matches!(expected_token.kind, TokenKind::Interpolated(_)) && (matches!(expected_token.kind, TokenKind::Interpolated(_))
|| matches!(token.kind, TokenKind::Interpolated(_))) || matches!(token.kind, TokenKind::Interpolated(_)))
{ {
if let TokenKind::Interpolated(node) = &expected_token.kind {
err.span_label(node.1, "");
}
if let TokenKind::Interpolated(node) = &token.kind {
err.span_label(node.1, "");
}
err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens"); err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens");
err.note("see <https://doc.rust-lang.org/nightly/reference/macros-by-example.html#forwarding-a-matched-fragment> for more information"); err.note("see <https://doc.rust-lang.org/nightly/reference/macros-by-example.html#forwarding-a-matched-fragment> for more information");

View file

@@ -75,10 +75,9 @@ pub(crate) use ParseResult::*;
use crate::mbe::{macro_rules::Tracker, KleeneOp, TokenTree}; use crate::mbe::{macro_rules::Tracker, KleeneOp, TokenTree};
use rustc_ast::token::{self, DocComment, Nonterminal, NonterminalKind, Token}; use rustc_ast::token::{self, DocComment, NonterminalKind, Token};
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::ErrorGuaranteed; use rustc_errors::ErrorGuaranteed;
use rustc_lint_defs::pluralize; use rustc_lint_defs::pluralize;
use rustc_parse::parser::{ParseNtResult, Parser}; use rustc_parse::parser::{ParseNtResult, Parser};
@@ -392,7 +391,7 @@ pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) enum NamedMatch { pub(crate) enum NamedMatch {
MatchedSeq(Vec<NamedMatch>), MatchedSeq(Vec<NamedMatch>),
MatchedSingle(ParseNtResult<Lrc<(Nonterminal, Span)>>), MatchedSingle(ParseNtResult),
} }
/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison) /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
@@ -686,11 +685,7 @@ impl TtParser {
} }
Ok(nt) => nt, Ok(nt) => nt,
}; };
mp.push_match( mp.push_match(next_metavar, seq_depth, MatchedSingle(nt));
next_metavar,
seq_depth,
MatchedSingle(nt.map_nt(|nt| (Lrc::new((nt, span))))),
);
mp.idx += 1; mp.idx += 1;
} else { } else {
unreachable!() unreachable!()

View file

@@ -127,7 +127,7 @@ impl MultiItemModifier for DeriveProcMacro {
Annotatable::Stmt(stmt) => token::NtStmt(stmt), Annotatable::Stmt(stmt) => token::NtStmt(stmt),
_ => unreachable!(), _ => unreachable!(),
}; };
TokenStream::token_alone(token::Interpolated(Lrc::new((nt, span))), DUMMY_SP) TokenStream::token_alone(token::Interpolated(Lrc::new(nt)), DUMMY_SP)
} else { } else {
item.to_tokens() item.to_tokens()
}; };

View file

@@ -259,7 +259,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
})); }));
} }
Interpolated(ref nt) if let NtIdent(ident, is_raw) = &nt.0 => { Interpolated(ref nt) if let NtIdent(ident, is_raw) = &**nt => {
trees.push(TokenTree::Ident(Ident { trees.push(TokenTree::Ident(Ident {
sym: ident.name, sym: ident.name,
is_raw: matches!(is_raw, IdentIsRaw::Yes), is_raw: matches!(is_raw, IdentIsRaw::Yes),
@@ -268,14 +268,14 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
} }
Interpolated(nt) => { Interpolated(nt) => {
let stream = TokenStream::from_nonterminal_ast(&nt.0); let stream = TokenStream::from_nonterminal_ast(&nt);
// A hack used to pass AST fragments to attribute and derive // A hack used to pass AST fragments to attribute and derive
// macros as a single nonterminal token instead of a token // macros as a single nonterminal token instead of a token
// stream. Such token needs to be "unwrapped" and not // stream. Such token needs to be "unwrapped" and not
// represented as a delimited group. // represented as a delimited group.
// FIXME: It needs to be removed, but there are some // FIXME: It needs to be removed, but there are some
// compatibility issues (see #73345). // compatibility issues (see #73345).
if crate::base::nt_pretty_printing_compatibility_hack(&nt.0, rustc.ecx.sess) { if crate::base::nt_pretty_printing_compatibility_hack(&nt, rustc.ecx.sess) {
trees.extend(Self::from_internal((stream, rustc))); trees.extend(Self::from_internal((stream, rustc)));
} else { } else {
trees.push(TokenTree::Group(Group { trees.push(TokenTree::Group(Group {

View file

@@ -363,7 +363,7 @@ impl<'a> Parser<'a> {
// We can't use `maybe_whole` here because it would bump in the `None` // We can't use `maybe_whole` here because it would bump in the `None`
// case, which we don't want. // case, which we don't want.
if let token::Interpolated(nt) = &self.token.kind if let token::Interpolated(nt) = &self.token.kind
&& let token::NtMeta(attr_item) = &nt.0 && let token::NtMeta(attr_item) = &**nt
{ {
match attr_item.meta(attr_item.path.span) { match attr_item.meta(attr_item.path.span) {
Some(meta) => { Some(meta) => {

View file

@@ -2372,9 +2372,9 @@ impl<'a> Parser<'a> {
// in a subsequent macro invocation (#71039). // in a subsequent macro invocation (#71039).
let mut tok = self.token.clone(); let mut tok = self.token.clone();
let mut labels = vec![]; let mut labels = vec![];
while let TokenKind::Interpolated(node) = &tok.kind { while let TokenKind::Interpolated(nt) = &tok.kind {
let tokens = node.0.tokens(); let tokens = nt.tokens();
labels.push(node.clone()); labels.push(nt.clone());
if let Some(tokens) = tokens if let Some(tokens) = tokens
&& let tokens = tokens.to_attr_token_stream() && let tokens = tokens.to_attr_token_stream()
&& let tokens = tokens.0.deref() && let tokens = tokens.0.deref()
@@ -2387,27 +2387,20 @@ impl<'a> Parser<'a> {
} }
let mut iter = labels.into_iter().peekable(); let mut iter = labels.into_iter().peekable();
let mut show_link = false; let mut show_link = false;
while let Some(node) = iter.next() { while let Some(nt) = iter.next() {
let descr = node.0.descr(); let descr = nt.descr();
if let Some(next) = iter.peek() { if let Some(next) = iter.peek() {
let next_descr = next.0.descr(); let next_descr = next.descr();
if next_descr != descr { if next_descr != descr {
err.span_label(next.1, format!("this macro fragment matcher is {next_descr}")); err.span_label(next.use_span(), format!("this is expected to be {next_descr}"));
err.span_label(node.1, format!("this macro fragment matcher is {descr}"));
err.span_label( err.span_label(
next.0.use_span(), nt.use_span(),
format!("this is expected to be {next_descr}"),
);
err.span_label(
node.0.use_span(),
format!( format!(
"this is interpreted as {}, but it is expected to be {}", "this is interpreted as {}, but it is expected to be {}",
next_descr, descr, next_descr, descr,
), ),
); );
show_link = true; show_link = true;
} else {
err.span_label(node.1, "");
} }
} }
} }

View file

@@ -45,7 +45,7 @@ use thin_vec::{thin_vec, ThinVec};
macro_rules! maybe_whole_expr { macro_rules! maybe_whole_expr {
($p:expr) => { ($p:expr) => {
if let token::Interpolated(nt) = &$p.token.kind { if let token::Interpolated(nt) = &$p.token.kind {
match &nt.0 { match &**nt {
token::NtExpr(e) | token::NtLiteral(e) => { token::NtExpr(e) | token::NtLiteral(e) => {
let e = e.clone(); let e = e.clone();
$p.bump(); $p.bump();

View file

@@ -2841,7 +2841,7 @@ impl<'a> Parser<'a> {
fn is_named_param(&self) -> bool { fn is_named_param(&self) -> bool {
let offset = match &self.token.kind { let offset = match &self.token.kind {
token::Interpolated(nt) => match &nt.0 { token::Interpolated(nt) => match &**nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon), token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0, _ => 0,
}, },

View file

@@ -11,7 +11,6 @@ mod stmt;
mod ty; mod ty;
use crate::lexer::UnmatchedDelim; use crate::lexer::UnmatchedDelim;
use ast::token::IdentIsRaw;
pub use attr_wrapper::AttrWrapper; pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery; pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason; pub(crate) use expr::ForbiddenLetReason;
@@ -21,7 +20,7 @@ pub use path::PathStyle;
use core::fmt; use core::fmt;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing}; use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor}; use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
use rustc_ast::util::case::Case; use rustc_ast::util::case::Case;
@@ -32,6 +31,7 @@ use rustc_ast::{
}; };
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult}; use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
use rustc_session::parse::ParseSess; use rustc_session::parse::ParseSess;
use rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -107,7 +107,7 @@ pub enum TrailingToken {
macro_rules! maybe_whole { macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt) = &$p.token.kind if let token::Interpolated(nt) = &$p.token.kind
&& let token::$constructor(x) = &nt.0 && let token::$constructor(x) = &**nt
{ {
#[allow(unused_mut)] #[allow(unused_mut)]
let mut $x = x.clone(); let mut $x = x.clone();
@@ -125,7 +125,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
&& $self.may_recover() && $self.may_recover()
&& $self.look_ahead(1, |t| t == &token::PathSep) && $self.look_ahead(1, |t| t == &token::PathSep)
&& let token::Interpolated(nt) = &$self.token.kind && let token::Interpolated(nt) = &$self.token.kind
&& let token::NtTy(ty) = &nt.0 && let token::NtTy(ty) = &**nt
{ {
let ty = ty.clone(); let ty = ty.clone();
$self.bump(); $self.bump();
@@ -407,7 +407,7 @@ pub(super) fn token_descr(token: &Token) -> String {
(Some(TokenDescription::Keyword), _) => Some("keyword"), (Some(TokenDescription::Keyword), _) => Some("keyword"),
(Some(TokenDescription::ReservedKeyword), _) => Some("reserved keyword"), (Some(TokenDescription::ReservedKeyword), _) => Some("reserved keyword"),
(Some(TokenDescription::DocComment), _) => Some("doc comment"), (Some(TokenDescription::DocComment), _) => Some("doc comment"),
(None, TokenKind::Interpolated(node)) => Some(node.0.descr()), (None, TokenKind::Interpolated(node)) => Some(node.descr()),
(None, _) => None, (None, _) => None,
}; };
@@ -708,7 +708,7 @@ impl<'a> Parser<'a> {
fn check_inline_const(&self, dist: usize) -> bool { fn check_inline_const(&self, dist: usize) -> bool {
self.is_keyword_ahead(dist, &[kw::Const]) self.is_keyword_ahead(dist, &[kw::Const])
&& self.look_ahead(dist + 1, |t| match &t.kind { && self.look_ahead(dist + 1, |t| match &t.kind {
token::Interpolated(nt) => matches!(&nt.0, token::NtBlock(..)), token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
token::OpenDelim(Delimiter::Brace) => true, token::OpenDelim(Delimiter::Brace) => true,
_ => false, _ => false,
}) })
@@ -1631,19 +1631,7 @@ pub enum FlatToken {
// Metavar captures of various kinds. // Metavar captures of various kinds.
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum ParseNtResult<NtType> { pub enum ParseNtResult {
Tt(TokenTree), Tt(TokenTree),
Nt(NtType), Nt(Lrc<Nonterminal>),
}
impl<T> ParseNtResult<T> {
pub fn map_nt<F, U>(self, mut f: F) -> ParseNtResult<U>
where
F: FnMut(T) -> U,
{
match self {
ParseNtResult::Tt(tt) => ParseNtResult::Tt(tt),
ParseNtResult::Nt(nt) => ParseNtResult::Nt(f(nt)),
}
}
} }

View file

@@ -1,7 +1,8 @@
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Nonterminal, Nonterminal::*, NonterminalKind, Token}; use rustc_ast::token::{self, Delimiter, Nonterminal::*, NonterminalKind, Token};
use rustc_ast::HasTokens; use rustc_ast::HasTokens;
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::PResult; use rustc_errors::PResult;
use rustc_span::symbol::{kw, Ident}; use rustc_span::symbol::{kw, Ident};
@@ -54,7 +55,7 @@ impl<'a> Parser<'a> {
}, },
NonterminalKind::Block => match &token.kind { NonterminalKind::Block => match &token.kind {
token::OpenDelim(Delimiter::Brace) => true, token::OpenDelim(Delimiter::Brace) => true,
token::Interpolated(nt) => match &nt.0 { token::Interpolated(nt) => match &**nt {
NtBlock(_) | NtLifetime(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true, NtBlock(_) | NtLifetime(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
NtItem(_) | NtPat(_) | NtTy(_) | NtIdent(..) | NtMeta(_) | NtPath(_) NtItem(_) | NtPat(_) | NtTy(_) | NtIdent(..) | NtMeta(_) | NtPath(_)
| NtVis(_) => false, | NtVis(_) => false,
@@ -63,7 +64,7 @@ impl<'a> Parser<'a> {
}, },
NonterminalKind::Path | NonterminalKind::Meta => match &token.kind { NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
token::PathSep | token::Ident(..) => true, token::PathSep | token::Ident(..) => true,
token::Interpolated(nt) => may_be_ident(&nt.0), token::Interpolated(nt) => may_be_ident(nt),
_ => false, _ => false,
}, },
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => match &token.kind { NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => match &token.kind {
@@ -81,13 +82,13 @@ impl<'a> Parser<'a> {
token::BinOp(token::Shl) => true, // path (double UFCS) token::BinOp(token::Shl) => true, // path (double UFCS)
// leading vert `|` or-pattern // leading vert `|` or-pattern
token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr), token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
token::Interpolated(nt) => may_be_ident(&nt.0), token::Interpolated(nt) => may_be_ident(nt),
_ => false, _ => false,
}, },
NonterminalKind::Lifetime => match &token.kind { NonterminalKind::Lifetime => match &token.kind {
token::Lifetime(_) => true, token::Lifetime(_) => true,
token::Interpolated(nt) => { token::Interpolated(nt) => {
matches!(&nt.0, NtLifetime(_)) matches!(&**nt, NtLifetime(_))
} }
_ => false, _ => false,
}, },
@@ -100,10 +101,7 @@ impl<'a> Parser<'a> {
/// Parse a non-terminal (e.g. MBE `:pat` or `:ident`). Inlined because there is only one call /// Parse a non-terminal (e.g. MBE `:pat` or `:ident`). Inlined because there is only one call
/// site. /// site.
#[inline] #[inline]
pub fn parse_nonterminal( pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseNtResult> {
&mut self,
kind: NonterminalKind,
) -> PResult<'a, ParseNtResult<Nonterminal>> {
// A `macro_rules!` invocation may pass a captured item/expr to a proc-macro, // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
// which requires having captured tokens available. Since we cannot determine // which requires having captured tokens available. Since we cannot determine
// in advance whether or not a proc-macro will be (transitively) invoked, // in advance whether or not a proc-macro will be (transitively) invoked,
@@ -196,7 +194,7 @@ impl<'a> Parser<'a> {
); );
} }
Ok(ParseNtResult::Nt(nt)) Ok(ParseNtResult::Nt(Lrc::new(nt)))
} }
} }

View file

@@ -757,7 +757,7 @@ impl<'a> Parser<'a> {
// Make sure we don't allow e.g. `let mut $p;` where `$p:pat`. // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
if let token::Interpolated(nt) = &self.token.kind { if let token::Interpolated(nt) = &self.token.kind {
if let token::NtPat(..) = &nt.0 { if let token::NtPat(..) = &**nt {
self.expected_ident_found_err().emit(); self.expected_ident_found_err().emit();
} }
} }

View file

@@ -193,7 +193,7 @@ impl<'a> Parser<'a> {
maybe_whole!(self, NtPath, |path| reject_generics_if_mod_style(self, path.into_inner())); maybe_whole!(self, NtPath, |path| reject_generics_if_mod_style(self, path.into_inner()));
if let token::Interpolated(nt) = &self.token.kind { if let token::Interpolated(nt) = &self.token.kind {
if let token::NtTy(ty) = &nt.0 { if let token::NtTy(ty) = &**nt {
if let ast::TyKind::Path(None, path) = &ty.kind { if let ast::TyKind::Path(None, path) = &ty.kind {
let path = path.clone(); let path = path.clone();
self.bump(); self.bump();

View file

@@ -1,8 +1,6 @@
error: no rules expected the token `enum E {}` error: no rules expected the token `enum E {}`
--> $DIR/nonterminal-matching.rs:19:10 --> $DIR/nonterminal-matching.rs:19:10
| |
LL | macro complex_nonterminal($nt_item: item) {
| --------------
LL | macro n(a $nt_item b) { LL | macro n(a $nt_item b) {
| --------------------- when calling this macro | --------------------- when calling this macro
... ...

View file

@@ -73,14 +73,10 @@ error: expected expression, found pattern `1 + 1`
--> $DIR/trace_faulty_macros.rs:49:37 --> $DIR/trace_faulty_macros.rs:49:37
| |
LL | (let $p:pat = $e:expr) => {test!(($p,$e))}; LL | (let $p:pat = $e:expr) => {test!(($p,$e))};
| ------- -- this is interpreted as expression, but it is expected to be pattern | -- this is interpreted as expression, but it is expected to be pattern
| |
| this macro fragment matcher is expression
... ...
LL | (($p:pat, $e:pat)) => {let $p = $e;}; LL | (($p:pat, $e:pat)) => {let $p = $e;};
| ------ ^^ expected expression | ^^ expected expression
| |
| this macro fragment matcher is pattern
... ...
LL | test!(let x = 1+1); LL | test!(let x = 1+1);
| ------------------ | ------------------