More detail when expecting expression but encountering bad macro argument
Partially address #71039.
parent 1be1e84872
commit 4e418805da
27 changed files with 200 additions and 67 deletions
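To illustrate the scenario this commit improves diagnostics for, here is a minimal sketch adapted from the `trace_faulty_macros.rs` ui test added further down in this diff: an `$e:expr` fragment captured by one macro is forwarded to a second macro whose matcher re-binds it as `$e:pat`, so the parser later fails at a token it can only see as an opaque pattern fragment. The new labels point at both fragment matchers and explain the reinterpretation.

// Sketch of the problematic pattern (adapted from the new ui test in this commit).
macro_rules! test {
    // `$e` is captured as `expr`...
    (let $p:pat = $e:expr) => { test!(($p, $e)) };
    // ...but the inner invocation re-matches it as `pat`, and using it where an
    // expression is required then fails with
    // "expected expression, found pattern `1 + 1`".
    (($p:pat, $e:pat)) => { let $p = $e; };
}

fn main() {
    test!(let x = 1 + 1);
}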
@@ -342,7 +342,7 @@ impl MetaItem {
 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
 Path { span, segments, tokens: None }
 }
-Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &**nt {
+Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &nt.0 {
 token::Nonterminal::NtMeta(item) => return item.meta(item.path.span),
 token::Nonterminal::NtPath(path) => (**path).clone(),
 _ => return None,

@@ -764,7 +764,10 @@ pub fn visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
 return; // Avoid visiting the span for the second time.
 }
 token::Interpolated(nt) => {
-visit_nonterminal(Lrc::make_mut(nt), vis);
+let nt = Lrc::make_mut(nt);
+let (nt, sp) = (&mut nt.0, &mut nt.1);
+vis.visit_span(sp);
+visit_nonterminal(nt, vis);
 }
 _ => {}
 }

@@ -110,7 +110,7 @@ impl Lit {
 Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
 Literal(token_lit) => Some(token_lit),
 Interpolated(ref nt)
-if let NtExpr(expr) | NtLiteral(expr) = &**nt
+if let NtExpr(expr) | NtLiteral(expr) = &nt.0
 && let ast::ExprKind::Lit(token_lit) = expr.kind =>
 {
 Some(token_lit)

@@ -314,7 +314,7 @@ pub enum TokenKind {
 /// - It prevents `Token` from implementing `Copy`.
 /// It adds complexity and likely slows things down. Please don't add new
 /// occurrences of this token kind!
-Interpolated(Lrc<Nonterminal>),
+Interpolated(Lrc<(Nonterminal, Span)>),

 /// A doc comment token.
 /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)

@@ -421,7 +421,7 @@ impl Token {
 /// if they keep spans or perform edition checks.
 pub fn uninterpolated_span(&self) -> Span {
 match &self.kind {
-Interpolated(nt) => nt.span(),
+Interpolated(nt) => nt.0.use_span(),
 _ => self.span,
 }
 }

@@ -464,7 +464,7 @@ impl Token {
 ModSep | // global path
 Lifetime(..) | // labeled loop
 Pound => true, // expression attributes
-Interpolated(ref nt) => matches!(**nt, NtLiteral(..) |
+Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) |
 NtExpr(..) |
 NtBlock(..) |
 NtPath(..)),

@@ -488,7 +488,7 @@ impl Token {
 | DotDot | DotDotDot | DotDotEq // ranges
 | Lt | BinOp(Shl) // associated path
 | ModSep => true, // global path
-Interpolated(ref nt) => matches!(**nt, NtLiteral(..) |
+Interpolated(ref nt) => matches!(&nt.0, NtLiteral(..) |
 NtPat(..) |
 NtBlock(..) |
 NtPath(..)),

@@ -511,7 +511,7 @@ impl Token {
 Lifetime(..) | // lifetime bound in trait object
 Lt | BinOp(Shl) | // associated path
 ModSep => true, // global path
-Interpolated(ref nt) => matches!(**nt, NtTy(..) | NtPath(..)),
+Interpolated(ref nt) => matches!(&nt.0, NtTy(..) | NtPath(..)),
 // For anonymous structs or unions, which only appear in specific positions
 // (type of struct fields or union fields), we don't consider them as regular types
 _ => false,

@@ -522,7 +522,7 @@ impl Token {
 pub fn can_begin_const_arg(&self) -> bool {
 match self.kind {
 OpenDelim(Delimiter::Brace) => true,
-Interpolated(ref nt) => matches!(**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
+Interpolated(ref nt) => matches!(&nt.0, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
 _ => self.can_begin_literal_maybe_minus(),
 }
 }

@@ -576,7 +576,7 @@ impl Token {
 match self.uninterpolate().kind {
 Literal(..) | BinOp(Minus) => true,
 Ident(name, false) if name.is_bool_lit() => true,
-Interpolated(ref nt) => match &**nt {
+Interpolated(ref nt) => match &nt.0 {
 NtLiteral(_) => true,
 NtExpr(e) => match &e.kind {
 ast::ExprKind::Lit(_) => true,

@@ -597,9 +597,9 @@ impl Token {
 /// otherwise returns the original token.
 pub fn uninterpolate(&self) -> Cow<'_, Token> {
 match &self.kind {
-Interpolated(nt) => match **nt {
+Interpolated(nt) => match &nt.0 {
 NtIdent(ident, is_raw) => {
-Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span))
+Cow::Owned(Token::new(Ident(ident.name, *is_raw), ident.span))
 }
 NtLifetime(ident) => Cow::Owned(Token::new(Lifetime(ident.name), ident.span)),
 _ => Cow::Borrowed(self),

@@ -614,8 +614,8 @@ impl Token {
 // We avoid using `Token::uninterpolate` here because it's slow.
 match &self.kind {
 &Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)),
-Interpolated(nt) => match **nt {
-NtIdent(ident, is_raw) => Some((ident, is_raw)),
+Interpolated(nt) => match &nt.0 {
+NtIdent(ident, is_raw) => Some((*ident, *is_raw)),
 _ => None,
 },
 _ => None,

@@ -628,8 +628,8 @@ impl Token {
 // We avoid using `Token::uninterpolate` here because it's slow.
 match &self.kind {
 &Lifetime(name) => Some(Ident::new(name, self.span)),
-Interpolated(nt) => match **nt {
-NtLifetime(ident) => Some(ident),
+Interpolated(nt) => match &nt.0 {
+NtLifetime(ident) => Some(*ident),
 _ => None,
 },
 _ => None,

@@ -655,7 +655,7 @@ impl Token {
 /// Returns `true` if the token is an interpolated path.
 fn is_path(&self) -> bool {
 if let Interpolated(nt) = &self.kind
-&& let NtPath(..) = **nt
+&& let NtPath(..) = &nt.0
 {
 return true;
 }

@@ -668,7 +668,7 @@ impl Token {
 /// (which happens while parsing the result of macro expansion)?
 pub fn is_whole_expr(&self) -> bool {
 if let Interpolated(nt) = &self.kind
-&& let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = **nt
+&& let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = &nt.0
 {
 return true;
 }

@@ -679,7 +679,7 @@ impl Token {
 /// Is the token an interpolated block (`$b:block`)?
 pub fn is_whole_block(&self) -> bool {
 if let Interpolated(nt) = &self.kind
-&& let NtBlock(..) = **nt
+&& let NtBlock(..) = &nt.0
 {
 return true;
 }

@@ -927,7 +927,7 @@ impl fmt::Display for NonterminalKind {
 }

 impl Nonterminal {
-pub fn span(&self) -> Span {
+pub fn use_span(&self) -> Span {
 match self {
 NtItem(item) => item.span,
 NtBlock(block) => block.span,

@@ -941,6 +941,23 @@ impl Nonterminal {
 NtVis(vis) => vis.span,
 }
 }

+pub fn descr(&self) -> &'static str {
+match self {
+NtItem(..) => "item",
+NtBlock(..) => "block",
+NtStmt(..) => "statement",
+NtPat(..) => "pattern",
+NtExpr(..) => "expression",
+NtLiteral(..) => "literal",
+NtTy(..) => "type",
+NtIdent(..) => "identifier",
+NtLifetime(..) => "lifetime",
+NtMeta(..) => "attribute",
+NtPath(..) => "path",
+NtVis(..) => "visibility",
+}
+}
 }

 impl PartialEq for Nonterminal {

@@ -477,13 +477,13 @@ impl TokenStream {

 fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
 match &token.kind {
-token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = **nt => {
+token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = nt.0 => {
 TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing)
 }
 token::Interpolated(nt) => TokenTree::Delimited(
 DelimSpan::from_single(token.span),
 Delimiter::Invisible,
-TokenStream::from_nonterminal_ast(nt).flattened(),
+TokenStream::from_nonterminal_ast(&nt.0).flattened(),
 ),
 _ => TokenTree::Token(token.clone(), spacing),
 }

@@ -825,7 +825,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
 }
 token::Eof => "<eof>".into(),

-token::Interpolated(ref nt) => self.nonterminal_to_string(nt).into(),
+token::Interpolated(ref nt) => self.nonterminal_to_string(&nt.0).into(),
 }
 }

@@ -67,6 +67,12 @@ pub(super) fn failed_to_match_macro<'cx>(
 && (matches!(expected_token.kind, TokenKind::Interpolated(_))
 || matches!(token.kind, TokenKind::Interpolated(_)))
 {
+if let TokenKind::Interpolated(node) = &expected_token.kind {
+err.span_label(node.1, "");
+}
+if let TokenKind::Interpolated(node) = &token.kind {
+err.span_label(node.1, "");
+}
 err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens");
 err.note("see <https://doc.rust-lang.org/nightly/reference/macros-by-example.html#forwarding-a-matched-fragment> for more information");

@@ -397,7 +397,7 @@ pub(crate) enum NamedMatch {
 MatchedTokenTree(rustc_ast::tokenstream::TokenTree),

 // A metavar match of any type other than `tt`.
-MatchedNonterminal(Lrc<Nonterminal>),
+MatchedNonterminal(Lrc<(Nonterminal, rustc_span::Span)>),
 }

 /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)

@@ -692,7 +692,7 @@ impl TtParser {
 Ok(nt) => nt,
 };
 let m = match nt {
-ParseNtResult::Nt(nt) => MatchedNonterminal(Lrc::new(nt)),
+ParseNtResult::Nt(nt) => MatchedNonterminal(Lrc::new((nt, span))),
 ParseNtResult::Tt(tt) => MatchedTokenTree(tt),
 };
 mp.push_match(next_metavar, seq_depth, m);

@@ -126,7 +126,7 @@ impl MultiItemModifier for DeriveProcMacro {
 Annotatable::Stmt(stmt) => token::NtStmt(stmt),
 _ => unreachable!(),
 };
-TokenStream::token_alone(token::Interpolated(Lrc::new(nt)), DUMMY_SP)
+TokenStream::token_alone(token::Interpolated(Lrc::new((nt, span))), DUMMY_SP)
 } else {
 item.to_tokens()
 };

@@ -226,18 +226,23 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
 }));
 }

-Interpolated(nt) if let NtIdent(ident, is_raw) = *nt => trees
-.push(TokenTree::Ident(Ident { sym: ident.name, is_raw, span: ident.span })),
+Interpolated(ref nt) if let NtIdent(ident, is_raw) = &nt.0 => {
+trees.push(TokenTree::Ident(Ident {
+sym: ident.name,
+is_raw: *is_raw,
+span: ident.span,
+}))
+}

 Interpolated(nt) => {
-let stream = TokenStream::from_nonterminal_ast(&nt);
+let stream = TokenStream::from_nonterminal_ast(&nt.0);
 // A hack used to pass AST fragments to attribute and derive
 // macros as a single nonterminal token instead of a token
 // stream. Such token needs to be "unwrapped" and not
 // represented as a delimited group.
 // FIXME: It needs to be removed, but there are some
 // compatibility issues (see #73345).
-if crate::base::nt_pretty_printing_compatibility_hack(&nt, rustc.sess()) {
+if crate::base::nt_pretty_printing_compatibility_hack(&nt.0, rustc.sess()) {
 trees.extend(Self::from_internal((stream, rustc)));
 } else {
 trees.push(TokenTree::Group(Group {

@@ -249,7 +249,7 @@ impl<'a> Parser<'a> {
 /// The delimiters or `=` are still put into the resulting token stream.
 pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
 let item = match &self.token.kind {
-token::Interpolated(nt) => match &**nt {
+token::Interpolated(nt) => match &nt.0 {
 Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
 _ => None,
 },

@@ -369,7 +369,7 @@ impl<'a> Parser<'a> {
 /// ```
 pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
 let nt_meta = match &self.token.kind {
-token::Interpolated(nt) => match &**nt {
+token::Interpolated(nt) => match &nt.0 {
 token::NtMeta(e) => Some(e.clone()),
 _ => None,
 },

@@ -24,11 +24,12 @@ use crate::parser;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
+use rustc_ast::tokenstream::AttrTokenTree;
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
 AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingAnnotation, Block,
-BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Param, Pat, PatKind,
-Path, PathSegment, QSelf, Ty, TyKind,
+BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
+PatKind, Path, PathSegment, QSelf, Ty, TyKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;

@@ -2252,6 +2253,59 @@ impl<'a> Parser<'a> {
 err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
 }
 err.span_label(span, "expected expression");

+// Walk the chain of macro expansions for the current token to point at how the original
+// code was interpreted. This helps the user realize when a macro argument of one type is
+// later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
+// in a subsequent macro invocation (#71039).
+let mut tok = self.token.clone();
+let mut labels = vec![];
+while let TokenKind::Interpolated(node) = &tok.kind {
+let tokens = node.0.tokens();
+labels.push(node.clone());
+if let Some(tokens) = tokens
+&& let tokens = tokens.to_attr_token_stream()
+&& let tokens = tokens.0.deref()
+&& let [AttrTokenTree::Token(token, _)] = &tokens[..]
+{
+tok = token.clone();
+} else {
+break;
+}
+}
+let mut iter = labels.into_iter().peekable();
+let mut show_link = false;
+while let Some(node) = iter.next() {
+let descr = node.0.descr();
+if let Some(next) = iter.peek() {
+let next_descr = next.0.descr();
+if next_descr != descr {
+err.span_label(next.1, format!("this macro fragment matcher is {next_descr}"));
+err.span_label(node.1, format!("this macro fragment matcher is {descr}"));
+err.span_label(
+next.0.use_span(),
+format!("this is expected to be {next_descr}"),
+);
+err.span_label(
+node.0.use_span(),
+format!(
+"this is interpreted as {}, but it is expected to be {}",
+next_descr, descr,
+),
+);
+show_link = true;
+} else {
+err.span_label(node.1, "");
+}
+}
+}
+if show_link {
+err.note(
+"when forwarding a matched fragment to another macro-by-example, matchers in the \
+second macro will see an opaque AST of the fragment type, not the underlying \
+tokens",
+);
+}
 err
 }

@@ -46,7 +46,7 @@ use thin_vec::{thin_vec, ThinVec};
 macro_rules! maybe_whole_expr {
 ($p:expr) => {
 if let token::Interpolated(nt) = &$p.token.kind {
-match &**nt {
+match &nt.0 {
 token::NtExpr(e) | token::NtLiteral(e) => {
 let e = e.clone();
 $p.bump();

@@ -1952,7 +1952,7 @@ impl<'a> Parser<'a> {
 mk_lit_char: impl FnOnce(Symbol, Span) -> L,
 ) -> PResult<'a, L> {
 if let token::Interpolated(nt) = &self.token.kind
-&& let token::NtExpr(e) | token::NtLiteral(e) = &**nt
+&& let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
 && matches!(e.kind, ExprKind::Err)
 {
 let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }

@@ -123,7 +123,7 @@ impl<'a> Parser<'a> {
 // Don't use `maybe_whole` so that we have precise control
 // over when we bump the parser
 if let token::Interpolated(nt) = &self.token.kind
-&& let token::NtItem(item) = &**nt
+&& let token::NtItem(item) = &nt.0
 {
 let mut item = item.clone();
 self.bump();

@@ -2750,7 +2750,7 @@ impl<'a> Parser<'a> {

 fn is_named_param(&self) -> bool {
 let offset = match &self.token.kind {
-token::Interpolated(nt) => match **nt {
+token::Interpolated(nt) => match &nt.0 {
 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
 _ => 0,
 },

@@ -93,7 +93,7 @@ pub enum TrailingToken {
 macro_rules! maybe_whole {
 ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
 if let token::Interpolated(nt) = &$p.token.kind {
-if let token::$constructor(x) = &**nt {
+if let token::$constructor(x) = &nt.0 {
 let $x = x.clone();
 $p.bump();
 return Ok($e);

@@ -110,7 +110,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
 && $self.may_recover()
 && $self.look_ahead(1, |t| t == &token::ModSep)
 && let token::Interpolated(nt) = &$self.token.kind
-&& let token::NtTy(ty) = &**nt
+&& let token::NtTy(ty) = &nt.0
 {
 let ty = ty.clone();
 $self.bump();

@@ -367,12 +367,14 @@ impl TokenDescription {
 pub(super) fn token_descr(token: &Token) -> String {
 let name = pprust::token_to_string(token).to_string();

-let kind = TokenDescription::from_token(token).map(|kind| match kind {
-TokenDescription::ReservedIdentifier => "reserved identifier",
-TokenDescription::Keyword => "keyword",
-TokenDescription::ReservedKeyword => "reserved keyword",
-TokenDescription::DocComment => "doc comment",
-});
+let kind = match (TokenDescription::from_token(token), &token.kind) {
+(Some(TokenDescription::ReservedIdentifier), _) => Some("reserved identifier"),
+(Some(TokenDescription::Keyword), _) => Some("keyword"),
+(Some(TokenDescription::ReservedKeyword), _) => Some("reserved keyword"),
+(Some(TokenDescription::DocComment), _) => Some("doc comment"),
+(None, TokenKind::Interpolated(node)) => Some(node.0.descr()),
+(None, _) => None,
+};

 if let Some(kind) = kind { format!("{kind} `{name}`") } else { format!("`{name}`") }
 }

@@ -662,7 +664,7 @@ impl<'a> Parser<'a> {
 fn check_inline_const(&self, dist: usize) -> bool {
 self.is_keyword_ahead(dist, &[kw::Const])
 && self.look_ahead(dist + 1, |t| match &t.kind {
-token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
+token::Interpolated(nt) => matches!(&nt.0, token::NtBlock(..)),
 token::OpenDelim(Delimiter::Brace) => true,
 _ => false,
 })

@@ -50,12 +50,12 @@ impl<'a> Parser<'a> {
 NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
 NonterminalKind::Vis => match token.kind {
 // The follow-set of :vis + "priv" keyword + interpolated
-token::Comma | token::Ident(..) | token::Interpolated(..) => true,
+token::Comma | token::Ident(..) | token::Interpolated(_) => true,
 _ => token.can_begin_type(),
 },
 NonterminalKind::Block => match &token.kind {
 token::OpenDelim(Delimiter::Brace) => true,
-token::Interpolated(nt) => match **nt {
+token::Interpolated(nt) => match &nt.0 {
 NtBlock(_) | NtLifetime(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
 NtItem(_) | NtPat(_) | NtTy(_) | NtIdent(..) | NtMeta(_) | NtPath(_)
 | NtVis(_) => false,

@@ -64,7 +64,7 @@ impl<'a> Parser<'a> {
 },
 NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
 token::ModSep | token::Ident(..) => true,
-token::Interpolated(nt) => may_be_ident(nt),
+token::Interpolated(nt) => may_be_ident(&nt.0),
 _ => false,
 },
 NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {

@@ -75,7 +75,7 @@ impl<'a> Parser<'a> {
 token::BinOp(token::And) | // reference
 token::BinOp(token::Minus) | // negative literal
 token::AndAnd | // double reference
-token::Literal(..) | // literal
+token::Literal(_) | // literal
 token::DotDot | // range pattern (future compat)
 token::DotDotDot | // range pattern (future compat)
 token::ModSep | // path

@@ -83,14 +83,14 @@ impl<'a> Parser<'a> {
 token::BinOp(token::Shl) => true, // path (double UFCS)
 // leading vert `|` or-pattern
 token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
-token::Interpolated(nt) => may_be_ident(nt),
+token::Interpolated(nt) => may_be_ident(&nt.0),
 _ => false,
 }
 }
 NonterminalKind::Lifetime => match &token.kind {
 token::Lifetime(_) => true,
 token::Interpolated(nt) => {
-matches!(**nt, NtLifetime(_))
+matches!(&nt.0, NtLifetime(_))
 }
 _ => false,
 },

@@ -191,7 +191,7 @@ impl<'a> Parser<'a> {
 panic!(
 "Missing tokens for nt {:?} at {:?}: {:?}",
 nt,
-nt.span(),
+nt.use_span(),
 pprust::nonterminal_to_string(&nt)
 );
 }

@@ -592,7 +592,7 @@ impl<'a> Parser<'a> {

 // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
 if let token::Interpolated(nt) = &self.token.kind {
-if let token::NtPat(_) = **nt {
+if let token::NtPat(..) = &nt.0 {
 self.expected_ident_found_err().emit();
 }
 }

@@ -185,7 +185,7 @@ impl<'a> Parser<'a> {
 });

 if let token::Interpolated(nt) = &self.token.kind {
-if let token::NtTy(ty) = &**nt {
+if let token::NtTy(ty) = &nt.0 {
 if let ast::TyKind::Path(None, path) = &ty.kind {
 let path = path.clone();
 self.bump();

@@ -53,7 +53,7 @@ impl<'a> Parser<'a> {
 // Don't use `maybe_whole` so that we have precise control
 // over when we bump the parser
 if let token::Interpolated(nt) = &self.token.kind
-&& let token::NtStmt(stmt) = &**nt
+&& let token::NtStmt(stmt) = &nt.0
 {
 let mut stmt = stmt.clone();
 self.bump();

@@ -1,6 +1,6 @@
 macro_rules! get_opt {
 ($tgt:expr, $field:ident) => {
-if $tgt.has_$field() {} //~ ERROR expected `{`, found `foo`
+if $tgt.has_$field() {} //~ ERROR expected `{`, found identifier `foo`
 }
 }

@@ -1,4 +1,4 @@
-error: expected `{`, found `foo`
+error: expected `{`, found identifier `foo`
 --> $DIR/issue-39848.rs:3:21
 |
 LL | if $tgt.has_$field() {}

@@ -1,6 +1,8 @@
 error: no rules expected the token `enum E {}`
 --> $DIR/nonterminal-matching.rs:19:10
 |
+LL | macro complex_nonterminal($nt_item: item) {
+| --------------
 LL | macro n(a $nt_item b) {
 | --------------------- when calling this macro
 ...

@@ -9,7 +9,7 @@ macro_rules! values {
 }
 };
 }
-//~^^^^^ ERROR expected one of `(`, `,`, `=`, `{`, or `}`, found `(String)`
+//~^^^^^ ERROR expected one of `(`, `,`, `=`, `{`, or `}`, found type `(String)`
 //~| ERROR macro expansion ignores token `(String)` and any following

 values!(STRING(1) as (String) => cfg(test),);

@@ -1,4 +1,4 @@
-error: expected one of `(`, `,`, `=`, `{`, or `}`, found `(String)`
+error: expected one of `(`, `,`, `=`, `{`, or `}`, found type `(String)`
 --> $DIR/syntax-error-recovery.rs:7:26
 |
 LL | $token $($inner)? = $value,

@@ -41,3 +41,14 @@ fn use_bang_macro_as_attr() {}

 #[derive(Debug)] //~ ERROR `derive` may only be applied to `struct`s
 fn use_derive_macro_as_attr() {}
+
+macro_rules! test {
+(let $p:pat = $e:expr) => {test!(($p,$e))};
+// this should be expr
+// vvv
+(($p:pat, $e:pat)) => {let $p = $e;}; //~ ERROR expected expression, found pattern `1 + 1`
+}
+
+fn foo() {
+test!(let x = 1+1);
+}

@@ -50,7 +50,7 @@ LL | my_recursive_macro!();
 = note: expanding `my_recursive_macro! { }`
 = note: to `my_recursive_macro! () ;`

-error: expected expression, found `A { a: a, b: 0, c: _, .. }`
+error: expected expression, found pattern `A { a: a, b: 0, c: _, .. }`
 --> $DIR/trace_faulty_macros.rs:16:9
 |
 LL | $a

@@ -69,6 +69,28 @@ LL | #[derive(Debug)]
 LL | fn use_derive_macro_as_attr() {}
 | -------------------------------- not a `struct`, `enum` or `union`

+error: expected expression, found pattern `1 + 1`
+--> $DIR/trace_faulty_macros.rs:49:37
+|
+LL | (let $p:pat = $e:expr) => {test!(($p,$e))};
+| ------- -- this is interpreted as expression, but it is expected to be pattern
+| |
+| this macro fragment matcher is expression
+...
+LL | (($p:pat, $e:pat)) => {let $p = $e;};
+| ------ ^^ expected expression
+| |
+| this macro fragment matcher is pattern
+...
+LL | test!(let x = 1+1);
+| ------------------
+| | |
+| | this is expected to be expression
+| in this macro invocation
+|
+= note: when forwarding a matched fragment to another macro-by-example, matchers in the second macro will see an opaque AST of the fragment type, not the underlying tokens
+= note: this error originates in the macro `test` (in Nightly builds, run with -Z macro-backtrace for more info)
+
 note: trace_macro
 --> $DIR/trace_faulty_macros.rs:36:13
 |

|
@ -80,6 +102,17 @@ LL | let a = pat_macro!();
|
||||||
= note: expanding `pat_macro! { A { a : a, b : 0, c : _, .. } }`
|
= note: expanding `pat_macro! { A { a : a, b : 0, c : _, .. } }`
|
||||||
= note: to `A { a: a, b: 0, c: _, .. }`
|
= note: to `A { a: a, b: 0, c: _, .. }`
|
||||||
|
|
||||||
error: aborting due to 4 previous errors
|
note: trace_macro
|
||||||
|
--> $DIR/trace_faulty_macros.rs:53:5
|
||||||
|
|
|
||||||
|
LL | test!(let x = 1+1);
|
||||||
|
| ^^^^^^^^^^^^^^^^^^
|
||||||
|
|
|
||||||
|
= note: expanding `test! { let x = 1 + 1 }`
|
||||||
|
= note: to `test! ((x, 1 + 1))`
|
||||||
|
= note: expanding `test! { (x, 1 + 1) }`
|
||||||
|
= note: to `let x = 1 + 1 ;`
|
||||||
|
|
||||||
|
error: aborting due to 5 previous errors
|
||||||
|
|
||||||
For more information about this error, try `rustc --explain E0774`.
|
For more information about this error, try `rustc --explain E0774`.
|
||||||
|
|
|
@@ -6,9 +6,9 @@ macro_rules! generate_field_accesses {

 s.$a; // OK
 { s.$b; } //~ ERROR unexpected token: `1.1`
-//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1`
+//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found literal `1.1`
 { s.$c; } //~ ERROR unexpected token: `1.1`
-//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1`
+//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found expression `1.1`
 };
 }

@@ -9,7 +9,7 @@ LL | generate_field_accesses!(1.1, 1.1, 1.1);
 |
 = note: this error originates in the macro `generate_field_accesses` (in Nightly builds, run with -Z macro-backtrace for more info)

-error: expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1`
+error: expected one of `.`, `;`, `?`, `}`, or an operator, found literal `1.1`
 --> $DIR/float-field-interpolated.rs:8:13
 |
 LL | { s.$b; }

@@ -31,7 +31,7 @@ LL | generate_field_accesses!(1.1, 1.1, 1.1);
 |
 = note: this error originates in the macro `generate_field_accesses` (in Nightly builds, run with -Z macro-backtrace for more info)

-error: expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1`
+error: expected one of `.`, `;`, `?`, `}`, or an operator, found expression `1.1`
 --> $DIR/float-field-interpolated.rs:10:13
 |
 LL | { s.$c; }