Auto merge of #124141 - nnethercote:rm-Nonterminal-and-TokenKind-Interpolated, r=petrochenkov
Remove `Nonterminal` and `TokenKind::Interpolated`

A third attempt at this; the first attempt was #96724 and the second was #114647.

r? `@ghost`
commit f836ae4e66
61 changed files with 164 additions and 532 deletions
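The core of the change: a macro metavariable such as `$b:block` used to expand to a single `TokenKind::Interpolated` token owning a pre-parsed AST node; it now expands to the fragment's ordinary tokens wrapped in invisible delimiters tagged with a `MetaVarKind`. The sketch below uses stand-in types (not rustc's actual definitions) to illustrate the shape of that change; the real types appear in the hunks that follow.

```rust
use std::sync::Arc;

#[derive(Clone, Debug)]
struct Block; // stand-in for a pre-parsed AST node

// Before: a token could own an AST node, so `TokenKind` could not be `Copy`.
#[derive(Clone, Debug)]
enum OldTokenKind {
    Ident(&'static str),
    Interpolated(Arc<Block>), // an AST node hiding inside one token
}

// After: a fragment is plain tokens bracketed by invisible delimiters that
// record which metavariable kind produced them.
#[derive(Clone, Copy, Debug)]
enum MetaVarKind {
    Block,
}

#[derive(Clone, Copy, Debug)]
enum NewTokenKind {
    Ident(&'static str),
    OpenInvisible(MetaVarKind),
    CloseInvisible(MetaVarKind),
}

fn main() {
    // `$b:block` matching `{ x }` used to become one opaque token...
    let old = OldTokenKind::Interpolated(Arc::new(Block));
    // ...and now becomes a delimited run of the fragment's tokens
    // (the brace tokens are elided in this sketch).
    let new = [
        NewTokenKind::OpenInvisible(MetaVarKind::Block),
        NewTokenKind::Ident("x"),
        NewTokenKind::CloseInvisible(MetaVarKind::Block),
    ];
    println!("{old:?} -> {new:?}");
}
```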
@@ -6,7 +6,6 @@ use std::fmt;
 use std::marker::PhantomData;
 
 use crate::ptr::P;
-use crate::token::Nonterminal;
 use crate::tokenstream::LazyAttrTokenStream;
 use crate::{
     Arm, AssocItem, AttrItem, AttrKind, AttrVec, Attribute, Block, Crate, Expr, ExprField,
@@ -206,19 +205,6 @@ impl HasTokens for Attribute {
     }
 }
 
-impl HasTokens for Nonterminal {
-    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
-        match self {
-            Nonterminal::NtBlock(block) => block.tokens(),
-        }
-    }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
-        match self {
-            Nonterminal::NtBlock(block) => block.tokens_mut(),
-        }
-    }
-}
-
 /// A trait for AST nodes having (or not having) attributes.
 pub trait HasAttrs {
     /// This is `true` if this `HasAttrs` might support 'custom' (proc-macro) inner
@@ -6,7 +6,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(attr(deny(warnings)))
@@ -844,9 +844,9 @@ fn visit_lazy_tts<T: MutVisitor>(vis: &mut T, lazy_tts: &mut Option<LazyAttrToke
     visit_lazy_tts_opt_mut(vis, lazy_tts.as_mut());
 }
 
-/// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
-/// In practice the ident part is not actually used by specific visitors right now,
-/// but there's a test below checking that it works.
+/// Applies ident visitor if it's an ident. In practice this is not actually
+/// used by specific visitors right now, but there's a test below checking that
+/// it works.
 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
 pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
     let Token { kind, span } = t;
@@ -864,45 +864,11 @@ pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
         token::NtLifetime(ident, _is_raw) => {
             vis.visit_ident(ident);
         }
-        token::Interpolated(nt) => {
-            let nt = Arc::make_mut(nt);
-            visit_nonterminal(vis, nt);
-        }
         _ => {}
     }
     vis.visit_span(span);
 }
 
-// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
-/// Applies the visitor to elements of interpolated nodes.
-//
-// N.B., this can occur only when applying a visitor to partially expanded
-// code, where parsed pieces have gotten implanted into *other* macro
-// invocations. This is relevant for macro hygiene, but possibly not elsewhere.
-//
-// One problem here occurs because the types for flat_map_item, flat_map_stmt,
-// etc., allow the visitor to return *multiple* items; this is a problem for the
-// nodes here, because they insist on having exactly one piece. One solution
-// would be to mangle the MutVisitor trait to include one-to-many and
-// one-to-one versions of these entry points, but that would probably confuse a
-// lot of people and help very few. Instead, I'm just going to put in dynamic
-// checks. I think the performance impact of this will be pretty much
-// nonexistent. The danger is that someone will apply a `MutVisitor` to a
-// partially expanded node, and will be confused by the fact that their
-// `flat_map_item` or `flat_map_stmt` isn't getting called on `NtItem` or `NtStmt`
-// nodes. Hopefully they'll wind up reading this comment, and doing something
-// appropriate.
-//
-// BTW, design choice: I considered just changing the type of, e.g., `NtItem` to
-// contain multiple items, but decided against it when I looked at
-// `parse_item_or_view_item` and tried to figure out what I would do with
-// multiple items there....
-fn visit_nonterminal<T: MutVisitor>(vis: &mut T, nt: &mut token::Nonterminal) {
-    match nt {
-        token::NtBlock(block) => vis.visit_block(block),
-    }
-}
-
 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
 fn visit_defaultness<T: MutVisitor>(vis: &mut T, defaultness: &mut Defaultness) {
     match defaultness {
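With `Interpolated` gone, `visit_token` above no longer has an arm that recurses into an embedded AST node, and `visit_nonterminal` (with its long caveat comment) disappears entirely. A minimal sketch of the resulting visitor shape, with stand-in types rather than rustc's:

```rust
// Tokens are now "flat": the visitor only rewrites idents/lifetimes and
// spans, and never descends into an embedded AST node.

#[derive(Debug)]
enum TokenKind {
    NtIdent(String),
    NtLifetime(String),
    Comma,
}

#[derive(Debug)]
struct Token {
    kind: TokenKind,
    span: u32,
}

trait MutVisitor {
    fn visit_ident(&mut self, _ident: &mut String) {}
    fn visit_span(&mut self, _span: &mut u32) {}
}

fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
    match &mut t.kind {
        TokenKind::NtIdent(ident) | TokenKind::NtLifetime(ident) => {
            vis.visit_ident(ident)
        }
        _ => {} // no `Interpolated` arm: there is nothing to recurse into
    }
    vis.visit_span(&mut t.span);
}

fn main() {
    struct Renamer;
    impl MutVisitor for Renamer {
        fn visit_ident(&mut self, ident: &mut String) {
            ident.push_str("_renamed");
        }
    }
    let mut tok = Token { kind: TokenKind::NtIdent("x".into()), span: 0 };
    visit_token(&mut Renamer, &mut tok);
    println!("{tok:?}");
}
```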
@@ -1,13 +1,10 @@
 use std::borrow::Cow;
 use std::fmt;
-use std::sync::Arc;
 
 pub use LitKind::*;
-pub use Nonterminal::*;
 pub use NtExprKind::*;
 pub use NtPatKind::*;
 pub use TokenKind::*;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::edition::Edition;
 use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
@@ -16,7 +13,6 @@ use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
 use rustc_span::{Ident, Symbol};
 
 use crate::ast;
-use crate::ptr::P;
 use crate::util::case::Case;
 
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
@@ -34,10 +30,6 @@ pub enum InvisibleOrigin {
     // Converted from `proc_macro::Delimiter` in
     // `proc_macro::Delimiter::to_internal`, i.e. returned by a proc macro.
     ProcMacro,
-
-    // Converted from `TokenKind::Interpolated` in
-    // `TokenStream::flatten_token`. Treated similarly to `ProcMacro`.
-    FlattenToken,
 }
 
 impl PartialEq for InvisibleOrigin {
@@ -134,9 +126,7 @@ impl Delimiter {
         match self {
             Delimiter::Parenthesis | Delimiter::Bracket | Delimiter::Brace => false,
             Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) => false,
-            Delimiter::Invisible(InvisibleOrigin::FlattenToken | InvisibleOrigin::ProcMacro) => {
-                true
-            }
+            Delimiter::Invisible(InvisibleOrigin::ProcMacro) => true,
         }
     }
 
@@ -337,9 +327,7 @@ impl From<IdentIsRaw> for bool {
     }
 }
 
-// SAFETY: due to the `Clone` impl below, all fields of all variants other than
-// `Interpolated` must impl `Copy`.
-#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     /// `=`
|
@ -468,21 +456,6 @@ pub enum TokenKind {
|
||||||
/// the `lifetime` metavariable in the macro's RHS.
|
/// the `lifetime` metavariable in the macro's RHS.
|
||||||
NtLifetime(Ident, IdentIsRaw),
|
NtLifetime(Ident, IdentIsRaw),
|
||||||
|
|
||||||
/// An embedded AST node, as produced by a macro. This only exists for
|
|
||||||
/// historical reasons. We'd like to get rid of it, for multiple reasons.
|
|
||||||
/// - It's conceptually very strange. Saying a token can contain an AST
|
|
||||||
/// node is like saying, in natural language, that a word can contain a
|
|
||||||
/// sentence.
|
|
||||||
/// - It requires special handling in a bunch of places in the parser.
|
|
||||||
/// - It prevents `Token` from implementing `Copy`.
|
|
||||||
/// It adds complexity and likely slows things down. Please don't add new
|
|
||||||
/// occurrences of this token kind!
|
|
||||||
///
|
|
||||||
/// The span in the surrounding `Token` is that of the metavariable in the
|
|
||||||
/// macro's RHS. The span within the Nonterminal is that of the fragment
|
|
||||||
/// passed to the macro at the call site.
|
|
||||||
Interpolated(Arc<Nonterminal>),
|
|
||||||
|
|
||||||
/// A doc comment token.
|
/// A doc comment token.
|
||||||
/// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
|
/// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
|
||||||
/// similarly to symbols in string literal tokens.
|
/// similarly to symbols in string literal tokens.
|
||||||
|
@ -492,20 +465,7 @@ pub enum TokenKind {
|
||||||
Eof,
|
Eof,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Clone for TokenKind {
|
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
|
||||||
fn clone(&self) -> Self {
|
|
||||||
// `TokenKind` would impl `Copy` if it weren't for `Interpolated`. So
|
|
||||||
// for all other variants, this implementation of `clone` is just like
|
|
||||||
// a copy. This is faster than the `derive(Clone)` version which has a
|
|
||||||
// separate path for every variant.
|
|
||||||
match self {
|
|
||||||
Interpolated(nt) => Interpolated(Arc::clone(nt)),
|
|
||||||
_ => unsafe { std::ptr::read(self) },
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
|
|
||||||
pub struct Token {
|
pub struct Token {
|
||||||
pub kind: TokenKind,
|
pub kind: TokenKind,
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
|
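Dropping `Interpolated(Arc<Nonterminal>)` is what lets `TokenKind` (and, just above, `Token`) switch from a hand-written `Clone` built on `unsafe { std::ptr::read }` to a plain `derive(Clone, Copy)`; it is also why many `token.clone()` calls later in this diff become `*token` or a plain move. A minimal sketch of the call-site effect, with stand-in types rather than rustc's:

```rust
// Once no variant owns heap data, the whole enum can simply derive `Copy`,
// and call sites dereference or move instead of cloning.

#[derive(Clone, Copy, PartialEq, Debug)]
enum TokenKind {
    Eq,
    Ident(u32), // interned symbol index: plain data, fine to copy
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct Token {
    kind: TokenKind,
    span: (u32, u32),
}

fn main() {
    let t = Token { kind: TokenKind::Ident(42), span: (0, 2) };
    let copied = t; // was `t.clone()` when `TokenKind` could hold an `Arc`
    assert_eq!(t, copied); // original still usable: `Token` is `Copy`
}
```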
@@ -600,7 +560,7 @@ impl Token {
             | FatArrow | Pound | Dollar | Question | SingleQuote => true,
 
             OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
-            | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Interpolated(..) | Eof => false,
+            | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Eof => false,
         }
     }
 
@@ -631,7 +591,6 @@ impl Token {
             PathSep | // global path
             Lifetime(..) | // labeled loop
             Pound => true, // expression attributes
-            Interpolated(ref nt) => matches!(&**nt, NtBlock(..)),
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Block |
                 MetaVarKind::Expr { .. } |
@@ -703,7 +662,6 @@ impl Token {
         match self.kind {
             OpenDelim(Delimiter::Brace) | Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
-            Interpolated(ref nt) => matches!(&**nt, NtBlock(..)),
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Expr { .. } | MetaVarKind::Block | MetaVarKind::Literal,
             ))) => true,
@@ -831,31 +789,20 @@ impl Token {
     /// Is this a pre-parsed expression dropped into the token stream
     /// (which happens while parsing the result of macro expansion)?
     pub fn is_metavar_expr(&self) -> bool {
-        #[allow(irrefutable_let_patterns)] // FIXME: temporary
-        if let Interpolated(nt) = &self.kind
-            && let NtBlock(_) = &**nt
-        {
-            true
-        } else if matches!(
+        matches!(
             self.is_metavar_seq(),
-            Some(MetaVarKind::Expr { .. } | MetaVarKind::Literal | MetaVarKind::Path)
-        ) {
-            true
-        } else {
-            matches!(self.is_metavar_seq(), Some(MetaVarKind::Path))
-        }
+            Some(
+                MetaVarKind::Expr { .. }
+                    | MetaVarKind::Literal
+                    | MetaVarKind::Path
+                    | MetaVarKind::Block
+            )
+        )
     }
 
-    /// Is the token an interpolated block (`$b:block`)?
-    pub fn is_whole_block(&self) -> bool {
-        #[allow(irrefutable_let_patterns)] // FIXME: temporary
-        if let Interpolated(nt) = &self.kind
-            && let NtBlock(..) = &**nt
-        {
-            return true;
-        }
-
-        false
-    }
+    /// Are we at a block from a metavar (`$b:block`)?
+    pub fn is_metavar_block(&self) -> bool {
+        matches!(self.is_metavar_seq(), Some(MetaVarKind::Block))
+    }
 
     /// Returns `true` if the token is either the `mut` or `const` keyword.
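The predicates above now reduce to inspecting the invisible open delimiter's `MetaVarKind` via `is_metavar_seq`, instead of peeking inside an `Interpolated` AST node; `is_whole_block` is renamed to `is_metavar_block` accordingly. A self-contained sketch of that check style, with stand-in types and a simplified `is_metavar_seq` analogue:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum MetaVarKind { Block, Expr, Literal, Path }

#[derive(Clone, Copy, Debug)]
enum TokenKind {
    OpenInvisible(MetaVarKind),
    Ident,
}

struct Token { kind: TokenKind }

impl Token {
    // Analogue of `is_metavar_seq`: which metavar fragment starts here, if any?
    fn is_metavar_seq(&self) -> Option<MetaVarKind> {
        match self.kind {
            TokenKind::OpenInvisible(kind) => Some(kind),
            _ => None,
        }
    }

    // Analogue of `is_metavar_block` from the hunk above.
    fn is_metavar_block(&self) -> bool {
        matches!(self.is_metavar_seq(), Some(MetaVarKind::Block))
    }
}

fn main() {
    let t = Token { kind: TokenKind::OpenInvisible(MetaVarKind::Block) };
    assert!(t.is_metavar_block());
}
```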
@@ -1024,7 +971,7 @@ impl Token {
                 | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | DotDotDot | DotDotEq
                 | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question
                 | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..)
-                | Lifetime(..) | NtLifetime(..) | Interpolated(..) | DocComment(..) | Eof,
+                | Lifetime(..) | NtLifetime(..) | DocComment(..) | Eof,
                 _,
             ) => {
                 return None;
@@ -1063,12 +1010,6 @@ pub enum NtExprKind {
     Expr2021 { inferred: bool },
 }
 
-#[derive(Clone, Encodable, Decodable)]
-/// For interpolation during macro expansion.
-pub enum Nonterminal {
-    NtBlock(P<ast::Block>),
-}
-
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash, HashStable_Generic)]
 pub enum NonterminalKind {
     Item,
@@ -1152,47 +1093,6 @@ impl fmt::Display for NonterminalKind {
     }
 }
 
-impl Nonterminal {
-    pub fn use_span(&self) -> Span {
-        match self {
-            NtBlock(block) => block.span,
-        }
-    }
-
-    pub fn descr(&self) -> &'static str {
-        match self {
-            NtBlock(..) => "block",
-        }
-    }
-}
-
-impl PartialEq for Nonterminal {
-    fn eq(&self, _rhs: &Self) -> bool {
-        // FIXME: Assume that all nonterminals are not equal, we can't compare them
-        // correctly based on data from AST. This will prevent them from matching each other
-        // in macros. The comparison will become possible only when each nonterminal has an
-        // attached token stream from which it was parsed.
-        false
-    }
-}
-
-impl fmt::Debug for Nonterminal {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match *self {
-            NtBlock(..) => f.pad("NtBlock(..)"),
-        }
-    }
-}
-
-impl<CTX> HashStable<CTX> for Nonterminal
-where
-    CTX: crate::HashStableContext,
-{
-    fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
-        panic!("interpolated tokens should not be present in the HIR")
-    }
-}
-
 // Some types are used a lot. Make sure they don't unintentionally get bigger.
 #[cfg(target_pointer_width = "64")]
 mod size_asserts {
@@ -1202,7 +1102,6 @@ mod size_asserts {
     // tidy-alphabetical-start
     static_assert_size!(Lit, 12);
     static_assert_size!(LitKind, 2);
-    static_assert_size!(Nonterminal, 8);
     static_assert_size!(Token, 24);
     static_assert_size!(TokenKind, 16);
     // tidy-alphabetical-end
@@ -25,7 +25,7 @@ use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
 
 use crate::ast::AttrStyle;
 use crate::ast_traits::{HasAttrs, HasTokens};
-use crate::token::{self, Delimiter, InvisibleOrigin, Nonterminal, Token, TokenKind};
+use crate::token::{self, Delimiter, Token, TokenKind};
 use crate::{AttrVec, Attribute};
 
 /// Part of a `TokenStream`.
@@ -305,11 +305,6 @@ pub struct AttrsTarget {
 }
 
 /// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
-///
-/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
-/// instead of a representation of the abstract syntax tree.
-/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
-/// backwards compatibility.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
 pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
 
@@ -476,61 +471,6 @@ impl TokenStream {
         TokenStream::new(tts)
     }
 
-    pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
-        match nt {
-            Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
-        }
-    }
-
-    fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
-        match token.kind {
-            token::NtIdent(ident, is_raw) => {
-                TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing)
-            }
-            token::NtLifetime(ident, is_raw) => TokenTree::Delimited(
-                DelimSpan::from_single(token.span),
-                DelimSpacing::new(Spacing::JointHidden, spacing),
-                Delimiter::Invisible(InvisibleOrigin::FlattenToken),
-                TokenStream::token_alone(token::Lifetime(ident.name, is_raw), ident.span),
-            ),
-            token::Interpolated(ref nt) => TokenTree::Delimited(
-                DelimSpan::from_single(token.span),
-                DelimSpacing::new(Spacing::JointHidden, spacing),
-                Delimiter::Invisible(InvisibleOrigin::FlattenToken),
-                TokenStream::from_nonterminal_ast(&nt).flattened(),
-            ),
-            _ => TokenTree::Token(token.clone(), spacing),
-        }
-    }
-
-    fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
-        match tree {
-            TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
-            TokenTree::Delimited(span, spacing, delim, tts) => {
-                TokenTree::Delimited(*span, *spacing, *delim, tts.flattened())
-            }
-        }
-    }
-
-    #[must_use]
-    pub fn flattened(&self) -> TokenStream {
-        fn can_skip(stream: &TokenStream) -> bool {
-            stream.iter().all(|tree| match tree {
-                TokenTree::Token(token, _) => !matches!(
-                    token.kind,
-                    token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..)
-                ),
-                TokenTree::Delimited(.., inner) => can_skip(inner),
-            })
-        }
-
-        if can_skip(self) {
-            return self.clone();
-        }
-
-        self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
-    }
-
     // If `vec` is not empty, try to glue `tt` onto its last token. The return
     // value indicates if gluing took place.
     fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
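`from_nonterminal_ast`, `flatten_token`, and `flattened` existed only to lower `Interpolated` tokens back into plain token sequences before proc macros or re-parsing could see them; once fragments are stored as invisibly-delimited tokens from the start, the whole flattening pass can go. The user-visible invariant it preserved still holds, as this small example shows: a captured `$e:expr` keeps its own parsing priority because it substitutes as a grouped token sequence.

```rust
macro_rules! double {
    ($e:expr) => {
        // `$e` substitutes as an invisibly-delimited token sequence, so this
        // multiplication cannot tear apart a low-precedence expression.
        $e * 2
    };
}

fn main() {
    // Without the invisible grouping this would parse as `1 + (1 * 2)`, i.e. 3.
    assert_eq!(double!(1 + 1), 4);
}
```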
@@ -32,7 +32,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
@@ -917,7 +916,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     }
 
     fn lower_delim_args(&self, args: &DelimArgs) -> DelimArgs {
-        DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.flattened() }
+        DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.clone() }
     }
 
     /// Lower an associated item constraint.
@@ -5,14 +5,10 @@ pub mod state;
 use std::borrow::Cow;
 
 use rustc_ast as ast;
-use rustc_ast::token::{Nonterminal, Token, TokenKind};
+use rustc_ast::token::{Token, TokenKind};
 use rustc_ast::tokenstream::{TokenStream, TokenTree};
 pub use state::{AnnNode, Comments, PpAnn, PrintState, State, print_crate};
 
-pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
-    State::new().nonterminal_to_string(nt)
-}
-
 /// Print the token kind precisely, without converting `$crate` into its respective crate name.
 pub fn token_kind_to_string(tok: &TokenKind) -> Cow<'static, str> {
     State::new().token_kind_to_string(tok)

@@ -11,7 +11,7 @@ use std::sync::Arc;
 
 use rustc_ast::attr::AttrIdGenerator;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
 use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree};
 use rustc_ast::util::classify;
 use rustc_ast::util::comments::{Comment, CommentStyle};
@@ -876,14 +876,6 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
         }
     }
 
-    fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
-        // We extract the token stream from the AST fragment and pretty print
-        // it, rather than using AST pretty printing, because `Nonterminal` is
-        // slated for removal in #124141. (This method will also then be
-        // removed.)
-        self.tts_to_string(&TokenStream::from_nonterminal_ast(nt))
-    }
-
     /// Print the token kind precisely, without converting `$crate` into its respective crate name.
     fn token_kind_to_string(&self, tok: &TokenKind) -> Cow<'static, str> {
         self.token_kind_to_string_ext(tok, None)
@@ -976,8 +968,6 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
                 doc_comment_to_string(comment_kind, attr_style, data).into()
             }
             token::Eof => "<eof>".into(),
-
-            token::Interpolated(ref nt) => self.nonterminal_to_string(&nt).into(),
         }
     }
 
@@ -320,7 +320,7 @@ impl<'sess> AttributeParser<'sess> {
             ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(DelimArgs {
                 dspan: args.dspan,
                 delim: args.delim,
-                tokens: args.tokens.flattened(),
+                tokens: args.tokens.clone(),
             }),
             // This is an inert key-value attribute - it will never be visible to macros
             // after it gets lowered to HIR. Therefore, we can extract literals to handle

@@ -77,7 +77,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
@@ -485,25 +485,7 @@ impl<'a> MetaItemListParserContext<'a> {
         }
 
         // or a path.
-        let path =
-            if let Some(TokenTree::Token(Token { kind: token::Interpolated(_), span, .. }, _)) =
-                self.inside_delimiters.peek()
-            {
-                self.inside_delimiters.next();
-                // We go into this path if an expr ended up in an attribute that
-                // expansion did not turn into a literal. Say, `#[repr(align(macro!()))]`
-                // where the macro didn't expand to a literal. An error is already given
-                // for this at this point, and then we do continue. This makes this path
-                // reachable...
-                let e = self.dcx.span_delayed_bug(
-                    *span,
-                    "expr in place where literal is expected (builtin attr parsing)",
-                );
-
-                return Some(MetaItemOrLitParser::Err(*span, e));
-            } else {
-                self.next_path()?
-            };
+        let path = self.next_path()?;
 
         // Paths can be followed by:
         // - `(more meta items)` (another list)

@@ -2,7 +2,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
@@ -92,11 +92,7 @@ impl CfgEval<'_> {
         // the location of `#[cfg]` and `#[cfg_attr]` in the token stream. The tokenization
         // process is lossless, so this process is invisible to proc-macros.
 
-        // 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`)
-        // to `None`-delimited groups containing the corresponding tokens. This
-        // is normally delayed until the proc-macro server actually needs to
-        // provide a `TokenKind::Interpolated` to a proc-macro. We do this earlier,
-        // so that we can handle cases like:
+        // Interesting cases:
         //
         // ```rust
        // #[cfg_eval] #[cfg] $item
@@ -104,8 +100,8 @@ impl CfgEval<'_> {
         //
         // where `$item` is `#[cfg_attr] struct Foo {}`. We want to make
         // sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest
-        // way to do this is to do a single parse of a stream without any nonterminals.
-        let orig_tokens = annotatable.to_tokens().flattened();
+        // way to do this is to do a single parse of the token stream.
+        let orig_tokens = annotatable.to_tokens();
 
         // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
         // to the captured `AttrTokenStream` (specifically, we capture
@@ -5,7 +5,6 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]

@@ -2,7 +2,6 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]

@@ -1,7 +1,6 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]

@@ -3,7 +3,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
 // tidy-alphabetical-end

@@ -7,7 +7,6 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(decl_macro)]
@@ -237,18 +237,6 @@ impl<'a> StripUnconfigured<'a> {
                     inner = self.configure_tokens(&inner);
                     Some(AttrTokenTree::Delimited(sp, spacing, delim, inner))
                 }
-                AttrTokenTree::Token(
-                    Token {
-                        kind:
-                            TokenKind::NtIdent(..)
-                            | TokenKind::NtLifetime(..)
-                            | TokenKind::Interpolated(..),
-                        ..
-                    },
-                    _,
-                ) => {
-                    panic!("Nonterminal should have been flattened: {:?}", tree);
-                }
                 AttrTokenTree::Token(
                     Token { kind: TokenKind::OpenDelim(_) | TokenKind::CloseDelim(_), .. },
                     _,
@@ -66,9 +66,7 @@ pub(super) fn failed_to_match_macro(
     }
 
     if let MatcherLoc::Token { token: expected_token } = &remaining_matcher
-        && (matches!(expected_token.kind, TokenKind::Interpolated(_))
-            || matches!(token.kind, TokenKind::Interpolated(_))
-            || matches!(expected_token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_)))
+        && (matches!(expected_token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_)))
             || matches!(token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_))))
     {
         err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens");
@@ -162,7 +160,7 @@ impl<'dcx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'dcx, 'match
             .is_none_or(|failure| failure.is_better_position(*approx_position))
         {
             self.best_failure = Some(BestFailure {
-                token: token.clone(),
+                token: *token,
                 position_in_tokenstream: *approx_position,
                 msg,
                 remaining_matcher: self
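The note above fires when a fragment that was already captured by an outer macro is re-matched against literal tokens; the check now looks for invisible delimiters rather than `Interpolated` tokens, but the user-visible behavior is unchanged, as this example shows:

```rust
// A fragment captured as `$e:expr` arrives at the inner macro as one grouped
// fragment, so it cannot match a rule spelled out in raw tokens.

macro_rules! inner {
    (1 + 1) => { "matched tokens" };
    ($e:expr) => { "matched expr" };
}

macro_rules! outer {
    ($e:expr) => { inner!($e) }; // `$e` is passed along as one fragment
}

fn main() {
    // Direct use sees raw tokens; going through `outer!` does not.
    assert_eq!(inner!(1 + 1), "matched tokens");
    assert_eq!(outer!(1 + 1), "matched expr");
}
```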
@@ -179,7 +179,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
     for tt in tts {
         match tt {
             TokenTree::Token(token) => {
-                locs.push(MatcherLoc::Token { token: token.clone() });
+                locs.push(MatcherLoc::Token { token: *token });
             }
             TokenTree::Delimited(span, _, delimited) => {
                 let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
@@ -648,7 +648,7 @@ impl TtParser {
                 // There are no possible next positions AND we aren't waiting for the black-box
                 // parser: syntax error.
                 return Failure(T::build_failure(
-                    parser.token.clone(),
+                    parser.token,
                     parser.approx_token_stream_pos(),
                     "no rules expected this token in macro call",
                 ));

@@ -810,7 +810,7 @@ impl<'tt> FirstSets<'tt> {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -873,7 +873,7 @@ impl<'tt> FirstSets<'tt> {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }
 
                         assert!(first.maybe_empty);
@@ -949,7 +949,7 @@ impl<'tt> Clone for TtHandle<'tt> {
             // This variant *must* contain a `mbe::TokenTree::Token`, and not
            // any other variant of `mbe::TokenTree`.
             TtHandle::Token(mbe::TokenTree::Token(tok)) => {
-                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+                TtHandle::Token(mbe::TokenTree::Token(*tok))
             }
 
             _ => unreachable!(),
@@ -1125,7 +1125,7 @@ fn check_matcher_core<'tt>(
             let mut new;
             let my_suffix = if let Some(sep) = &seq_rep.separator {
                 new = suffix_first.clone();
-                new.add_one_maybe(TtHandle::from_token(sep.clone()));
+                new.add_one_maybe(TtHandle::from_token(*sep));
                 &new
             } else {
                 &suffix_first
@@ -283,7 +283,7 @@ fn parse_tree<'a>(
         }
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
+        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -321,7 +321,7 @@ fn parse_kleene_op(
     match iter.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
             Some(op) => Ok(Ok((op, token.span))),
-            None => Ok(Err(token.clone())),
+            None => Ok(Err(*token)),
         },
         tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
     }
@@ -1,5 +1,4 @@
 use std::mem;
-use std::sync::Arc;
 
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{
@@ -165,7 +164,7 @@ pub(super) fn transcribe<'a>(
                 if repeat_idx < repeat_len {
                     frame.idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
+                        result.push(TokenTree::Token(*sep, Spacing::Alone));
                     }
                     continue;
                 }
@@ -307,7 +306,9 @@ pub(super) fn transcribe<'a>(
                 let tt = match cur_matched {
                     MatchedSingle(ParseNtResult::Tt(tt)) => {
                         // `tt`s are emitted into the output stream directly as "raw tokens",
-                        // without wrapping them into groups.
+                        // without wrapping them into groups. Other variables are emitted into
+                        // the output stream as groups with `Delimiter::Invisible` to maintain
+                        // parsing priorities.
                         maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker)
                     }
                     MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
@@ -325,6 +326,11 @@ pub(super) fn transcribe<'a>(
                     MatchedSingle(ParseNtResult::Item(item)) => {
                         mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
                     }
+                    MatchedSingle(ParseNtResult::Block(block)) => mk_delimited(
+                        block.span,
+                        MetaVarKind::Block,
+                        TokenStream::from_ast(block),
+                    ),
                     MatchedSingle(ParseNtResult::Stmt(stmt)) => {
                         let stream = if let StmtKind::Empty = stmt.kind {
                             // FIXME: Properly collect tokens for empty statements.
@@ -385,15 +391,6 @@ pub(super) fn transcribe<'a>(
                     MatchedSingle(ParseNtResult::Vis(vis)) => {
                         mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
                     }
-                    MatchedSingle(ParseNtResult::Nt(nt)) => {
-                        // Other variables are emitted into the output stream as groups with
-                        // `Delimiter::Invisible` to maintain parsing priorities.
-                        // `Interpolated` is currently used for such groups in rustc parser.
-                        marker.visit_span(&mut sp);
-                        let use_span = nt.use_span();
-                        with_metavar_spans(|mspans| mspans.insert(use_span, sp));
-                        TokenTree::token_alone(token::Interpolated(Arc::clone(nt)), sp)
-                    }
                     MatchedSeq(..) => {
                         // We were unable to descend far enough. This is an error.
                         return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
@@ -441,7 +438,7 @@ pub(super) fn transcribe<'a>(
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             mbe::TokenTree::Token(token) => {
-                let mut token = token.clone();
+                let mut token = *token;
                 mut_visit::visit_token(&mut marker, &mut token);
                 let tt = TokenTree::Token(token, Spacing::Alone);
                 result.push(tt);
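Transcription now handles `$b:block` with a dedicated `ParseNtResult::Block` arm that wraps the block's tokens in a `MetaVarKind::Block` invisible delimiter via `mk_delimited`, replacing the catch-all `ParseNtResult::Nt` arm that emitted an `Interpolated` token. A sketch of the wrapping operation, with stand-in types and a simplified `mk_delimited` analogue of the helper used in the hunk above:

```rust
#[derive(Clone, Copy, Debug)]
enum MetaVarKind { Block }

#[derive(Debug)]
enum TokenTree {
    Token(&'static str),
    // (delimiter tag, inner stream), mirroring an invisible `Delimited` tree
    Delimited(MetaVarKind, Vec<TokenTree>),
}

// Simplified analogue of `mk_delimited`: tag the fragment's tokens with the
// metavariable kind that produced them.
fn mk_delimited(kind: MetaVarKind, stream: Vec<TokenTree>) -> TokenTree {
    TokenTree::Delimited(kind, stream)
}

fn main() {
    // Transcribing `$b:block` where `$b` matched `{ x }`:
    let block_tokens = vec![
        TokenTree::Token("{"),
        TokenTree::Token("x"),
        TokenTree::Token("}"),
    ];
    let tt = mk_delimited(MetaVarKind::Block, block_tokens);
    println!("{tt:?}");
}
```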
@@ -309,15 +309,6 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                 }));
             }
 
-            Interpolated(nt) => {
-                let stream = TokenStream::from_nonterminal_ast(&nt);
-                trees.push(TokenTree::Group(Group {
-                    delimiter: pm::Delimiter::None,
-                    stream: Some(stream),
-                    span: DelimSpan::from_single(span),
-                }))
-            }
-
             OpenDelim(..) | CloseDelim(..) => unreachable!(),
             Eof => unreachable!(),
         }
@@ -4,7 +4,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(associated_type_defaults)]
 #![feature(box_patterns)]
 #![feature(closure_track_caller)]

@@ -59,7 +59,6 @@ This API is completely unstable and subject to change.
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]

@@ -1,7 +1,6 @@
 // tidy-alphabetical-start
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]

@@ -2,7 +2,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![deny(missing_docs)]
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]

@@ -21,7 +21,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(array_windows)]

@@ -1,6 +1,5 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(coroutines)]

@@ -29,7 +29,6 @@
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::potential_query_instability)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(allocator_api)]

@@ -3,7 +3,6 @@
 // tidy-alphabetical-start
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(assert_matches)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]

@@ -1,5 +1,4 @@
 // tidy-alphabetical-start
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(assert_matches)]
 #![feature(associated_type_defaults)]
 #![feature(box_patterns)]

@@ -1,5 +1,4 @@
 // tidy-alphabetical-start
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]

@@ -1,5 +1,4 @@
 // tidy-alphabetical-start
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(file_buffered)]
 #![feature(if_let_guard)]
@@ -376,7 +376,7 @@ pub(super) fn check_for_substitution(
             ascii_name,
         })
    };
-    (token.clone(), sugg)
+    (*token, sugg)
 }
 
 /// Extract string if found at current position with given delimiters

@@ -4,7 +4,6 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
@@ -120,7 +120,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token.clone()))
+        let tokens = iter::once(FlatToken::Token(self.start_token))
             .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
             .take(self.num_calls as usize);
 
@@ -186,7 +186,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
 impl<'a> Parser<'a> {
     pub(super) fn collect_pos(&self) -> CollectPos {
         CollectPos {
-            start_token: (self.token.clone(), self.token_spacing),
+            start_token: (self.token, self.token_spacing),
             cursor_snapshot: self.token_cursor.clone(),
             start_pos: self.num_bump_calls,
         }
@@ -322,7 +322,7 @@ impl<'a> Parser<'a> {
         let mut recovered_ident = None;
         // we take this here so that the correct original token is retained in
         // the diagnostic, regardless of eager recovery.
-        let bad_token = self.token.clone();
+        let bad_token = self.token;
 
         // suggest prepending a keyword in identifier position with `r#`
         let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
@@ -382,7 +382,7 @@ impl<'a> Parser<'a> {
         // if the previous token is a valid keyword
         // that might use a generic, then suggest a correct
         // generic placement (later on)
-        let maybe_keyword = self.prev_token.clone();
+        let maybe_keyword = self.prev_token;
         if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
             // if we have a valid keyword, attempt to parse generics
             // also obtain the keywords symbol
@@ -530,7 +530,7 @@ impl<'a> Parser<'a> {
                 // let y = 42;
                 let guar = self.dcx().emit_err(ExpectedSemi {
                     span: self.token.span,
-                    token: self.token.clone(),
+                    token: self.token,
                     unexpected_token_label: None,
                     sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
                 });
@@ -555,7 +555,7 @@ impl<'a> Parser<'a> {
             let span = self.prev_token.span.shrink_to_hi();
             let guar = self.dcx().emit_err(ExpectedSemi {
                 span,
-                token: self.token.clone(),
+                token: self.token,
                 unexpected_token_label: Some(self.token.span),
                 sugg: ExpectedSemiSugg::AddSemi(span),
             });
@@ -801,7 +801,7 @@ impl<'a> Parser<'a> {
             let span = self.prev_token.span.shrink_to_hi();
             let mut err = self.dcx().create_err(ExpectedSemi {
                 span,
-                token: self.token.clone(),
+                token: self.token,
                 unexpected_token_label: Some(self.token.span),
                 sugg: ExpectedSemiSugg::AddSemi(span),
             });
@ -344,7 +344,7 @@ impl<'a> Parser<'a> {
|
||||||
fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
|
fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
|
||||||
self.dcx().emit_err(errors::FoundExprWouldBeStmt {
|
self.dcx().emit_err(errors::FoundExprWouldBeStmt {
|
||||||
span: self.token.span,
|
span: self.token.span,
|
||||||
token: self.token.clone(),
|
token: self.token,
|
||||||
suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
|
suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -417,7 +417,7 @@ impl<'a> Parser<'a> {
|
||||||
cur_op_span: Span,
|
cur_op_span: Span,
|
||||||
) -> PResult<'a, P<Expr>> {
|
) -> PResult<'a, P<Expr>> {
|
||||||
let rhs = if self.is_at_start_of_range_notation_rhs() {
|
let rhs = if self.is_at_start_of_range_notation_rhs() {
|
||||||
let maybe_lt = self.token.clone();
|
let maybe_lt = self.token;
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
Some(
|
Some(
|
||||||
self.parse_expr_assoc_with(Bound::Excluded(prec), attrs)
|
self.parse_expr_assoc_with(Bound::Excluded(prec), attrs)
|
||||||
|
@ -611,7 +611,7 @@ impl<'a> Parser<'a> {
|
||||||
|
|
||||||
/// Recover on `not expr` in favor of `!expr`.
|
/// Recover on `not expr` in favor of `!expr`.
|
||||||
fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
|
fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
|
||||||
let negated_token = self.look_ahead(1, |t| t.clone());
|
let negated_token = self.look_ahead(1, |t| *t);
|
||||||
|
|
||||||
let sub_diag = if negated_token.is_numeric_lit() {
|
let sub_diag = if negated_token.is_numeric_lit() {
|
||||||
errors::NotAsNegationOperatorSub::SuggestNotBitwise
|
errors::NotAsNegationOperatorSub::SuggestNotBitwise
|
||||||
|
@@ -637,9 +637,7 @@ impl<'a> Parser<'a> {
 /// Returns the span of expr if it was not interpolated, or the span of the interpolated token.
 fn interpolated_or_expr_span(&self, expr: &Expr) -> Span {
 match self.prev_token.kind {
-TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) | TokenKind::Interpolated(..) => {
-self.prev_token.span
-}
+TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) => self.prev_token.span,
 TokenKind::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
 // `expr.span` is the interpolated span, because invisible open
 // and close delims both get marked with the same span, one
@@ -1386,15 +1384,7 @@ impl<'a> Parser<'a> {
 maybe_recover_from_interpolated_ty_qpath!(self, true);

 let span = self.token.span;
-if let token::Interpolated(nt) = &self.token.kind {
-match &**nt {
-token::NtBlock(block) => {
-let block = block.clone();
-self.bump();
-return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None)));
-}
-};
-} else if let Some(expr) = self.eat_metavar_seq_with_matcher(
+if let Some(expr) = self.eat_metavar_seq_with_matcher(
 |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }),
 |this| {
 // Force collection (as opposed to just `parse_expr`) is required to avoid the

@@ -1415,9 +1405,13 @@ impl<'a> Parser<'a> {
 self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
 {
 return Ok(lit);
-} else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| {
-this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))
-}) {
+} else if let Some(block) =
+self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block())
+{
+return Ok(self.mk_expr(span, ExprKind::Block(block, None)));
+} else if let Some(path) =
+self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type))
+{
 return Ok(self.mk_expr(span, ExprKind::Path(None, path)));
 }

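The two hunks above show the replacement mechanism at work: instead of a single opaque `Interpolated` token carrying a pre-parsed AST node, a captured metavariable is now a token sequence wrapped in invisible delimiters tagged with a `MetaVarKind`, and helpers such as `eat_metavar_seq` re-parse the material between them. A heavily simplified, hypothetical model of that scheme, with toy token and parser types rather than the real `rustc_parse` API:

```rust
// Toy model: a captured `$e:expr` arrives as
// OpenInvisible(Expr) ... CloseInvisible(Expr) rather than as one opaque token.
#[derive(Copy, Clone, Debug, PartialEq)]
enum MetaVarKind {
    Expr,
    Block,
}

#[derive(Copy, Clone, Debug, PartialEq)]
enum Token {
    OpenInvisible(MetaVarKind),
    CloseInvisible(MetaVarKind),
    Ident(char),
    Plus,
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
}

impl Parser {
    fn peek(&self) -> Option<Token> {
        self.tokens.get(self.pos).copied()
    }

    fn bump(&mut self) {
        self.pos += 1;
    }

    /// Sketch of an `eat_metavar_seq`-style helper: if the next token opens an
    /// invisible delimiter of the wanted kind, parse the contents with `f` and
    /// consume the matching close delimiter.
    fn eat_metavar_seq<T>(
        &mut self,
        kind: MetaVarKind,
        f: impl FnOnce(&mut Self) -> T,
    ) -> Option<T> {
        if self.peek() == Some(Token::OpenInvisible(kind)) {
            self.bump(); // eat the invisible open delimiter
            let result = f(self);
            assert_eq!(self.peek(), Some(Token::CloseInvisible(kind)));
            self.bump(); // eat the invisible close delimiter
            Some(result)
        } else {
            None
        }
    }
}

fn main() {
    // Token stream for a captured expression metavariable holding `a + b`.
    let mut p = Parser {
        tokens: vec![
            Token::OpenInvisible(MetaVarKind::Expr),
            Token::Ident('a'),
            Token::Plus,
            Token::Ident('b'),
            Token::CloseInvisible(MetaVarKind::Expr),
        ],
        pos: 0,
    };
    let expr = p.eat_metavar_seq(MetaVarKind::Expr, |p| {
        let mut out = String::new();
        while let Some(tok) = p.peek() {
            match tok {
                Token::Ident(c) => out.push(c),
                Token::Plus => out.push('+'),
                _ => break,
            }
            p.bump();
        }
        out
    });
    assert_eq!(expr.as_deref(), Some("a+b"));
    // Asking for a different kind does not consume anything.
    assert!(p.eat_metavar_seq(MetaVarKind::Block, |_| ()).is_none());
}
```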
@@ -1612,7 +1606,7 @@ impl<'a> Parser<'a> {
 }

 fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
-let maybe_eq_tok = self.prev_token.clone();
+let maybe_eq_tok = self.prev_token;
 let (qself, path) = if self.eat_lt() {
 let lt_span = self.prev_token.span;
 let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| {

@@ -1671,7 +1665,7 @@ impl<'a> Parser<'a> {
 } else if self.eat_keyword(exp!(Loop)) {
 self.parse_expr_loop(label, lo)
 } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
-|| self.token.is_whole_block()
+|| self.token.is_metavar_block()
 {
 self.parse_expr_block(label, lo, BlockCheckMode::Default)
 } else if !ate_colon

@@ -2073,7 +2067,7 @@ impl<'a> Parser<'a> {
 &mut self,
 mk_lit_char: impl FnOnce(Symbol, Span) -> L,
 ) -> PResult<'a, L> {
-let token = self.token.clone();
+let token = self.token;
 let err = |self_: &Self| {
 let msg = format!("unexpected token: {}", super::token_descr(&token));
 self_.dcx().struct_span_err(token.span, msg)

@@ -2354,7 +2348,7 @@ impl<'a> Parser<'a> {
 }
 }

-if self.token.is_whole_block() {
+if self.token.is_metavar_block() {
 self.dcx().emit_err(errors::InvalidBlockMacroSegment {
 span: self.token.span,
 context: lo.to(self.token.span),

@@ -2379,7 +2373,7 @@ impl<'a> Parser<'a> {
 fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
 let lo = self.token.span;

-let before = self.prev_token.clone();
+let before = self.prev_token;
 let binder = if self.check_keyword(exp!(For)) {
 let lo = self.token.span;
 let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;

@@ -2411,8 +2405,8 @@ impl<'a> Parser<'a> {
 FnRetTy::Default(_) => {
 let restrictions =
 self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
-let prev = self.prev_token.clone();
-let token = self.token.clone();
+let prev = self.prev_token;
+let token = self.token;
 let attrs = self.parse_outer_attributes()?;
 match self.parse_expr_res(restrictions, attrs) {
 Ok((expr, _)) => expr,

@@ -2477,7 +2471,7 @@ impl<'a> Parser<'a> {
 if self.may_recover()
 && self.token.can_begin_expr()
 && !matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace))
-&& !self.token.is_whole_block()
+&& !self.token.is_metavar_block()
 {
 let snapshot = self.create_snapshot_for_diagnostic();
 let restrictions =

@@ -2659,7 +2653,7 @@ impl<'a> Parser<'a> {
 }
 } else {
 let attrs = self.parse_outer_attributes()?; // For recovery.
-let maybe_fatarrow = self.token.clone();
+let maybe_fatarrow = self.token;
 let block = if self.check(exp!(OpenBrace)) {
 self.parse_block()?
 } else if let Some(block) = recover_block_from_condition(self) {

@@ -3524,7 +3518,7 @@ impl<'a> Parser<'a> {
 self.token.is_keyword(kw::Do)
 && self.is_keyword_ahead(1, &[kw::Catch])
 && self
-.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
 && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
 }

@@ -3535,7 +3529,7 @@ impl<'a> Parser<'a> {
 fn is_try_block(&self) -> bool {
 self.token.is_keyword(kw::Try)
 && self
-.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
 && self.token_uninterpolated_span().at_least_rust_2018()
 }

@@ -3569,12 +3563,12 @@ impl<'a> Parser<'a> {
 // `async move {`
 self.is_keyword_ahead(lookahead + 1, &[kw::Move, kw::Use])
 && self.look_ahead(lookahead + 2, |t| {
-*t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+*t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()
 })
 ) || (
 // `async {`
 self.look_ahead(lookahead + 1, |t| {
-*t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+*t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()
 })
 ))
 }

@@ -3867,7 +3861,7 @@ impl<'a> Parser<'a> {
 return Err(this.dcx().create_err(errors::ExpectedStructField {
 span: this.look_ahead(1, |t| t.span),
 ident_span: this.token.span,
-token: this.look_ahead(1, |t| t.clone()),
+token: this.look_ahead(1, |t| *t),
 }));
 }
 let (ident, expr) = if is_shorthand {

@@ -1732,8 +1732,7 @@ impl<'a> Parser<'a> {
 self.expect_semi()?;
 body
 } else {
-let err =
-errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token);
 return Err(self.dcx().create_err(err));
 };

@@ -2303,7 +2302,7 @@ impl<'a> Parser<'a> {
 || self.token.is_keyword(kw::Union))
 && self.look_ahead(1, |t| t.is_ident())
 {
-let kw_token = self.token.clone();
+let kw_token = self.token;
 let kw_str = pprust::token_to_string(&kw_token);
 let item = self.parse_item(ForceCollect::No)?;
 let mut item = item.unwrap().span;

@@ -2536,7 +2535,7 @@ impl<'a> Parser<'a> {
 self.expect_semi()?;
 *sig_hi = self.prev_token.span;
 (AttrVec::new(), None)
-} else if self.check(exp!(OpenBrace)) || self.token.is_whole_block() {
+} else if self.check(exp!(OpenBrace)) || self.token.is_metavar_block() {
 self.parse_block_common(self.token.span, BlockCheckMode::Default, None)
 .map(|(attrs, body)| (attrs, Some(body)))?
 } else if self.token == token::Eq {
@@ -13,7 +13,6 @@ mod ty;

 use std::assert_matches::debug_assert_matches;
 use std::ops::Range;
-use std::sync::Arc;
 use std::{fmt, mem, slice};

 use attr_wrapper::{AttrWrapper, UsePreAttrPos};

@@ -24,8 +23,8 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 use path::PathStyle;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{
-self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtExprKind, NtPatKind,
-Token, TokenKind,
+self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token,
+TokenKind,
 };
 use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
@@ -98,21 +97,6 @@ pub enum ForceCollect {
 No,
 }

-#[macro_export]
-macro_rules! maybe_whole {
-($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-#[allow(irrefutable_let_patterns)] // FIXME: temporary
-if let token::Interpolated(nt) = &$p.token.kind
-&& let token::$constructor(x) = &**nt
-{
-#[allow(unused_mut)]
-let mut $x = x.clone();
-$p.bump();
-return Ok($e);
-}
-};
-}
-
 /// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
 #[macro_export]
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
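The deleted `maybe_whole!` macro was the old early-return path: if the current token was an interpolated node of the requested kind, clone the AST node out of it and return. Its callers now probe for an invisible-delimited sequence instead, as `parse_block_common` does later in this diff. A toy sketch of that control-flow change, under the stated simplifications (the `eat_block_metavar` helper below is a hypothetical stand-in, not a real API):

```rust
// Before: `maybe_whole!(self, NtBlock, |block| block)` expanded to an early
// `return` when the current token was an interpolated block. After: an
// ordinary `if let` over a metavar-sequence probe.
#[derive(Clone, Debug, PartialEq)]
struct Block(String);

struct Parser {
    // `Some(block)` stands in for "the next tokens are a `$b:block` capture".
    captured_block: Option<Block>,
}

impl Parser {
    // Simplified stand-in for `eat_metavar_seq(MetaVarKind::Block, ...)`:
    // if a block capture is next, "re-parse" and consume it.
    fn eat_block_metavar(&mut self) -> Option<Block> {
        self.captured_block.take()
    }

    fn parse_block_common(&mut self) -> Result<Block, String> {
        if let Some(block) = self.eat_block_metavar() {
            return Ok(block);
        }
        Err("expected `{`".to_string())
    }
}

fn main() {
    let mut p = Parser { captured_block: Some(Block("{ 1 + 1 }".into())) };
    assert_eq!(p.parse_block_common(), Ok(Block("{ 1 + 1 }".into())));
    // A second call finds no capture and falls through to the error path.
    assert!(p.parse_block_common().is_err());
}
```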
@@ -342,12 +326,12 @@ impl TokenCursor {
 // below can be removed.
 if let Some(tree) = self.curr.curr() {
 match tree {
-&TokenTree::Token(ref token, spacing) => {
+&TokenTree::Token(token, spacing) => {
 debug_assert!(!matches!(
 token.kind,
 token::OpenDelim(_) | token::CloseDelim(_)
 ));
-let res = (token.clone(), spacing);
+let res = (token, spacing);
 self.curr.bump();
 return res;
 }

@@ -459,7 +443,6 @@ pub fn token_descr(token: &Token) -> String {
 (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
 (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
 (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
-(None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
 (None, _) => format!("`{s}`"),
 }
 }
@@ -855,8 +838,10 @@ impl<'a> Parser<'a> {
 fn check_inline_const(&self, dist: usize) -> bool {
 self.is_keyword_ahead(dist, &[kw::Const])
 && self.look_ahead(dist + 1, |t| match &t.kind {
-token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
 token::OpenDelim(Delimiter::Brace) => true,
+token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+MetaVarKind::Block,
+))) => true,
 _ => false,
 })
 }
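`check_inline_const` illustrates how lookahead checks change shape: the `Interpolated(NtBlock(..))` arm is replaced by a match on an invisible open delimiter tagged `MetaVarKind::Block`. A self-contained toy version of that check (stand-in types again, not the real token definitions):

```rust
// Two ways a "a block can start here" check now succeeds: a literal `{`, or
// the invisible open delimiter produced for a `$b:block` metavariable.
#[derive(Copy, Clone, Debug, PartialEq)]
enum MetaVarKind {
    Block,
    Expr,
}

#[derive(Copy, Clone, Debug, PartialEq)]
enum Delimiter {
    Brace,
    Invisible(MetaVarKind),
}

#[derive(Copy, Clone, Debug, PartialEq)]
enum TokenKind {
    OpenDelim(Delimiter),
    Ident,
}

fn starts_block(kind: TokenKind) -> bool {
    match kind {
        TokenKind::OpenDelim(Delimiter::Brace) => true,
        // The arm that replaced `token::Interpolated(NtBlock(..))`.
        TokenKind::OpenDelim(Delimiter::Invisible(MetaVarKind::Block)) => true,
        _ => false,
    }
}

fn main() {
    assert!(starts_block(TokenKind::OpenDelim(Delimiter::Brace)));
    assert!(starts_block(TokenKind::OpenDelim(Delimiter::Invisible(MetaVarKind::Block))));
    assert!(!starts_block(TokenKind::OpenDelim(Delimiter::Invisible(MetaVarKind::Expr))));
    assert!(!starts_block(TokenKind::Ident));
}
```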
@@ -1402,7 +1387,7 @@ impl<'a> Parser<'a> {
 // Avoid const blocks and const closures to be parsed as const items
 if (self.check_const_closure() == is_closure)
 && !self
-.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
 && self.eat_keyword_case(exp!(Const), case)
 {
 Const::Yes(self.prev_token_uninterpolated_span())

@@ -1532,7 +1517,7 @@ impl<'a> Parser<'a> {
 _ => {
 let prev_spacing = self.token_spacing;
 self.bump();
-TokenTree::Token(self.prev_token.clone(), prev_spacing)
+TokenTree::Token(self.prev_token, prev_spacing)
 }
 }
 }

@@ -1718,7 +1703,7 @@ impl<'a> Parser<'a> {
 dbg_fmt.field("prev_token", &self.prev_token);
 let mut tokens = vec![];
 for i in 0..lookahead {
-let tok = self.look_ahead(i, |tok| tok.kind.clone());
+let tok = self.look_ahead(i, |tok| tok.kind);
 let is_eof = tok == TokenKind::Eof;
 tokens.push(tok);
 if is_eof {

@@ -1759,7 +1744,6 @@ impl<'a> Parser<'a> {
 pub fn token_uninterpolated_span(&self) -> Span {
 match &self.token.kind {
 token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
-token::Interpolated(nt) => nt.use_span(),
 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
 self.look_ahead(1, |t| t.span)
 }

@@ -1771,7 +1755,6 @@ impl<'a> Parser<'a> {
 pub fn prev_token_uninterpolated_span(&self) -> Span {
 match &self.prev_token.kind {
 token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
-token::Interpolated(nt) => nt.use_span(),
 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
 self.look_ahead(0, |t| t.span)
 }
@@ -1828,6 +1811,7 @@ pub enum ParseNtResult {
 Ident(Ident, IdentIsRaw),
 Lifetime(Ident, IdentIsRaw),
 Item(P<ast::Item>),
+Block(P<ast::Block>),
 Stmt(P<ast::Stmt>),
 Pat(P<ast::Pat>, NtPatKind),
 Expr(P<ast::Expr>, NtExprKind),

@@ -1836,7 +1820,4 @@ pub enum ParseNtResult {
 Meta(P<ast::AttrItem>),
 Path(P<ast::Path>),
 Vis(P<ast::Visibility>),
-
-/// This variant will eventually be removed, along with `Token::Interpolate`.
-Nt(Arc<Nonterminal>),
 }
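Taken together, these two hunks finish the `ParseNtResult` migration: blocks gain a typed `Block` variant and the catch-all `Nt(Arc<Nonterminal>)` escape hatch disappears, so every consumer must handle a typed variant. An abridged sketch of the resulting shape, with toy `String` payloads standing in for the real `P<ast::...>` nodes and only a few of the variants shown:

```rust
// Abridged, hypothetical mirror of the enum after this change.
#[derive(Debug)]
enum ParseNtResult {
    Ident(String),
    Block(String),
    Stmt(String),
    Expr(String),
}

fn describe(res: &ParseNtResult) -> String {
    match res {
        ParseNtResult::Ident(i) => format!("ident `{i}`"),
        // New in this diff: blocks get their own variant instead of
        // round-tripping through `Nt(NtBlock(..))`.
        ParseNtResult::Block(b) => format!("block {b}"),
        ParseNtResult::Stmt(s) => format!("stmt `{s}`"),
        ParseNtResult::Expr(e) => format!("expr `{e}`"),
    }
}

fn main() {
    let res = ParseNtResult::Block("{ x + 1 }".to_string());
    println!("{}", describe(&res));
}
```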
@@ -1,14 +1,7 @@
-use std::sync::Arc;
-
-use rustc_ast::HasTokens;
 use rustc_ast::ptr::P;
-use rustc_ast::token::Nonterminal::*;
 use rustc_ast::token::NtExprKind::*;
 use rustc_ast::token::NtPatKind::*;
-use rustc_ast::token::{
-self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token,
-};
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, NonterminalKind, Token};
-use rustc_ast_pretty::pprust;
 use rustc_errors::PResult;
 use rustc_span::{Ident, kw};

@@ -45,13 +38,6 @@ impl<'a> Parser<'a> {
 }
 }

-/// Old variant of `may_be_ident`. Being phased out.
-fn nt_may_be_ident(nt: &Nonterminal) -> bool {
-match nt {
-NtBlock(_) => false,
-}
-}
-
 match kind {
 // `expr_2021` and earlier
 NonterminalKind::Expr(Expr2021 { .. }) => {

@@ -83,16 +69,12 @@ impl<'a> Parser<'a> {
 | token::Ident(..)
 | token::NtIdent(..)
 | token::NtLifetime(..)
-| token::Interpolated(_)
 | token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => true,
 _ => token.can_begin_type(),
 },
 NonterminalKind::Block => match &token.kind {
 token::OpenDelim(Delimiter::Brace) => true,
 token::NtLifetime(..) => true,
-token::Interpolated(nt) => match &**nt {
-NtBlock(_) => true,
-},
 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k {
 MetaVarKind::Block
 | MetaVarKind::Stmt

@@ -112,7 +94,6 @@ impl<'a> Parser<'a> {
 },
 NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
 token::PathSep | token::Ident(..) | token::NtIdent(..) => true,
-token::Interpolated(nt) => nt_may_be_ident(nt),
 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
 may_be_ident(*kind)
 }
@@ -136,110 +117,85 @@ impl<'a> Parser<'a> {
 // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
 // which requires having captured tokens available. Since we cannot determine
 // in advance whether or not a proc-macro will be (transitively) invoked,
-// we always capture tokens for any `Nonterminal` which needs them.
+// we always capture tokens for any nonterminal that needs them.
-let mut nt = match kind {
+match kind {
 // Note that TT is treated differently to all the others.
-NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())),
+NonterminalKind::TT => Ok(ParseNtResult::Tt(self.parse_token_tree())),
 NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
-Some(item) => return Ok(ParseNtResult::Item(item)),
-None => {
-return Err(self
-.dcx()
-.create_err(UnexpectedNonterminal::Item(self.token.span)));
-}
+Some(item) => Ok(ParseNtResult::Item(item)),
+None => Err(self.dcx().create_err(UnexpectedNonterminal::Item(self.token.span))),
 },
 NonterminalKind::Block => {
 // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
 // the ':block' matcher does not support them
-NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
+Ok(ParseNtResult::Block(self.collect_tokens_no_attrs(|this| this.parse_block())?))
 }
 NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
-Some(stmt) => return Ok(ParseNtResult::Stmt(P(stmt))),
+Some(stmt) => Ok(ParseNtResult::Stmt(P(stmt))),
 None => {
-return Err(self
-.dcx()
-.create_err(UnexpectedNonterminal::Statement(self.token.span)));
+Err(self.dcx().create_err(UnexpectedNonterminal::Statement(self.token.span)))
 }
 },
-NonterminalKind::Pat(pat_kind) => {
-return Ok(ParseNtResult::Pat(
-self.collect_tokens_no_attrs(|this| match pat_kind {
-PatParam { .. } => this.parse_pat_no_top_alt(None, None),
-PatWithOr => this.parse_pat_no_top_guard(
-None,
-RecoverComma::No,
-RecoverColon::No,
-CommaRecoveryMode::EitherTupleOrPipe,
-),
-})?,
-pat_kind,
-));
-}
+NonterminalKind::Pat(pat_kind) => Ok(ParseNtResult::Pat(
+self.collect_tokens_no_attrs(|this| match pat_kind {
+PatParam { .. } => this.parse_pat_no_top_alt(None, None),
+PatWithOr => this.parse_pat_no_top_guard(
+None,
+RecoverComma::No,
+RecoverColon::No,
+CommaRecoveryMode::EitherTupleOrPipe,
+),
+})?,
+pat_kind,
+)),
 NonterminalKind::Expr(expr_kind) => {
-return Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind));
+Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind))
 }
 NonterminalKind::Literal => {
 // The `:literal` matcher does not support attributes.
-return Ok(ParseNtResult::Literal(
+Ok(ParseNtResult::Literal(
 self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
-));
+))
 }
-NonterminalKind::Ty => {
-return Ok(ParseNtResult::Ty(
-self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?,
-));
-}
-// this could be handled like a token, since it is one
+NonterminalKind::Ty => Ok(ParseNtResult::Ty(
+self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?,
+)),
+// This could be handled like a token, since it is one.
 NonterminalKind::Ident => {
-return if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
+if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
 self.bump();
 Ok(ParseNtResult::Ident(ident, is_raw))
 } else {
 Err(self.dcx().create_err(UnexpectedNonterminal::Ident {
 span: self.token.span,
-token: self.token.clone(),
+token: self.token,
 }))
-};
+}
-}
-NonterminalKind::Path => {
-return Ok(ParseNtResult::Path(P(
-self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?
-)));
-}
 }
+NonterminalKind::Path => Ok(ParseNtResult::Path(P(
+self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?
+))),
 NonterminalKind::Meta => {
-return Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?)));
+Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?)))
 }
 NonterminalKind::Vis => {
-return Ok(ParseNtResult::Vis(P(self.collect_tokens_no_attrs(|this| {
-this.parse_visibility(FollowedByType::Yes)
-})?)));
+Ok(ParseNtResult::Vis(P(self
+.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?)))
 }
 NonterminalKind::Lifetime => {
 // We want to keep `'keyword` parsing, just like `keyword` is still
 // an ident for nonterminal purposes.
-return if let Some((ident, is_raw)) = self.token.lifetime() {
+if let Some((ident, is_raw)) = self.token.lifetime() {
 self.bump();
 Ok(ParseNtResult::Lifetime(ident, is_raw))
 } else {
 Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime {
 span: self.token.span,
-token: self.token.clone(),
+token: self.token,
 }))
-};
+}
 }
 }
-};
-
-// If tokens are supported at all, they should be collected.
-if matches!(nt.tokens_mut(), Some(None)) {
-panic!(
-"Missing tokens for nt {:?} at {:?}: {:?}",
-nt,
-nt.use_span(),
-pprust::nonterminal_to_string(&nt)
-);
-}
-
-Ok(ParseNtResult::Nt(Arc::new(nt)))
 }
 }
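With the `Nt` fallback gone, `parse_nonterminal` no longer builds an intermediate `Nonterminal`, post-checks its collected tokens, and wraps it in an `Arc`; each arm now yields the final `Result` directly, which is also why the trailing `panic!` check could be deleted. The shape of that refactor, reduced to toy types as a hedged illustration rather than the real function:

```rust
// Each match arm produces the finished `Result`; there is no shared
// `let mut nt = match ...` followed by a wrap-up and validation step.
#[derive(Debug, PartialEq)]
enum Parsed {
    Block(String),
    Expr(String),
}

enum Kind {
    Block,
    Expr,
}

fn parse(kind: Kind, src: &str) -> Result<Parsed, String> {
    match kind {
        Kind::Block => Ok(Parsed::Block(src.to_string())),
        Kind::Expr => {
            if src.is_empty() {
                // Error arms return `Err` directly instead of deferring to a
                // post-match check of the built value.
                Err("expected an expression".to_string())
            } else {
                Ok(Parsed::Expr(src.to_string()))
            }
        }
    }
}

fn main() {
    assert_eq!(parse(Kind::Expr, "a + b"), Ok(Parsed::Expr("a + b".into())));
    assert!(parse(Kind::Expr, "").is_err());
}
```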
@@ -363,7 +363,7 @@ impl<'a> Parser<'a> {
 self.dcx().emit_err(TrailingVertNotAllowed {
 span: self.token.span,
 start: lo,
-token: self.token.clone(),
+token: self.token,
 note_double_vert: matches!(self.token.kind, token::OrOr),
 });
 self.bump();

@@ -1519,8 +1519,8 @@ impl<'a> Parser<'a> {
 etc = PatFieldsRest::Rest;
 let mut etc_sp = self.token.span;
 if first_etc_and_maybe_comma_span.is_none() {
-if let Some(comma_tok) = self
-.look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None })
+if let Some(comma_tok) =
+self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None })
 {
 let nw_span = self
 .psess

@@ -393,8 +393,8 @@ impl<'a> Parser<'a> {
 } else {
 // `(T, U) -> R`

-let prev_token_before_parsing = self.prev_token.clone();
-let token_before_parsing = self.token.clone();
+let prev_token_before_parsing = self.prev_token;
+let token_before_parsing = self.token;
 let mut snapshot = None;
 if self.may_recover()
 && prev_token_before_parsing == token::PathSep

@@ -23,8 +23,8 @@ use super::{
 AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
 Trailing, UsePreAttrPos,
 };
-use crate::errors::MalformedLoopLabel;
-use crate::{errors, exp, maybe_whole};
+use crate::errors::{self, MalformedLoopLabel};
+use crate::exp;

 impl<'a> Parser<'a> {
 /// Parses a statement. This stops just before trailing semicolons on everything but items.
@@ -696,9 +696,11 @@ impl<'a> Parser<'a> {
 blk_mode: BlockCheckMode,
 loop_header: Option<Span>,
 ) -> PResult<'a, (AttrVec, P<Block>)> {
-maybe_whole!(self, NtBlock, |block| (AttrVec::new(), block));
+if let Some(block) = self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) {
+return Ok((AttrVec::new(), block));
+}

-let maybe_ident = self.prev_token.clone();
+let maybe_ident = self.prev_token;
 self.maybe_recover_unexpected_block_label(loop_header);
 if !self.eat(exp!(OpenBrace)) {
 return self.error_block_no_opening_brace();
|
||||||
{
|
{
|
||||||
if self.token == token::Colon
|
if self.token == token::Colon
|
||||||
&& self.look_ahead(1, |token| {
|
&& self.look_ahead(1, |token| {
|
||||||
token.is_whole_block()
|
token.is_metavar_block()
|
||||||
|| matches!(
|
|| matches!(
|
||||||
token.kind,
|
token.kind,
|
||||||
token::Ident(
|
token::Ident(
|
||||||
|
|
|
@ -2554,7 +2554,7 @@ fn look(p: &Parser<'_>, dist: usize, kind: rustc_ast::token::TokenKind) {
|
||||||
// Do the `assert_eq` outside the closure so that `track_caller` works.
|
// Do the `assert_eq` outside the closure so that `track_caller` works.
|
||||||
// (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure
|
// (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure
|
||||||
// doesn't give the line number in the test below if the assertion fails.)
|
// doesn't give the line number in the test below if the assertion fails.)
|
||||||
let tok = p.look_ahead(dist, |tok| tok.clone());
|
let tok = p.look_ahead(dist, |tok| *tok);
|
||||||
assert_eq!(kind, tok.kind);
|
assert_eq!(kind, tok.kind);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -547,7 +547,7 @@ impl<'a> Parser<'a> {
|
||||||
|
|
||||||
// Recovery
|
// Recovery
|
||||||
mutbl = Mutability::Mut;
|
mutbl = Mutability::Mut;
|
||||||
let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing);
|
let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing);
|
||||||
self.bump();
|
self.bump();
|
||||||
self.bump_with((dyn_tok, dyn_tok_sp));
|
self.bump_with((dyn_tok, dyn_tok_sp));
|
||||||
}
|
}
|
||||||
|
@ -886,7 +886,7 @@ impl<'a> Parser<'a> {
|
||||||
/// ```
|
/// ```
|
||||||
fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
|
fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
|
||||||
let lo = self.token.span;
|
let lo = self.token.span;
|
||||||
let leading_token = self.prev_token.clone();
|
let leading_token = self.prev_token;
|
||||||
let has_parens = self.eat(exp!(OpenParen));
|
let has_parens = self.eat(exp!(OpenParen));
|
||||||
|
|
||||||
let bound = if self.token.is_lifetime() {
|
let bound = if self.token.is_lifetime() {
|
||||||
|
|
|
@ -6,7 +6,6 @@
|
||||||
|
|
||||||
// tidy-alphabetical-start
|
// tidy-alphabetical-start
|
||||||
#![allow(internal_features)]
|
#![allow(internal_features)]
|
||||||
#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
|
|
||||||
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
||||||
#![doc(rust_logo)]
|
#![doc(rust_logo)]
|
||||||
#![feature(let_chains)]
|
#![feature(let_chains)]
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
// tidy-alphabetical-start
|
// tidy-alphabetical-start
|
||||||
#![allow(internal_features)]
|
#![allow(internal_features)]
|
||||||
#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
|
|
||||||
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
||||||
#![doc(rust_logo)]
|
#![doc(rust_logo)]
|
||||||
#![feature(associated_type_defaults)]
|
#![feature(associated_type_defaults)]
|
||||||
|
|
|
@ -3,7 +3,6 @@
|
||||||
// tidy-alphabetical-start
|
// tidy-alphabetical-start
|
||||||
#![allow(internal_features)]
|
#![allow(internal_features)]
|
||||||
#![allow(unused_parens)]
|
#![allow(unused_parens)]
|
||||||
#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
|
|
||||||
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
||||||
#![doc(rust_logo)]
|
#![doc(rust_logo)]
|
||||||
#![feature(min_specialization)]
|
#![feature(min_specialization)]
|
||||||
|
|
|
@ -4,7 +4,6 @@
|
||||||
//! compiler.
|
//! compiler.
|
||||||
|
|
||||||
// tidy-alphabetical-start
|
// tidy-alphabetical-start
|
||||||
#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
|
|
||||||
#![feature(let_chains)]
|
#![feature(let_chains)]
|
||||||
// tidy-alphabetical-end
|
// tidy-alphabetical-end
|
||||||
|
|
||||||
|
|
|
@ -9,7 +9,6 @@
|
||||||
// tidy-alphabetical-start
|
// tidy-alphabetical-start
|
||||||
#![allow(internal_features)]
|
#![allow(internal_features)]
|
||||||
#![allow(rustc::usage_of_ty_tykind)]
|
#![allow(rustc::usage_of_ty_tykind)]
|
||||||
#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
|
|
||||||
#![doc(
|
#![doc(
|
||||||
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
|
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
|
||||||
test(attr(allow(unused_variables), deny(warnings)))
|
test(attr(allow(unused_variables), deny(warnings)))
|
||||||
|
|
|
@ -89,7 +89,6 @@
|
||||||
|
|
||||||
// tidy-alphabetical-start
|
// tidy-alphabetical-start
|
||||||
#![allow(internal_features)]
|
#![allow(internal_features)]
|
||||||
#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
|
|
||||||
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
||||||
#![doc(rust_logo)]
|
#![doc(rust_logo)]
|
||||||
#![feature(let_chains)]
|
#![feature(let_chains)]
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
// tidy-alphabetical-start
|
// tidy-alphabetical-start
|
||||||
#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
|
|
||||||
#![feature(never_type)]
|
#![feature(never_type)]
|
||||||
// tidy-alphabetical-end
|
// tidy-alphabetical-end
|
||||||
|
|
||||||
|
|
|
@ -6,7 +6,6 @@
|
||||||
|
|
||||||
// tidy-alphabetical-start
|
// tidy-alphabetical-start
|
||||||
#![allow(internal_features)]
|
#![allow(internal_features)]
|
||||||
#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
|
|
||||||
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
||||||
#![doc(rust_logo)]
|
#![doc(rust_logo)]
|
||||||
#![feature(assert_matches)]
|
#![feature(assert_matches)]
|
||||||
|
|
|
@@ -858,18 +858,18 @@ impl MacroArgParser {
 };

 self.result.push(ParsedMacroArg {
-kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok.clone()),
+kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok),
 });
 Some(())
 }

-fn update_buffer(&mut self, t: &Token) {
+fn update_buffer(&mut self, t: Token) {
 if self.buf.is_empty() {
-self.start_tok = t.clone();
+self.start_tok = t;
 } else {
 let needs_space = match next_space(&self.last_tok.kind) {
-SpaceState::Ident => ident_like(t),
-SpaceState::Punctuation => !ident_like(t),
+SpaceState::Ident => ident_like(&t),
+SpaceState::Punctuation => !ident_like(&t),
 SpaceState::Always => true,
 SpaceState::Never => false,
 };

@@ -878,7 +878,7 @@ impl MacroArgParser {
 }
 }

-self.buf.push_str(&pprust::token_to_string(t));
+self.buf.push_str(&pprust::token_to_string(&t));
 }

 fn need_space_prefix(&self) -> bool {

@@ -937,7 +937,7 @@ impl MacroArgParser {
 ) if self.is_meta_var => {
 self.add_meta_variable(&mut iter)?;
 }
-TokenTree::Token(ref t, _) => self.update_buffer(t),
+&TokenTree::Token(t, _) => self.update_buffer(t),
 &TokenTree::Delimited(_dspan, _spacing, delimited, ref tts) => {
 if !self.buf.is_empty() {
 if next_space(&self.last_tok.kind) == SpaceState::Always {
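The rustfmt hunks above apply the same `Copy`-token cleanup outside the compiler: `update_buffer` now takes its token by value, and the match binds `&TokenTree::Token(t, _)` by value instead of `ref t`, borrowing (`&t`) only where a reference is required. A toy sketch of that pattern, with hypothetical stand-in types:

```rust
// With a `Copy` token type, a match on a borrowed tree can copy the token out
// with `&Tree::Token(t, _)`; no `ref` binding or `clone()` is needed.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Token {
    ch: char,
}

enum Tree {
    Token(Token, u8),
    Other,
}

fn ident_like(t: &Token) -> bool {
    t.ch.is_alphabetic()
}

struct Buffer {
    buf: String,
}

impl Buffer {
    // Takes the token by value; a borrow is taken locally where needed.
    fn update_buffer(&mut self, t: Token) {
        if ident_like(&t) {
            self.buf.push(t.ch);
        }
    }
}

fn main() {
    let mut b = Buffer { buf: String::new() };
    let tree = Tree::Token(Token { ch: 'x' }, 0);
    match &tree {
        // Copies the `Copy` token out of the borrowed tree.
        &Tree::Token(t, _) => b.update_buffer(t),
        Tree::Other => {}
    }
    assert_eq!(b.buf, "x");
}
```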
@@ -1,7 +1,7 @@
 //
 //@ run-pass
 //
-// Description - ensure Interpolated blocks can act as valid function bodies
+// Description - ensure block metavariables can act as valid function bodies
 // Covered cases: free functions, struct methods, and default trait functions

 macro_rules! def_fn {

@@ -5,14 +5,14 @@ macro_rules! values {
 $(
 #[$attr]
 $token $($inner)? = $value,
+//~^ ERROR expected one of `!` or `::`, found `<eof>`
 )*
 }
 };
 }
-//~^^^^^ ERROR expected one of `(`, `,`, `=`, `{`, or `}`, found `ty` metavariable
+//~^^^^^^ ERROR expected one of `(`, `,`, `=`, `{`, or `}`, found `ty` metavariable
 //~| ERROR macro expansion ignores `ty` metavariable and any tokens following

 values!(STRING(1) as (String) => cfg(test),);
-//~^ ERROR expected one of `!` or `::`, found `<eof>`

 fn main() {}
@@ -22,10 +22,10 @@ LL | values!(STRING(1) as (String) => cfg(test),);
 = note: the usage of `values!` is likely invalid in item context

 error: expected one of `!` or `::`, found `<eof>`
-  --> $DIR/syntax-error-recovery.rs:15:9
+  --> $DIR/syntax-error-recovery.rs:7:17
    |
-LL | values!(STRING(1) as (String) => cfg(test),);
+LL | $token $($inner)? = $value,
    | ^^^^^^ expected one of `!` or `::`

 error: aborting due to 3 previous errors