Auto merge of #138083 - nnethercote:rm-NtItem-NtStmt, r=petrochenkov
Remove `NtItem` and `NtStmt`

Another piece of #124141. r? `@petrochenkov`
commit aaa2d47dae
49 changed files with 273 additions and 145 deletions
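For context, a user-level illustration (not part of this commit): item and statement fragments captured by one macro and forwarded to another stay opaque to further token matching. Previously that opaqueness came from a single `NtItem`/`NtStmt` nonterminal token; after this change it comes from the fragment's tokens being wrapped in invisible delimiters. The macro names below are made up for the example.

    // Illustration only; the macros here are hypothetical.
    macro_rules! inspect {
        (struct $name:ident ;) => { "saw raw `struct` tokens" };
        ($it:item) => { "saw an opaque item fragment" };
    }

    macro_rules! forward {
        ($it:item) => { inspect!($it) };
    }

    fn main() {
        // Passed directly, the raw tokens match the first rule.
        assert_eq!(inspect!(struct A;), "saw raw `struct` tokens");
        // Forwarded as a captured `$it:item`, they no longer do: the capture is
        // re-emitted as one opaque unit (formerly an `NtItem` token, now a token
        // stream wrapped in invisible delimiters).
        assert_eq!(forward!(struct B;), "saw an opaque item fragment");
    }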
@@ -7,7 +7,7 @@ use std::sync::Arc;
 use rustc_ast::attr::{AttributeExt, MarkedAttrs};
 use rustc_ast::ptr::P;
-use rustc_ast::token::Nonterminal;
+use rustc_ast::token::MetaVarKind;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::visit::{AssocCtxt, Visitor};
 use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, Item, NodeId, PatKind};
@@ -19,7 +19,7 @@ use rustc_feature::Features;
 use rustc_hir as hir;
 use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools};
 use rustc_parse::MACRO_ARGUMENTS;
-use rustc_parse::parser::Parser;
+use rustc_parse::parser::{ForceCollect, Parser};
 use rustc_session::config::CollapseMacroDebuginfo;
 use rustc_session::parse::ParseSess;
 use rustc_session::{Limit, Session};
@@ -1405,13 +1405,13 @@ pub fn parse_macro_name_and_helper_attrs(
 /// If this item looks like a specific enums from `rental`, emit a fatal error.
 /// See #73345 and #83125 for more details.
 /// FIXME(#73933): Remove this eventually.
-fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) {
+fn pretty_printing_compatibility_hack(item: &Item, psess: &ParseSess) {
     let name = item.ident.name;
     if name == sym::ProceduralMasqueradeDummyType
         && let ast::ItemKind::Enum(enum_def, _) = &item.kind
         && let [variant] = &*enum_def.variants
         && variant.ident.name == sym::Input
-        && let FileName::Real(real) = sess.source_map().span_to_filename(item.ident.span)
+        && let FileName::Real(real) = psess.source_map().span_to_filename(item.ident.span)
         && let Some(c) = real
             .local_path()
             .unwrap_or(Path::new(""))
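For reference, the item shape the conditions above look for, reconstructed from this very check (old `rental`/`procedural-masquerade` releases generated an enum of this form; the surrounding code in those crates is not shown here):

    // Reconstructed from the check above: a one-variant enum with exactly these
    // names is what triggers the back-compat fatal error, provided the file-path
    // conditions further down also hold.
    enum ProceduralMasqueradeDummyType {
        Input,
    }

    fn main() {
        // Only here so the snippet compiles as a standalone program.
        let _ = ProceduralMasqueradeDummyType::Input;
    }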
@@ -1429,7 +1429,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) {
     };
 
     if crate_matches {
-        sess.dcx().emit_fatal(errors::ProcMacroBackCompat {
+        psess.dcx().emit_fatal(errors::ProcMacroBackCompat {
             crate_name: "rental".to_string(),
             fixed_version: "0.5.6".to_string(),
         });
@@ -1437,7 +1437,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) {
     }
 }
 
-pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, sess: &Session) {
+pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, psess: &ParseSess) {
     let item = match ann {
         Annotatable::Item(item) => item,
         Annotatable::Stmt(stmt) => match &stmt.kind {
@@ -1446,17 +1446,36 @@ pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, sess: &S
         },
         _ => return,
     };
-    pretty_printing_compatibility_hack(item, sess)
+    pretty_printing_compatibility_hack(item, psess)
 }
 
-pub(crate) fn nt_pretty_printing_compatibility_hack(nt: &Nonterminal, sess: &Session) {
-    let item = match nt {
-        Nonterminal::NtItem(item) => item,
-        Nonterminal::NtStmt(stmt) => match &stmt.kind {
-            ast::StmtKind::Item(item) => item,
-            _ => return,
-        },
+pub(crate) fn stream_pretty_printing_compatibility_hack(
+    kind: MetaVarKind,
+    stream: &TokenStream,
+    psess: &ParseSess,
+) {
+    let item = match kind {
+        MetaVarKind::Item => {
+            let mut parser = Parser::new(psess, stream.clone(), None);
+            // No need to collect tokens for this simple check.
+            parser
+                .parse_item(ForceCollect::No)
+                .expect("failed to reparse item")
+                .expect("an actual item")
+        }
+        MetaVarKind::Stmt => {
+            let mut parser = Parser::new(psess, stream.clone(), None);
+            // No need to collect tokens for this simple check.
+            let stmt = parser
+                .parse_stmt(ForceCollect::No)
+                .expect("failed to reparse")
+                .expect("an actual stmt");
+            match &stmt.kind {
+                ast::StmtKind::Item(item) => item.clone(),
+                _ => return,
+            }
+        }
         _ => return,
     };
-    pretty_printing_compatibility_hack(item, sess)
+    pretty_printing_compatibility_hack(&item, psess)
 }

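The new `stream_pretty_printing_compatibility_hack` above reparses the captured token stream with the compiler-internal `Parser` to recover an item before running the check. Outside the compiler, the same reparse-then-inspect idea can be sketched with the stable `proc-macro2` and `syn` crates (an analogy under those assumptions, not the code in this diff):

    // Analogy only: parse a token stream back into an item and inspect its
    // shape, similar in spirit to the reparse done by the function above.
    use proc_macro2::TokenStream;
    use syn::{Item, parse2};

    fn is_single_variant_enum(stream: TokenStream) -> bool {
        match parse2::<Item>(stream) {
            Ok(Item::Enum(item_enum)) => item_enum.variants.len() == 1,
            _ => false,
        }
    }

A caller would pass in the token stream it received for an `item` fragment.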
@@ -7,7 +7,7 @@ use rustc_ast::token::{
     TokenKind,
 };
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
-use rustc_ast::{ExprKind, TyKind};
+use rustc_ast::{ExprKind, StmtKind, TyKind};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
 use rustc_parse::lexer::nfc_normalize;
@@ -323,6 +323,18 @@ pub(super) fn transcribe<'a>(
                     let kind = token::NtLifetime(*ident, *is_raw);
                     TokenTree::token_alone(kind, sp)
                 }
+                MatchedSingle(ParseNtResult::Item(item)) => {
+                    mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
+                }
+                MatchedSingle(ParseNtResult::Stmt(stmt)) => {
+                    let stream = if let StmtKind::Empty = stmt.kind {
+                        // FIXME: Properly collect tokens for empty statements.
+                        TokenStream::token_alone(token::Semi, stmt.span)
+                    } else {
+                        TokenStream::from_ast(stmt)
+                    };
+                    mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
+                }
                 MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => mk_delimited(
                     pat.span,
                     MetaVarKind::Pat(*pat_kind),

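The transcriber now emits `item` and `stmt` matches as token streams wrapped in invisible delimiters via `mk_delimited`. The closest analogue in the stable proc-macro API is a `Delimiter::None` group; a minimal sketch of that wrapping using `proc-macro2` (an analogy, not the compiler's `mk_delimited`):

    // Analogy only: wrap a fragment's tokens in a delimiter-less group so they
    // travel as a single unit without printing any brackets.
    use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};

    fn wrap_invisible(stream: TokenStream) -> TokenStream {
        TokenTree::Group(Group::new(Delimiter::None, stream)).into()
    }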
@@ -122,7 +122,7 @@ impl MultiItemModifier for DeriveProcMacro {
         // We had a lint for a long time, but now we just emit a hard error.
         // Eventually we might remove the special case hard error check
         // altogether. See #73345.
-        crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess);
+        crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess.psess);
         let input = item.to_tokens();
         let stream = {
             let _timer =

@@ -115,11 +115,43 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
 
         while let Some(tree) = iter.next() {
             let (Token { kind, span }, joint) = match tree.clone() {
-                tokenstream::TokenTree::Delimited(span, _, delim, tts) => {
-                    let delimiter = pm::Delimiter::from_internal(delim);
+                tokenstream::TokenTree::Delimited(span, _, mut delim, mut stream) => {
+                    // We used to have an alternative behaviour for crates that
+                    // needed it: a hack used to pass AST fragments to
+                    // attribute and derive macros as a single nonterminal
+                    // token instead of a token stream. Such token needs to be
+                    // "unwrapped" and not represented as a delimited group. We
+                    // had a lint for a long time, but now we just emit a hard
+                    // error. Eventually we might remove the special case hard
+                    // error check altogether. See #73345.
+                    if let Delimiter::Invisible(InvisibleOrigin::MetaVar(kind)) = delim {
+                        crate::base::stream_pretty_printing_compatibility_hack(
+                            kind,
+                            &stream,
+                            rustc.psess(),
+                        );
+                    }
+
+                    // In `mk_delimited` we avoid nesting invisible delimited
+                    // of the same `MetaVarKind`. Here we do the same but
+                    // ignore the `MetaVarKind` because it is discarded when we
+                    // convert it to a `Group`.
+                    while let Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) = delim {
+                        if stream.len() == 1
+                            && let tree = stream.iter().next().unwrap()
+                            && let tokenstream::TokenTree::Delimited(_, _, delim2, stream2) = tree
+                            && let Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) = delim2
+                        {
+                            delim = *delim2;
+                            stream = stream2.clone();
+                        } else {
+                            break;
+                        }
+                    }
+
                     trees.push(TokenTree::Group(Group {
-                        delimiter,
-                        stream: Some(tts),
+                        delimiter: pm::Delimiter::from_internal(delim),
+                        stream: Some(stream),
                         span: DelimSpan {
                             open: span.open,
                             close: span.close,

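The `while` loop above collapses nested invisible delimiters so that at most one wrapper survives the conversion to a proc-macro `Group`. The same logic, sketched against the stable `proc-macro2` API (an analogy; `Delimiter::None` stands in for the compiler's invisible `MetaVar` delimiters):

    // Analogy only: while a None-delimited group's sole content is another
    // None-delimited group, descend into the inner one, mirroring the loop above.
    use proc_macro2::{Delimiter, Group, TokenTree};

    fn collapse_nested_none(mut group: Group) -> Group {
        while group.delimiter() == Delimiter::None {
            let mut trees = group.stream().into_iter();
            match (trees.next(), trees.next()) {
                (Some(TokenTree::Group(inner)), None)
                    if inner.delimiter() == Delimiter::None =>
                {
                    group = inner;
                }
                _ => break,
            }
        }
        group
    }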
@@ -279,15 +311,6 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
 
-            Interpolated(nt) => {
-                let stream = TokenStream::from_nonterminal_ast(&nt);
-                // We used to have an alternative behaviour for crates that
-                // needed it: a hack used to pass AST fragments to
-                // attribute and derive macros as a single nonterminal
-                // token instead of a token stream. Such token needs to be
-                // "unwrapped" and not represented as a delimited group. We
-                // had a lint for a long time, but now we just emit a hard
-                // error. Eventually we might remove the special case hard
-                // error check altogether. See #73345.
-                crate::base::nt_pretty_printing_compatibility_hack(&nt, rustc.ecx.sess);
-                trees.push(TokenTree::Group(Group {
-                    delimiter: pm::Delimiter::None,
-                    stream: Some(stream),