Remove pretty-print/reparse hack, and add derive-specific hack

Aaron Hill 2020-11-23 01:43:55 -05:00
parent 2987785df3
commit 530a629635
No known key found for this signature in database
GPG key ID: B4087E510E98B164
8 changed files with 186 additions and 410 deletions

@@ -37,7 +37,7 @@
 use rustc_ast::node_id::NodeMap;
 use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, DelimSpan, TokenStream, TokenTree};
 use rustc_ast::visit::{self, AssocCtxt, Visitor};
 use rustc_ast::walk_list;
 use rustc_ast::{self as ast, *};
@@ -206,7 +206,8 @@ pub trait ResolverAstLowering {
     ) -> LocalDefId;
 }
 
-type NtToTokenstream = fn(&Nonterminal, &ParseSess, Span) -> TokenStream;
+type NtToTokenstream =
+    fn(&Nonterminal, &ParseSess, Span, CanSynthesizeMissingTokens) -> TokenStream;
 
 /// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
 /// and if so, what meaning it has.
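The signature change above is the heart of the new approach: the nonterminal-to-tokenstream callback is now told whether it is allowed to fabricate a token stream when a nonterminal carries no captured tokens. A minimal sketch of that flag, assuming it is the plain two-variant enum exported from `rustc_ast::tokenstream` along with the import added above (the derives are an assumption):

// Sketch only: the synthesis policy passed as the new fourth argument of
// `NtToTokenstream`. The real definition lives in `rustc_ast::tokenstream`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CanSynthesizeMissingTokens {
    Yes,
    No,
}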
@@ -393,6 +394,47 @@ enum AnonymousLifetimeMode {
     PassThrough,
 }
 
+struct TokenStreamLowering<'a> {
+    parse_sess: &'a ParseSess,
+    synthesize_tokens: CanSynthesizeMissingTokens,
+    nt_to_tokenstream: NtToTokenstream,
+}
+
+impl<'a> TokenStreamLowering<'a> {
+    fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
+        tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
+    }
+
+    fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
+        match tree {
+            TokenTree::Token(token) => self.lower_token(token),
+            TokenTree::Delimited(span, delim, tts) => {
+                TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
+            }
+        }
+    }
+
+    fn lower_token(&mut self, token: Token) -> TokenStream {
+        match token.kind {
+            token::Interpolated(nt) => {
+                let tts = (self.nt_to_tokenstream)(
+                    &nt,
+                    self.parse_sess,
+                    token.span,
+                    self.synthesize_tokens,
+                );
+                TokenTree::Delimited(
+                    DelimSpan::from_single(token.span),
+                    DelimToken::NoDelim,
+                    self.lower_token_stream(tts),
+                )
+                .into()
+            }
+            _ => TokenTree::Token(token).into(),
+        }
+    }
+}
+
 struct ImplTraitTypeIdVisitor<'a> {
     ids: &'a mut SmallVec<[NodeId; 1]>,
 }
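The new `TokenStreamLowering` walker above replaces every interpolated nonterminal it meets with the tokens returned by `nt_to_tokenstream`, wrapped in a `NoDelim` delimited group anchored at the original span; all other tokens pass through unchanged. A rough usage sketch with synthesis disabled, assuming the type aliases from this file are in scope (the helper name `flatten_nonterminals` is illustrative; the real wiring is the `lower_token_stream` wrapper in the next hunk):

use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
use rustc_session::parse::ParseSess;

// Illustrative helper, not part of the commit: flatten interpolated
// nonterminals in `tokens` without ever synthesizing missing token streams.
fn flatten_nonterminals(
    parse_sess: &ParseSess,
    nt_to_tokenstream: NtToTokenstream,
    tokens: TokenStream,
) -> TokenStream {
    let mut lowering = TokenStreamLowering {
        parse_sess,
        synthesize_tokens: CanSynthesizeMissingTokens::No,
        nt_to_tokenstream,
    };
    lowering.lower_token_stream(tokens)
}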
@@ -955,42 +997,51 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         match *args {
             MacArgs::Empty => MacArgs::Empty,
             MacArgs::Delimited(dspan, delim, ref tokens) => {
-                MacArgs::Delimited(dspan, delim, self.lower_token_stream(tokens.clone()))
-            }
-            MacArgs::Eq(eq_span, ref tokens) => {
-                MacArgs::Eq(eq_span, self.lower_token_stream(tokens.clone()))
-            }
-        }
-    }
-
-    fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
-        tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
-    }
-
-    fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
-        match tree {
-            TokenTree::Token(token) => self.lower_token(token),
-            TokenTree::Delimited(span, delim, tts) => {
-                TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
-            }
-        }
-    }
-
-    fn lower_token(&mut self, token: Token) -> TokenStream {
-        match token.kind {
-            token::Interpolated(nt) => {
-                let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
-                TokenTree::Delimited(
-                    DelimSpan::from_single(token.span),
-                    DelimToken::NoDelim,
-                    self.lower_token_stream(tts),
+                // This is either a non-key-value attribute, or a `macro_rules!` body.
+                // We either do not have any nonterminals present (in the case of an attribute),
+                // or have tokens available for all nonterminals in the case of a nested
+                // `macro_rules`, e.g.:
+                //
+                // ```rust
+                // macro_rules! outer {
+                //     ($e:expr) => {
+                //         macro_rules! inner {
+                //             () => { $e }
+                //         }
+                //     }
+                // }
+                // ```
+                //
+                // In both cases, we don't want to synthesize any tokens.
+                MacArgs::Delimited(
+                    dspan,
+                    delim,
+                    self.lower_token_stream(tokens.clone(), CanSynthesizeMissingTokens::No),
                 )
-                .into()
             }
-            _ => TokenTree::Token(token).into(),
+            // This is an inert key-value attribute - it will never be visible to macros
+            // after it gets lowered to HIR. Therefore, we can synthesize tokens with fake
+            // spans to handle nonterminals in `#[doc]` (e.g. `#[doc = $e]`).
+            MacArgs::Eq(eq_span, ref tokens) => MacArgs::Eq(
+                eq_span,
+                self.lower_token_stream(tokens.clone(), CanSynthesizeMissingTokens::Yes),
+            ),
         }
     }
 
+    fn lower_token_stream(
+        &self,
+        tokens: TokenStream,
+        synthesize_tokens: CanSynthesizeMissingTokens,
+    ) -> TokenStream {
+        TokenStreamLowering {
+            parse_sess: &self.sess.parse_sess,
+            synthesize_tokens,
+            nt_to_tokenstream: self.nt_to_tokenstream,
+        }
+        .lower_token_stream(tokens)
+    }
+
     /// Given an associated type constraint like one of these:
     ///
     /// ```
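The `#[doc = $e]` case called out in the comments of the hunk above is the one that needs `CanSynthesizeMissingTokens::Yes`: once the attribute is lowered to HIR it is inert, so tokens with made-up spans are acceptable. A user-level illustration of the kind of code that exercises that path (this snippet is ordinary Rust written for this note, not part of the commit):

// A macro that forwards an expression nonterminal into a key-value attribute.
// When the compiler lowers the expanded `#[doc = $text]`, the nonterminal may
// carry no usable token stream, so the lowering synthesizes one with fake spans.
macro_rules! document {
    ($text:expr) => {
        #[doc = $text]
        pub struct Documented;
    };
}

document!("generated documentation for `Documented`");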