Implement token-based handling of attributes during expansion

This PR modifies the macro expansion infrastructure to handle attributes
in a fully token-based manner. As a result:

* Derive macros no longer lose spans when their input is modified
  by eager cfg-expansion. This is accomplished by performing eager
  cfg-expansion on the token stream that we pass to the derive
  proc-macro.
* Inner attributes now preserve spans in all cases, including when we
  have multiple inner attributes in a row (both cases are illustrated
  in the example below).
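
For illustration only (this example is not part of the original commit
message), here is the kind of code these two fixes affect:

```rust
// Illustrative only: eager cfg-expansion removes `hidden` from the token
// stream handed to the `Debug` derive, and the remaining tokens now keep
// their original spans, so derive diagnostics still point at this source.
#[derive(Debug)]
struct Config {
    #[cfg(FALSE)]
    hidden: u8,
    name: String,
}

// Several inner attributes in a row also keep their spans now.
mod lints {
    #![allow(dead_code)]
    #![allow(unused_imports)]
}
```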

This is accomplished through the following changes:

* New structs `AttrAnnotatedTokenStream` and `AttrAnnotatedTokenTree` are
  introduced (see the sketch after this list). These are very similar to a
  regular `TokenStream` and `TokenTree`, but they also track the position of
  attributes and attribute targets within the stream. They are built when we
  collect tokens during parsing. An `AttrAnnotatedTokenStream` is converted to
  a regular `TokenStream` when we invoke a macro.
* Token capturing and `LazyTokenStream` are modified to work with
  `AttrAnnotatedTokenStream`. A new `ReplaceRange` type is introduced (also
  sketched after this list), which is created during the parsing of a nested
  AST node to make the 'outer' AST node aware of the attributes and attribute
  target stored deeper in the token stream.
* When we need to perform eager cfg-expansion (either due to `#[derive]` or
  `#[cfg_eval]`), we tokenize and reparse our target, capturing additional
  information about the locations of `#[cfg]` and `#[cfg_attr]` attributes at
  any depth within the target. This is a performance optimization, allowing us
  to perform less work in the typical case where captured tokens never have
  eager cfg-expansion run on them.
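
The new types themselves do not appear in the diff below. As a rough sketch of
their shape (the stand-in types and exact fields here are assumptions, not the
real rustc definitions):

```rust
// Stand-in types so the sketch is self-contained; in rustc the real ones
// live in `rustc_ast::token` and `rustc_ast::tokenstream`.
struct Token;
struct DelimSpan;
enum DelimToken { Paren, Bracket, Brace, NoDelim }
enum Spacing { Alone, Joint }
struct AttributesData; // an attribute target plus the attributes applied to it

// Like a `TokenTree`, but attributes and their targets stay structured
// instead of being flattened into plain tokens.
enum AttrAnnotatedTokenTree {
    Token(Token),
    Delimited(DelimSpan, DelimToken, AttrAnnotatedTokenStream),
    Attributes(AttributesData),
}

// Built while collecting tokens during parsing; converted back into a plain
// `TokenStream` only when a macro is actually invoked.
struct AttrAnnotatedTokenStream(Vec<(AttrAnnotatedTokenTree, Spacing)>);
```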
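
Similarly, a minimal sketch of the ideas in the last two bullets, with an
assumed (not verbatim) representation for `ReplaceRange`:

```rust
use std::ops::Range;

// Stand-in for the flattened token representation used during capture.
struct FlatToken;

// Assumed shape: "replace the captured tokens at these positions with these
// new tokens". Recording such ranges while parsing a nested AST node lets the
// outer node's lazy token stream know where attributes and their targets sit.
type ReplaceRange = (Range<u32>, Vec<FlatToken>);

// The performance shortcut: only tokenize and reparse the target for eager
// cfg-expansion if `#[cfg]`/`#[cfg_attr]` attributes were recorded anywhere
// inside it during token capture.
fn needs_eager_cfg_expansion(recorded_cfg_ranges: &[ReplaceRange]) -> bool {
    !recorded_cfg_ranges.is_empty()
}
```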
Aaron Hill 2020-11-28 18:33:17 -05:00
parent 25ea6be13e
commit a93c4f05de
33 changed files with 2046 additions and 1192 deletions

@@ -37,8 +37,8 @@
 #![recursion_limit = "256"]
 
 use rustc_ast::node_id::NodeMap;
-use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
-use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, DelimSpan, TokenStream, TokenTree};
+use rustc_ast::token::{self, Token};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
 use rustc_ast::visit::{self, AssocCtxt, Visitor};
 use rustc_ast::walk_list;
 use rustc_ast::{self as ast, *};
@@ -56,7 +56,7 @@ use rustc_hir::{ConstArg, GenericArg, ParamName};
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_session::lint::builtin::{BARE_TRAIT_OBJECTS, MISSING_ABI};
 use rustc_session::lint::{BuiltinLintDiagnostics, LintBuffer};
-use rustc_session::parse::ParseSess;
+use rustc_session::utils::{FlattenNonterminals, NtToTokenstream};
 use rustc_session::Session;
 use rustc_span::hygiene::ExpnId;
 use rustc_span::source_map::{respan, DesugaringKind};
@@ -213,8 +213,6 @@
     ) -> LocalDefId;
 }
 
-type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
-
 /// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
 /// and if so, what meaning it has.
 #[derive(Debug)]
@@ -403,42 +401,6 @@ enum AnonymousLifetimeMode {
     PassThrough,
 }
 
-struct TokenStreamLowering<'a> {
-    parse_sess: &'a ParseSess,
-    synthesize_tokens: CanSynthesizeMissingTokens,
-    nt_to_tokenstream: NtToTokenstream,
-}
-
-impl<'a> TokenStreamLowering<'a> {
-    fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
-        tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
-    }
-
-    fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
-        match tree {
-            TokenTree::Token(token) => self.lower_token(token),
-            TokenTree::Delimited(span, delim, tts) => {
-                TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
-            }
-        }
-    }
-
-    fn lower_token(&mut self, token: Token) -> TokenStream {
-        match token.kind {
-            token::Interpolated(nt) => {
-                let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
-                TokenTree::Delimited(
-                    DelimSpan::from_single(token.span),
-                    DelimToken::NoDelim,
-                    self.lower_token_stream(tts),
-                )
-                .into()
-            }
-            _ => TokenTree::Token(token).into(),
-        }
-    }
-}
-
 impl<'a, 'hir> LoweringContext<'a, 'hir> {
     fn lower_crate(mut self, c: &Crate) -> hir::Crate<'hir> {
         /// Full-crate AST visitor that inserts into a fresh
@@ -1037,12 +999,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                     }
                 }
 
-                let tokens = TokenStreamLowering {
+                let tokens = FlattenNonterminals {
                     parse_sess: &self.sess.parse_sess,
                     synthesize_tokens: CanSynthesizeMissingTokens::Yes,
                     nt_to_tokenstream: self.nt_to_tokenstream,
                 }
-                .lower_token(token.clone());
+                .process_token(token.clone());
                 MacArgs::Eq(eq_span, unwrap_single_token(self.sess, tokens, token.span))
             }
         }
@@ -1053,12 +1015,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         tokens: TokenStream,
         synthesize_tokens: CanSynthesizeMissingTokens,
     ) -> TokenStream {
-        TokenStreamLowering {
+        FlattenNonterminals {
             parse_sess: &self.sess.parse_sess,
             synthesize_tokens,
             nt_to_tokenstream: self.nt_to_tokenstream,
         }
-        .lower_token_stream(tokens)
+        .process_token_stream(tokens)
     }
 
     /// Given an associated type constraint like one of these: