
Implement token-based handling of attributes during expansion

This PR modifies the macro expansion infrastructure to handle attributes
in a fully token-based manner. As a result:

* Derive macros no longer lose spans when their input is modified
  by eager cfg-expansion. This is accomplished by performing eager
  cfg-expansion on the token stream that we pass to the derive
  proc-macro (see the example below).
* Inner attributes now preserve spans in all cases, including when we
  have multiple inner attributes in a row.
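
As a hedged illustration of the first point (ordinary user code, not part of
this PR): the `#[cfg(FALSE)]` field below is stripped by eager cfg-expansion
before the derive runs, and with this change the surviving tokens keep their
original spans, so diagnostics from the derive still point at the right source
locations.

```rust
#[derive(Clone)]
struct Foo {
    a: u32,
    // Stripped by eager cfg-expansion before the `Clone` derive sees the
    // tokens; the tokens for `a: u32` keep their original spans.
    #[cfg(FALSE)]
    b: u32,
}

fn main() {
    let _copy = Foo { a: 0 }.clone();
}
```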

This is accomplished through the following changes:

* New structs `AttrAnnotatedTokenStream` and `AttrAnnotatedTokenTree` are introduced.
  These mirror the regular `TokenStream` and `TokenTree` types, but they also track
  the position of attributes and attribute targets within the stream.
  They are built when we collect tokens during parsing.
  An `AttrAnnotatedTokenStream` is converted to a regular `TokenStream` when
  we invoke a macro.
* Token capturing and `LazyTokenStream` are modified to work with
  `AttrAnnotatedTokenStream`. A new `ReplaceRange` type is introduced, which
  is created during the parsing of a nested AST node to make the 'outer'
  AST node aware of the attributes and attribute target stored deeper in the token stream.
* When we need to perform eager cfg-expansion (either due to `#[derive]`
  or `#[cfg_eval]`), we tokenize and reparse our target, capturing
  additional information about the locations of `#[cfg]` and `#[cfg_attr]`
  attributes at any depth within the target (see the sketch after this
  list). This is a performance optimization: it lets us do less work in
  the common case, where the captured tokens never have eager
  cfg-expansion run on them.
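
A hedged sketch of the "any depth" case from the last bullet (plain user code,
not compiler internals): the `#[cfg_attr]` sits on a field inside the derive
target, so its location has to be recorded during token capture, and only
inputs like this one pay the extra tokenize-and-reparse cost.

```rust
#[derive(Debug)]
struct Config {
    // Nested one level inside the derive target: the parser records this
    // attribute's position during token capture so that eager cfg-expansion
    // can evaluate it before `Debug` is derived.
    #[cfg_attr(debug_assertions, allow(dead_code))]
    cache: Option<u32>,
}

fn main() {
    println!("{:?}", Config { cache: None });
}
```
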
Aaron Hill 2020-11-28 18:33:17 -05:00
parent 25ea6be13e
commit a93c4f05de
33 changed files with 2046 additions and 1192 deletions

View file

@@ -1,20 +1,32 @@
use super::ptr::P;
use super::token::Nonterminal;
use super::tokenstream::LazyTokenStream;
use super::{Arm, ExprField, FieldDef, GenericParam, Param, PatField, Variant};
use super::{AssocItem, Expr, ForeignItem, Item, Local};
use super::{AssocItem, Expr, ForeignItem, Item, Local, MacCallStmt};
use super::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
use super::{AttrVec, Attribute, Stmt, StmtKind};
use std::fmt::Debug;
/// An `AstLike` represents an AST node (or some wrapper around
/// an AST node) which stores some combination of attributes
/// and tokens.
pub trait AstLike: Sized {
pub trait AstLike: Sized + Debug {
/// This is `true` if this `AstLike` might support 'custom' (proc-macro) inner
/// attributes. Attributes like `#![cfg]` and `#![cfg_attr]` are not
/// considered 'custom' attributes
///
/// If this is `false`, then this `AstLike` definitely does
/// not support 'custom' inner attributes, which enables some optimizations
/// during token collection.
const SUPPORTS_CUSTOM_INNER_ATTRS: bool;
fn attrs(&self) -> &[Attribute];
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>));
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>>;
}
impl<T: AstLike + 'static> AstLike for P<T> {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = T::SUPPORTS_CUSTOM_INNER_ATTRS;
fn attrs(&self) -> &[Attribute] {
(**self).attrs()
}
@@ -26,6 +38,55 @@ impl<T: AstLike + 'static> AstLike for P<T> {
}
}
impl AstLike for crate::token::Nonterminal {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
fn attrs(&self) -> &[Attribute] {
match self {
Nonterminal::NtItem(item) => item.attrs(),
Nonterminal::NtStmt(stmt) => stmt.attrs(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.attrs(),
Nonterminal::NtPat(_)
| Nonterminal::NtTy(_)
| Nonterminal::NtMeta(_)
| Nonterminal::NtPath(_)
| Nonterminal::NtVis(_)
| Nonterminal::NtTT(_)
| Nonterminal::NtBlock(_)
| Nonterminal::NtIdent(..)
| Nonterminal::NtLifetime(_) => &[],
}
}
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
match self {
Nonterminal::NtItem(item) => item.visit_attrs(f),
Nonterminal::NtStmt(stmt) => stmt.visit_attrs(f),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.visit_attrs(f),
Nonterminal::NtPat(_)
| Nonterminal::NtTy(_)
| Nonterminal::NtMeta(_)
| Nonterminal::NtPath(_)
| Nonterminal::NtVis(_)
| Nonterminal::NtTT(_)
| Nonterminal::NtBlock(_)
| Nonterminal::NtIdent(..)
| Nonterminal::NtLifetime(_) => {}
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
match self {
Nonterminal::NtItem(item) => item.tokens_mut(),
Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens_mut(),
Nonterminal::NtPat(pat) => pat.tokens_mut(),
Nonterminal::NtTy(ty) => ty.tokens_mut(),
Nonterminal::NtMeta(attr_item) => attr_item.tokens_mut(),
Nonterminal::NtPath(path) => path.tokens_mut(),
Nonterminal::NtVis(vis) => vis.tokens_mut(),
_ => panic!("Called tokens_mut on {:?}", self),
}
}
}
fn visit_attrvec(attrs: &mut AttrVec, f: impl FnOnce(&mut Vec<Attribute>)) {
crate::mut_visit::visit_clobber(attrs, |attrs| {
let mut vec = attrs.into();
@@ -35,6 +96,10 @@ fn visit_attrvec(attrs: &mut AttrVec, f: impl FnOnce(&mut Vec<Attribute>)) {
}
impl AstLike for StmtKind {
// This might be a `StmtKind::Item`, which contains
// an item that supports inner attrs
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
fn attrs(&self) -> &[Attribute] {
match self {
StmtKind::Local(local) => local.attrs(),
@@ -66,6 +131,8 @@ impl AstLike for StmtKind {
}
impl AstLike for Stmt {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = StmtKind::SUPPORTS_CUSTOM_INNER_ATTRS;
fn attrs(&self) -> &[Attribute] {
self.kind.attrs()
}
@@ -79,6 +146,8 @@ impl AstLike for Stmt {
}
impl AstLike for Attribute {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
fn attrs(&self) -> &[Attribute] {
&[]
}
@@ -94,6 +163,8 @@ impl AstLike for Attribute {
}
impl<T: AstLike> AstLike for Option<T> {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = T::SUPPORTS_CUSTOM_INNER_ATTRS;
fn attrs(&self) -> &[Attribute] {
self.as_ref().map(|inner| inner.attrs()).unwrap_or(&[])
}
@@ -127,8 +198,13 @@ impl VecOrAttrVec for AttrVec {
}
macro_rules! derive_has_tokens_and_attrs {
($($ty:path),*) => { $(
(
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = $inner_attrs:literal;
$($ty:path),*
) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = $inner_attrs;
fn attrs(&self) -> &[Attribute] {
&self.attrs
}
@@ -140,6 +216,7 @@ macro_rules! derive_has_tokens_and_attrs {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(&mut self.tokens)
}
}
)* }
}
@@ -147,6 +224,8 @@ macro_rules! derive_has_tokens_and_attrs {
macro_rules! derive_has_attrs_no_tokens {
($($ty:path),*) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
fn attrs(&self) -> &[Attribute] {
&self.attrs
}
@@ -165,12 +244,13 @@ macro_rules! derive_has_attrs_no_tokens {
macro_rules! derive_has_tokens_no_attrs {
($($ty:path),*) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
fn attrs(&self) -> &[Attribute] {
&[]
}
fn visit_attrs(&mut self, _f: impl FnOnce(&mut Vec<Attribute>)) {}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(&mut self.tokens)
}
@@ -178,10 +258,18 @@ macro_rules! derive_has_tokens_no_attrs {
)* }
}
// These AST nodes support both inert and active
// attributes, so they also have tokens.
// These ast nodes support both active and inert attributes,
// so they have tokens collected to pass to proc macros
derive_has_tokens_and_attrs! {
Item, Expr, Local, AssocItem, ForeignItem
// Both `Item` and `AssocItem` can have bodies, which
// can contain inner attributes
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
Item, AssocItem, ForeignItem
}
derive_has_tokens_and_attrs! {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
Local, MacCallStmt, Expr
}
// These ast nodes only support inert attributes, so they don't

View file

@@ -6,7 +6,9 @@ use crate::ast::{Lit, LitKind};
use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{Path, PathSegment};
use crate::token::{self, CommentKind, Token};
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree, TreeAndSpacing};
use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree, TreeAndSpacing};
use crate::tokenstream::{LazyTokenStream, TokenStream};
use rustc_index::bit_set::GrowableBitSet;
use rustc_span::source_map::BytePos;
@@ -268,14 +270,18 @@ impl Attribute {
}
}
pub fn tokens(&self) -> TokenStream {
pub fn tokens(&self) -> AttrAnnotatedTokenStream {
match self.kind {
AttrKind::Normal(_, ref tokens) => tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
.create_token_stream(),
AttrKind::DocComment(comment_kind, data) => TokenStream::from(TokenTree::Token(
Token::new(token::DocComment(comment_kind, self.style, data), self.span),
AttrKind::DocComment(comment_kind, data) => AttrAnnotatedTokenStream::from((
AttrAnnotatedTokenTree::Token(Token::new(
token::DocComment(comment_kind, self.style, data),
self.span,
)),
Spacing::Alone,
)),
}
}

View file

@@ -630,6 +630,33 @@ pub fn noop_flat_map_param<T: MutVisitor>(mut param: Param, vis: &mut T) -> Smal
smallvec![param]
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, vis: &mut T) {
match tt {
AttrAnnotatedTokenTree::Token(token) => {
visit_token(token, vis);
}
AttrAnnotatedTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
vis.visit_span(open);
vis.visit_span(close);
visit_attr_annotated_tts(tts, vis);
}
AttrAnnotatedTokenTree::Attributes(data) => {
for attr in &mut *data.attrs {
match &mut attr.kind {
AttrKind::Normal(_, attr_tokens) => {
visit_lazy_tts(attr_tokens, vis);
}
AttrKind::DocComment(..) => {
vis.visit_span(&mut attr.span);
}
}
}
visit_lazy_tts_opt_mut(Some(&mut data.tokens), vis);
}
}
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
match tt {
@@ -652,16 +679,30 @@ pub fn visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T)
}
}
pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
if vis.token_visiting_enabled() {
visit_opt(lazy_tts, |lazy_tts| {
let mut tts = lazy_tts.create_token_stream();
visit_tts(&mut tts, vis);
*lazy_tts = LazyTokenStream::new(tts);
})
pub fn visit_attr_annotated_tts<T: MutVisitor>(
AttrAnnotatedTokenStream(tts): &mut AttrAnnotatedTokenStream,
vis: &mut T,
) {
if vis.token_visiting_enabled() && !tts.is_empty() {
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| visit_attr_annotated_tt(tree, vis));
}
}
pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStream>, vis: &mut T) {
if vis.token_visiting_enabled() {
if let Some(lazy_tts) = lazy_tts {
let mut tts = lazy_tts.create_token_stream();
visit_attr_annotated_tts(&mut tts, vis);
*lazy_tts = LazyTokenStream::new(tts);
}
}
}
pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
// In practice the ident part is not actually used by specific visitors right now,

View file

@@ -14,6 +14,7 @@
//! ownership of the original.
use crate::token::{self, DelimToken, Token, TokenKind};
use crate::AttrVec;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{self, Lrc};
@@ -123,11 +124,11 @@
}
pub trait CreateTokenStream: sync::Send + sync::Sync {
fn create_token_stream(&self) -> TokenStream;
fn create_token_stream(&self) -> AttrAnnotatedTokenStream;
}
impl CreateTokenStream for TokenStream {
fn create_token_stream(&self) -> TokenStream {
impl CreateTokenStream for AttrAnnotatedTokenStream {
fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
self.clone()
}
}
@@ -143,14 +144,14 @@ impl LazyTokenStream {
LazyTokenStream(Lrc::new(Box::new(inner)))
}
pub fn create_token_stream(&self) -> TokenStream {
pub fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
self.0.create_token_stream()
}
}
impl fmt::Debug for LazyTokenStream {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt("LazyTokenStream", f)
write!(f, "LazyTokenStream({:?})", self.create_token_stream())
}
}
@@ -173,6 +174,145 @@ impl<CTX> HashStable<CTX> for LazyTokenStream {
}
}
/// An `AttrAnnotatedTokenStream` is similar to a `TokenStream`, but with extra
/// information about the tokens for attribute targets. This is used
/// during expansion to perform early cfg-expansion, and to process attributes
/// during proc-macro invocations.
#[derive(Clone, Debug, Default, Encodable, Decodable)]
pub struct AttrAnnotatedTokenStream(pub Lrc<Vec<(AttrAnnotatedTokenTree, Spacing)>>);
/// Like `TokenTree`, but for `AttrAnnotatedTokenStream`
#[derive(Clone, Debug, Encodable, Decodable)]
pub enum AttrAnnotatedTokenTree {
Token(Token),
Delimited(DelimSpan, DelimToken, AttrAnnotatedTokenStream),
/// Stores the attributes for an attribute target,
/// along with the tokens for that attribute target.
/// See `AttributesData` for more information
Attributes(AttributesData),
}
impl AttrAnnotatedTokenStream {
pub fn new(tokens: Vec<(AttrAnnotatedTokenTree, Spacing)>) -> AttrAnnotatedTokenStream {
AttrAnnotatedTokenStream(Lrc::new(tokens))
}
/// Converts this `AttrAnnotatedTokenStream` to a plain `TokenStream`.
/// During conversion, `AttrAnnotatedTokenTree::Attributes` get 'flattened'
/// back to a `TokenStream` of the form `outer_attr attr_target`.
/// If there are inner attributes, they are inserted into the proper
/// place in the attribute target tokens.
pub fn to_tokenstream(&self) -> TokenStream {
let trees: Vec<_> = self
.0
.iter()
.flat_map(|tree| match &tree.0 {
AttrAnnotatedTokenTree::Token(inner) => {
smallvec![(TokenTree::Token(inner.clone()), tree.1)].into_iter()
}
AttrAnnotatedTokenTree::Delimited(span, delim, stream) => smallvec![(
TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),
tree.1,
)]
.into_iter(),
AttrAnnotatedTokenTree::Attributes(data) => {
let mut outer_attrs = Vec::new();
let mut inner_attrs = Vec::new();
let attrs: Vec<_> = data.attrs.clone().into();
for attr in attrs {
match attr.style {
crate::AttrStyle::Outer => {
assert!(
inner_attrs.len() == 0,
"Found outer attribute {:?} after inner attrs {:?}",
attr,
inner_attrs
);
outer_attrs.push(attr);
}
crate::AttrStyle::Inner => {
inner_attrs.push(attr);
}
}
}
let mut target_tokens: Vec<_> = data
.tokens
.create_token_stream()
.to_tokenstream()
.0
.iter()
.cloned()
.collect();
if !inner_attrs.is_empty() {
let mut found = false;
// Check the last two trees (to account for a trailing semi)
for (tree, _) in target_tokens.iter_mut().rev().take(2) {
if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
// Inner attributes are only supported on extern blocks, functions, impls,
// and modules. All of these have their inner attributes placed at
// the beginning of the rightmost outermost braced group:
// e.g. fn foo() { #![my_attr] }
//
// Therefore, we can insert them back into the right location
// without needing to do any extra position tracking.
//
// Note: Outline modules are an exception - they can
// have attributes like `#![my_attr]` at the start of a file.
// Support for custom attributes in this position is not
// properly implemented - we always synthesize fake tokens,
// so we never reach this code.
let mut builder = TokenStreamBuilder::new();
for inner_attr in &inner_attrs {
builder.push(inner_attr.tokens().to_tokenstream());
}
builder.push(delim_tokens.clone());
*tree = TokenTree::Delimited(*span, *delim, builder.build());
found = true;
break;
}
}
assert!(
found,
"Failed to find trailing delimited group in: {:?}",
target_tokens
);
}
let mut flat: SmallVec<[_; 1]> = SmallVec::new();
for attr in outer_attrs {
// FIXME: Make this more efficient
flat.extend(attr.tokens().to_tokenstream().0.clone().iter().cloned());
}
flat.extend(target_tokens);
flat.into_iter()
}
})
.collect();
TokenStream::new(trees)
}
}
/// Stores the tokens for an attribute target, along
/// with its attributes.
///
/// This is constructed during parsing when we need to capture
/// tokens.
///
/// For example, `#[cfg(FALSE)] struct Foo {}` would
/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`.
#[derive(Clone, Debug, Encodable, Decodable)]
pub struct AttributesData {
/// Attributes, both outer and inner.
/// These are stored in the original order that they were parsed in.
pub attrs: AttrVec,
/// The underlying tokens for the attribute target that `attrs`
/// are applied to
pub tokens: LazyTokenStream,
}
/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
///
/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
@@ -235,6 +375,12 @@ impl TokenStream {
}
}
impl From<(AttrAnnotatedTokenTree, Spacing)> for AttrAnnotatedTokenStream {
fn from((tree, spacing): (AttrAnnotatedTokenTree, Spacing)) -> AttrAnnotatedTokenStream {
AttrAnnotatedTokenStream::new(vec![(tree, spacing)])
}
}
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream::new(vec![(tree, Spacing::Alone)])
@@ -457,6 +603,10 @@ impl Cursor {
}
}
pub fn index(&self) -> usize {
self.index
}
pub fn append(&mut self, new_stream: TokenStream) {
if new_stream.is_empty() {
return;