Rename some attribute types for consistency.
- `AttributesData` -> `AttrsTarget` - `AttrTokenTree::Attributes` -> `AttrTokenTree::AttrsTarget` - `FlatToken::AttrTarget` -> `FlatToken::AttrsTarget`
This commit is contained in:
parent
9d33a8fe51
commit
3a5c4b6e4e
6 changed files with 42 additions and 44 deletions
|
@ -704,7 +704,7 @@ fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
|
||||||
visit_attr_tts(tts, vis);
|
visit_attr_tts(tts, vis);
|
||||||
visit_delim_span(dspan, vis);
|
visit_delim_span(dspan, vis);
|
||||||
}
|
}
|
||||||
AttrTokenTree::Attributes(AttributesData { attrs, tokens }) => {
|
AttrTokenTree::AttrsTarget(AttrsTarget { attrs, tokens }) => {
|
||||||
visit_attrs(attrs, vis);
|
visit_attrs(attrs, vis);
|
||||||
visit_lazy_tts_opt_mut(Some(tokens), vis);
|
visit_lazy_tts_opt_mut(Some(tokens), vis);
|
||||||
}
|
}
|
||||||
|
|
|
@ -170,8 +170,8 @@ pub enum AttrTokenTree {
|
||||||
Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
|
Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
|
||||||
/// Stores the attributes for an attribute target,
|
/// Stores the attributes for an attribute target,
|
||||||
/// along with the tokens for that attribute target.
|
/// along with the tokens for that attribute target.
|
||||||
/// See `AttributesData` for more information
|
/// See `AttrsTarget` for more information
|
||||||
Attributes(AttributesData),
|
AttrsTarget(AttrsTarget),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AttrTokenStream {
|
impl AttrTokenStream {
|
||||||
|
@ -180,7 +180,7 @@ impl AttrTokenStream {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
|
/// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
|
||||||
/// During conversion, `AttrTokenTree::Attributes` get 'flattened'
|
/// During conversion, `AttrTokenTree::AttrsTarget` get 'flattened'
|
||||||
/// back to a `TokenStream` of the form `outer_attr attr_target`.
|
/// back to a `TokenStream` of the form `outer_attr attr_target`.
|
||||||
/// If there are inner attributes, they are inserted into the proper
|
/// If there are inner attributes, they are inserted into the proper
|
||||||
/// place in the attribute target tokens.
|
/// place in the attribute target tokens.
|
||||||
|
@ -199,13 +199,13 @@ impl AttrTokenStream {
|
||||||
TokenStream::new(stream.to_token_trees()),
|
TokenStream::new(stream.to_token_trees()),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
AttrTokenTree::Attributes(data) => {
|
AttrTokenTree::AttrsTarget(target) => {
|
||||||
let idx = data
|
let idx = target
|
||||||
.attrs
|
.attrs
|
||||||
.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
|
.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
|
||||||
let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);
|
let (outer_attrs, inner_attrs) = target.attrs.split_at(idx);
|
||||||
|
|
||||||
let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
|
let mut target_tokens = target.tokens.to_attr_token_stream().to_token_trees();
|
||||||
if !inner_attrs.is_empty() {
|
if !inner_attrs.is_empty() {
|
||||||
let mut found = false;
|
let mut found = false;
|
||||||
// Check the last two trees (to account for a trailing semi)
|
// Check the last two trees (to account for a trailing semi)
|
||||||
|
@ -262,7 +262,7 @@ impl AttrTokenStream {
|
||||||
/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
|
/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
|
||||||
/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
|
/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
|
||||||
#[derive(Clone, Debug, Encodable, Decodable)]
|
#[derive(Clone, Debug, Encodable, Decodable)]
|
||||||
pub struct AttributesData {
|
pub struct AttrsTarget {
|
||||||
/// Attributes, both outer and inner.
|
/// Attributes, both outer and inner.
|
||||||
/// These are stored in the original order that they were parsed in.
|
/// These are stored in the original order that they were parsed in.
|
||||||
pub attrs: AttrVec,
|
pub attrs: AttrVec,
|
||||||
|
@ -444,9 +444,9 @@ impl TokenStream {
|
||||||
let attr_stream = if attrs.is_empty() {
|
let attr_stream = if attrs.is_empty() {
|
||||||
tokens.to_attr_token_stream()
|
tokens.to_attr_token_stream()
|
||||||
} else {
|
} else {
|
||||||
let attr_data =
|
let target =
|
||||||
AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
|
AttrsTarget { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
|
||||||
AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
|
AttrTokenStream::new(vec![AttrTokenTree::AttrsTarget(target)])
|
||||||
};
|
};
|
||||||
TokenStream::new(attr_stream.to_token_trees())
|
TokenStream::new(attr_stream.to_token_trees())
|
||||||
}
|
}
|
||||||
|
|
|
@ -193,7 +193,7 @@ impl CfgEval<'_> {
|
||||||
|
|
||||||
// Re-parse the tokens, setting the `capture_cfg` flag to save extra information
|
// Re-parse the tokens, setting the `capture_cfg` flag to save extra information
|
||||||
// to the captured `AttrTokenStream` (specifically, we capture
|
// to the captured `AttrTokenStream` (specifically, we capture
|
||||||
// `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
|
// `AttrTokenTree::AttrsTarget` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
|
||||||
let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
|
let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
|
||||||
parser.capture_cfg = true;
|
parser.capture_cfg = true;
|
||||||
match parse_annotatable_with(&mut parser) {
|
match parse_annotatable_with(&mut parser) {
|
||||||
|
|
|
@ -172,7 +172,7 @@ impl<'a> StripUnconfigured<'a> {
|
||||||
fn configure_tokens(&self, stream: &AttrTokenStream) -> AttrTokenStream {
|
fn configure_tokens(&self, stream: &AttrTokenStream) -> AttrTokenStream {
|
||||||
fn can_skip(stream: &AttrTokenStream) -> bool {
|
fn can_skip(stream: &AttrTokenStream) -> bool {
|
||||||
stream.0.iter().all(|tree| match tree {
|
stream.0.iter().all(|tree| match tree {
|
||||||
AttrTokenTree::Attributes(_) => false,
|
AttrTokenTree::AttrsTarget(_) => false,
|
||||||
AttrTokenTree::Token(..) => true,
|
AttrTokenTree::Token(..) => true,
|
||||||
AttrTokenTree::Delimited(.., inner) => can_skip(inner),
|
AttrTokenTree::Delimited(.., inner) => can_skip(inner),
|
||||||
})
|
})
|
||||||
|
@ -186,14 +186,14 @@ impl<'a> StripUnconfigured<'a> {
|
||||||
.0
|
.0
|
||||||
.iter()
|
.iter()
|
||||||
.flat_map(|tree| match tree.clone() {
|
.flat_map(|tree| match tree.clone() {
|
||||||
AttrTokenTree::Attributes(mut data) => {
|
AttrTokenTree::AttrsTarget(mut target) => {
|
||||||
data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
|
target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
|
||||||
|
|
||||||
if self.in_cfg(&data.attrs) {
|
if self.in_cfg(&target.attrs) {
|
||||||
data.tokens = LazyAttrTokenStream::new(
|
target.tokens = LazyAttrTokenStream::new(
|
||||||
self.configure_tokens(&data.tokens.to_attr_token_stream()),
|
self.configure_tokens(&target.tokens.to_attr_token_stream()),
|
||||||
);
|
);
|
||||||
Some(AttrTokenTree::Attributes(data)).into_iter()
|
Some(AttrTokenTree::AttrsTarget(target)).into_iter()
|
||||||
} else {
|
} else {
|
||||||
None.into_iter()
|
None.into_iter()
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
|
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
|
||||||
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
|
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
|
||||||
use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
|
use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing};
|
||||||
use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
|
use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
|
||||||
use rustc_ast::{self as ast};
|
use rustc_ast::{self as ast};
|
||||||
use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
|
use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
|
||||||
|
@ -145,22 +145,22 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
|
||||||
// start position, we ensure that any replace range which encloses
|
// start position, we ensure that any replace range which encloses
|
||||||
// another replace range will capture the *replaced* tokens for the inner
|
// another replace range will capture the *replaced* tokens for the inner
|
||||||
// range, not the original tokens.
|
// range, not the original tokens.
|
||||||
for (range, attr_data) in replace_ranges.into_iter().rev() {
|
for (range, target) in replace_ranges.into_iter().rev() {
|
||||||
assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
|
assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
|
||||||
|
|
||||||
// Replace the tokens in range with zero or one `FlatToken::AttrTarget`s, plus
|
// Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
|
||||||
// enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
|
// enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
|
||||||
// total length of `tokens` constant throughout the replacement process, allowing
|
// total length of `tokens` constant throughout the replacement process, allowing
|
||||||
// us to use all of the `ReplaceRanges` entries without adjusting indices.
|
// us to use all of the `ReplaceRanges` entries without adjusting indices.
|
||||||
let attr_data_len = attr_data.is_some() as usize;
|
let target_len = target.is_some() as usize;
|
||||||
tokens.splice(
|
tokens.splice(
|
||||||
(range.start as usize)..(range.end as usize),
|
(range.start as usize)..(range.end as usize),
|
||||||
attr_data
|
target
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|attr_data| (FlatToken::AttrTarget(attr_data), Spacing::Alone))
|
.map(|target| (FlatToken::AttrsTarget(target), Spacing::Alone))
|
||||||
.chain(
|
.chain(
|
||||||
iter::repeat((FlatToken::Empty, Spacing::Alone))
|
iter::repeat((FlatToken::Empty, Spacing::Alone))
|
||||||
.take(range.len() - attr_data_len),
|
.take(range.len() - target_len),
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -346,13 +346,12 @@ impl<'a> Parser<'a> {
|
||||||
{
|
{
|
||||||
assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
|
assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
|
||||||
|
|
||||||
// Replace the entire AST node that we just parsed, including attributes,
|
// Replace the entire AST node that we just parsed, including attributes, with
|
||||||
// with `attr_data`. If this AST node is inside an item
|
// `target`. If this AST node is inside an item that has `#[derive]`, then this will
|
||||||
// that has `#[derive]`, then this will allow us to cfg-expand this
|
// allow us to cfg-expand this AST node.
|
||||||
// AST node.
|
|
||||||
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
|
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
|
||||||
let attr_data = AttributesData { attrs: final_attrs.iter().cloned().collect(), tokens };
|
let target = AttrsTarget { attrs: final_attrs.iter().cloned().collect(), tokens };
|
||||||
self.capture_state.replace_ranges.push((start_pos..end_pos, Some(attr_data)));
|
self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target)));
|
||||||
self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
|
self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -414,11 +413,11 @@ fn make_attr_token_stream(
|
||||||
.expect("Bottom token frame is missing!")
|
.expect("Bottom token frame is missing!")
|
||||||
.inner
|
.inner
|
||||||
.push(AttrTokenTree::Token(token, spacing)),
|
.push(AttrTokenTree::Token(token, spacing)),
|
||||||
FlatToken::AttrTarget(data) => stack
|
FlatToken::AttrsTarget(target) => stack
|
||||||
.last_mut()
|
.last_mut()
|
||||||
.expect("Bottom token frame is missing!")
|
.expect("Bottom token frame is missing!")
|
||||||
.inner
|
.inner
|
||||||
.push(AttrTokenTree::Attributes(data)),
|
.push(AttrTokenTree::AttrsTarget(target)),
|
||||||
FlatToken::Empty => {}
|
FlatToken::Empty => {}
|
||||||
}
|
}
|
||||||
token_and_spacing = iter.next();
|
token_and_spacing = iter.next();
|
||||||
|
|
|
@ -20,7 +20,7 @@ use path::PathStyle;
|
||||||
|
|
||||||
use rustc_ast::ptr::P;
|
use rustc_ast::ptr::P;
|
||||||
use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
|
use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
|
||||||
use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
|
use rustc_ast::tokenstream::{AttrsTarget, DelimSpacing, DelimSpan, Spacing};
|
||||||
use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
|
use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
|
||||||
use rustc_ast::util::case::Case;
|
use rustc_ast::util::case::Case;
|
||||||
use rustc_ast::{
|
use rustc_ast::{
|
||||||
|
@ -203,13 +203,13 @@ struct ClosureSpans {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Indicates a range of tokens that should be replaced by
|
/// Indicates a range of tokens that should be replaced by
|
||||||
/// the tokens in the provided `AttributesData`. This is used in two
|
/// the tokens in the provided `AttrsTarget`. This is used in two
|
||||||
/// places during token collection:
|
/// places during token collection:
|
||||||
///
|
///
|
||||||
/// 1. During the parsing of an AST node that may have a `#[derive]`
|
/// 1. During the parsing of an AST node that may have a `#[derive]`
|
||||||
/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
|
/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
|
||||||
/// In this case, we use a `ReplaceRange` to replace the entire inner AST node
|
/// In this case, we use a `ReplaceRange` to replace the entire inner AST node
|
||||||
/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
|
/// with `FlatToken::AttrsTarget`, allowing us to perform eager cfg-expansion
|
||||||
/// on an `AttrTokenStream`.
|
/// on an `AttrTokenStream`.
|
||||||
///
|
///
|
||||||
/// 2. When we parse an inner attribute while collecting tokens. We
|
/// 2. When we parse an inner attribute while collecting tokens. We
|
||||||
|
@ -219,7 +219,7 @@ struct ClosureSpans {
|
||||||
/// the first macro inner attribute to invoke a proc-macro).
|
/// the first macro inner attribute to invoke a proc-macro).
|
||||||
/// When creating a `TokenStream`, the inner attributes get inserted
|
/// When creating a `TokenStream`, the inner attributes get inserted
|
||||||
/// into the proper place in the token stream.
|
/// into the proper place in the token stream.
|
||||||
type ReplaceRange = (Range<u32>, Option<AttributesData>);
|
type ReplaceRange = (Range<u32>, Option<AttrsTarget>);
|
||||||
|
|
||||||
/// Controls how we capture tokens. Capturing can be expensive,
|
/// Controls how we capture tokens. Capturing can be expensive,
|
||||||
/// so we try to avoid performing capturing in cases where
|
/// so we try to avoid performing capturing in cases where
|
||||||
|
@ -1608,11 +1608,10 @@ enum FlatToken {
|
||||||
/// A token - this holds both delimiter (e.g. '{' and '}')
|
/// A token - this holds both delimiter (e.g. '{' and '}')
|
||||||
/// and non-delimiter tokens
|
/// and non-delimiter tokens
|
||||||
Token(Token),
|
Token(Token),
|
||||||
/// Holds the `AttributesData` for an AST node. The
|
/// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
|
||||||
/// `AttributesData` is inserted directly into the
|
/// directly into the constructed `AttrTokenStream` as an
|
||||||
/// constructed `AttrTokenStream` as
|
/// `AttrTokenTree::AttrsTarget`.
|
||||||
/// an `AttrTokenTree::Attributes`.
|
AttrsTarget(AttrsTarget),
|
||||||
AttrTarget(AttributesData),
|
|
||||||
/// A special 'empty' token that is ignored during the conversion
|
/// A special 'empty' token that is ignored during the conversion
|
||||||
/// to an `AttrTokenStream`. This is used to simplify the
|
/// to an `AttrTokenStream`. This is used to simplify the
|
||||||
/// handling of replace ranges.
|
/// handling of replace ranges.
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue