
Change AttrTokenStream::to_tokenstream to to_token_trees.

I.e. change the return type from `TokenStream` to `Vec<TokenTree>`.

Most of the callsites require a `TokenStream`, but the recursive call
used to create `target_tokens` requires a `Vec<TokenTree>`. It's easy
to convert a `Vec<TokenTree>` to a `TokenStream` (just call
`TokenStream::new`) but it's harder to convert a `TokenStream` to a
`Vec<TokenTree>` (either iterate/clone/collect, or use `Lrc::into_inner`
if appropriate).

So this commit changes the return type to simplify that `target_tokens`
call site.
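
As a rough illustration of that asymmetry (not part of the commit), here is a
minimal, self-contained sketch. `TokenTree` and `TokenStream` below are
hypothetical stand-ins for the real types in `rustc_ast::tokenstream`, which
wrap the trees in `Lrc` rather than `Arc`:

    use std::sync::Arc;

    // Simplified stand-ins for the real rustc types, just to show the shape
    // of the two conversions.
    #[derive(Clone)]
    struct TokenTree;
    struct TokenStream(Arc<Vec<TokenTree>>);

    impl TokenStream {
        fn new(trees: Vec<TokenTree>) -> TokenStream {
            TokenStream(Arc::new(trees))
        }
    }

    fn main() {
        // Vec<TokenTree> -> TokenStream: a single constructor call.
        let stream = TokenStream::new(vec![TokenTree, TokenTree]);

        // TokenStream -> Vec<TokenTree>: either clone every tree...
        let cloned: Vec<TokenTree> = stream.0.iter().cloned().collect();

        // ...or take the Vec out of the shared pointer, which only works
        // when this is the last reference to it.
        let owned: Vec<TokenTree> = Arc::into_inner(stream.0).expect("sole owner");

        println!("{} cloned, {} owned", cloned.len(), owned.len());
    }
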
Nicholas Nethercote 2024-06-27 10:42:46 +10:00
parent d6c0b8117e
commit f852568fa6
2 changed files with 15 additions and 22 deletions

@@ -204,12 +204,14 @@ impl Attribute {
     pub fn tokens(&self) -> TokenStream {
         match &self.kind {
-            AttrKind::Normal(normal) => normal
-                .tokens
-                .as_ref()
-                .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
-                .to_attr_token_stream()
-                .to_tokenstream(),
+            AttrKind::Normal(normal) => TokenStream::new(
+                normal
+                    .tokens
+                    .as_ref()
+                    .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
+                    .to_attr_token_stream()
+                    .to_token_trees(),
+            ),
             &AttrKind::DocComment(comment_kind, data) => TokenStream::token_alone(
                 token::DocComment(comment_kind, self.style, data),
                 self.span,

@@ -180,14 +180,13 @@ impl AttrTokenStream {
         AttrTokenStream(Lrc::new(tokens))
     }
 
-    /// Converts this `AttrTokenStream` to a plain `TokenStream`.
+    /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
     /// During conversion, `AttrTokenTree::Attributes` get 'flattened'
     /// back to a `TokenStream` of the form `outer_attr attr_target`.
     /// If there are inner attributes, they are inserted into the proper
     /// place in the attribute target tokens.
-    pub fn to_tokenstream(&self) -> TokenStream {
-        let trees: Vec<_> = self
-            .0
+    pub fn to_token_trees(&self) -> Vec<TokenTree> {
+        self.0
             .iter()
             .flat_map(|tree| match &tree {
                 AttrTokenTree::Token(inner, spacing) => {
@@ -198,7 +197,7 @@ impl AttrTokenStream {
                         *span,
                         *spacing,
                         *delim,
-                        stream.to_tokenstream()
+                        TokenStream::new(stream.to_token_trees())
                     ),]
                     .into_iter()
                 }
@@ -208,14 +207,7 @@ impl AttrTokenStream {
                         .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
                     let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);
 
-                    let mut target_tokens: Vec<_> = data
-                        .tokens
-                        .to_attr_token_stream()
-                        .to_tokenstream()
-                        .0
-                        .iter()
-                        .cloned()
-                        .collect();
+                    let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
                     if !inner_attrs.is_empty() {
                         let mut found = false;
                         // Check the last two trees (to account for a trailing semi)
@@ -260,8 +252,7 @@ impl AttrTokenStream {
                     flat.into_iter()
                 }
             })
-            .collect();
-        TokenStream::new(trees)
+            .collect()
     }
 }
 
@@ -461,7 +452,7 @@ impl TokenStream {
                 AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
             AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
         };
-        attr_stream.to_tokenstream()
+        TokenStream::new(attr_stream.to_token_trees())
     }
 
     pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {