Do not collect tokens for doc comments

Vadim Petrochenkov, 2020-11-05 20:27:48 +03:00
commit 12de1e8985 (parent 1773f60ea5)
17 changed files with 138 additions and 159 deletions
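What the hunks below boil down to, sketched with the types inferred from the diff itself rather than the full tree: callers no longer reach into an `Option<LazyTokenStream>` field on the attribute and force it into a token stream by hand; they call a `tokens()` accessor and walk the resulting stream directly.

    // Before (per the removed lines): manual unwrap plus lazy-stream forcing.
    let orig_tokens = attr
        .tokens
        .as_ref()
        .unwrap_or_else(|| panic!("Missing tokens for {:?}", attr))
        .create_token_stream();
    let pound_token = orig_tokens.trees().next().unwrap();

    // After (per the added lines): the accessor returns the stream directly.
    let orig_tokens = attr.tokens();
    let pound_token = orig_tokens.trees().next().unwrap();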


@@ -291,8 +291,7 @@ impl<'a> StripUnconfigured<'a> {
         expanded_attrs
             .into_iter()
             .flat_map(|(item, span)| {
-                let orig_tokens =
-                    attr.tokens.as_ref().unwrap_or_else(|| panic!("Missing tokens for {:?}", attr));
+                let orig_tokens = attr.tokens();
                 // We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
                 // and producing an attribute of the form `#[attr]`. We
@@ -302,7 +301,7 @@ impl<'a> StripUnconfigured<'a> {
                 // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
                 // for `attr` when we expand it to `#[attr]`
-                let pound_token = orig_tokens.create_token_stream().trees().next().unwrap();
+                let pound_token = orig_tokens.trees().next().unwrap();
                 if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
                     panic!("Bad tokens for attribute {:?}", attr);
                 }
@@ -316,13 +315,12 @@ impl<'a> StripUnconfigured<'a> {
                         .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
                         .create_token_stream(),
                 );
-                let mut attr = attr::mk_attr_from_item(attr.style, item, span);
-                attr.tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
+                let tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
                     (pound_token, Spacing::Alone),
                     (bracket_group, Spacing::Alone),
                 ])));
-                self.process_cfg_attr(attr)
+                self.process_cfg_attr(attr::mk_attr_from_item(item, tokens, attr.style, span))
             })
             .collect()
     }
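The last hunk above can be read the same way; the `attr::mk_attr_from_item` signatures here are taken from the two call sites in the diff, and `stream` stands in for the `TokenStream` built from the `#` token and the bracketed group:

    // Before: construct the attribute, then attach the synthesized tokens.
    let mut attr = attr::mk_attr_from_item(attr.style, item, span);
    attr.tokens = Some(LazyTokenStream::new(stream)); // `stream`: the synthesized TokenStream
    self.process_cfg_attr(attr)

    // After: the constructor takes the tokens up front, so the attribute is
    // never observed without them.
    let tokens = Some(LazyTokenStream::new(stream));
    self.process_cfg_attr(attr::mk_attr_from_item(item, tokens, attr.style, span))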


@@ -1778,15 +1778,13 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
             let meta = attr::mk_list_item(Ident::with_dummy_span(sym::doc), items);
             *at = ast::Attribute {
-                kind: ast::AttrKind::Normal(AttrItem {
-                    path: meta.path,
-                    args: meta.kind.mac_args(meta.span),
-                    tokens: None,
-                }),
+                kind: ast::AttrKind::Normal(
+                    AttrItem { path: meta.path, args: meta.kind.mac_args(meta.span), tokens: None },
+                    None,
+                ),
                 span: at.span,
                 id: at.id,
                 style: at.style,
-                tokens: None,
             };
         } else {
             noop_visit_attribute(at, self)
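This second hunk shows the data-layout change driving the commit: the collected tokens now ride on `AttrKind::Normal` as a second field, and `ast::Attribute` drops its own `tokens` field, so sugared doc comments (the `DocComment` variant) never carry a collected token stream. A rough sketch of the shapes implied by the diff; the `DocComment` payload and exact field order are assumptions, not verified against the rest of the tree:

    pub enum AttrKind {
        // A regular `#[...]` attribute together with the tokens it was
        // parsed from, if any were collected.
        Normal(AttrItem, Option<LazyTokenStream>),
        // A sugared doc comment (`/// ...`); per this commit, no tokens
        // are collected for these.
        DocComment(CommentKind, Symbol),
    }

    pub struct Attribute {
        pub kind: AttrKind,
        pub id: AttrId,
        pub style: AttrStyle,
        pub span: Span,
        // no separate `tokens` field anymore (see the removed `tokens: None` above)
    }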