
Auto merge of #77255 - Aaron1011:feature/collect-attr-tokens, r=petrochenkov

Unconditionally capture tokens for attributes.

This allows us to avoid synthesizing tokens in `prepend_attr`, since we
have the original tokens available.

We still need to synthesize tokens when expanding `cfg_attr`,
but this is an unavoidable consequence of the syntax of `cfg_attr` -
the user does not supply the `#` and `[]` tokens that a `cfg_attr`
expands to.
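
For illustration (a sketch, not taken from the PR; the `test` predicate and `derive(Clone)` are placeholder choices), compare what the user writes with what `cfg_attr` expands to:

```rust
// As written by the user: every token here, including `#`, `[`, and `]`,
// is present in the source text and can be captured directly.
#[cfg_attr(test, derive(Clone))]
struct Point {
    x: i32,
    y: i32,
}

// When the `test` predicate holds, this behaves as if the user had written:
//
//     #[derive(Clone)]
//     struct Point { x: i32, y: i32 }
//
// The `#`, `[`, and `]` wrapping `derive(Clone)` never appear in the source,
// so their tokens have to be synthesized during `cfg_attr` expansion.

fn main() {}
```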

This is based on PR https://github.com/rust-lang/rust/pull/77250 - that PR exposes a bug in the current `collect_tokens` implementation, which is fixed by the rewrite in this PR.
bors 2020-10-24 19:23:32 +00:00
commit ffa2e7ae8f
19 changed files with 251 additions and 138 deletions

@@ -1178,8 +1178,9 @@ impl<'a> Parser<'a> {
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
-    /// into a `TokenStream`, and returned along with the result
-    /// of the callback.
+    /// into a `LazyTokenStream`, and returned along with the result
+    /// of the callback. The returned `LazyTokenStream` will be `None`
+    /// if no tokens were captured.
     ///
     /// Note: If your callback consumes an opening delimiter
     /// (including the case where you call `collect_tokens`
@@ -1195,7 +1196,7 @@ impl<'a> Parser<'a> {
     pub fn collect_tokens<R>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
-    ) -> PResult<'a, (R, LazyTokenStream)> {
+    ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
         let mut cursor_snapshot = self.token_cursor.clone();
@@ -1205,6 +1206,11 @@ impl<'a> Parser<'a> {
         let num_calls = new_calls - cursor_snapshot.num_next_calls;
         let desugar_doc_comments = self.desugar_doc_comments;
+        // We didn't capture any tokens
+        if num_calls == 0 {
+            return Ok((ret, None));
+        }
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
         // by the callback. This allows us to avoid producing a `TokenStream`
@@ -1233,7 +1239,7 @@ impl<'a> Parser<'a> {
         };
         let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
-        Ok((ret, stream))
+        Ok((ret, Some(stream)))
     }
     /// `::{` or `::*`
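
As a rough, self-contained sketch of the mechanism in the hunks above (none of the names below are rustc's; `Tok`, `Cursor`, `LazyTokens`, and `capture` are illustrative stand-ins): the callback runs, the number of cursor advances is counted, `None` is returned when nothing was consumed, and otherwise the captured tokens are reconstructed lazily from a snapshot.

```rust
// Minimal illustration of lazy token capture, using simplified stand-in
// types rather than rustc's real `Parser` / `LazyTokenStream`.
#[derive(Clone, Debug)]
struct Tok(String);

/// Tokens produced on demand from a snapshot plus a call count,
/// loosely mirroring the role of `LazyTokenStreamInner::Lazy`.
struct LazyTokens {
    make: Box<dyn Fn() -> Vec<Tok>>,
}

impl LazyTokens {
    fn create(&self) -> Vec<Tok> {
        (self.make)()
    }
}

struct Cursor {
    toks: Vec<Tok>,
    pos: usize,
}

impl Cursor {
    fn next(&mut self) -> Option<Tok> {
        let t = self.toks.get(self.pos).cloned();
        if t.is_some() {
            self.pos += 1;
        }
        t
    }
}

/// Run `f`, returning its result plus the tokens it consumed, or `None`
/// if it consumed nothing (the early return added in the diff).
fn capture<R>(cursor: &mut Cursor, f: impl FnOnce(&mut Cursor) -> R) -> (R, Option<LazyTokens>) {
    let snapshot = cursor.toks.clone();
    let start = cursor.pos;
    let ret = f(cursor);
    let num_calls = cursor.pos - start;
    if num_calls == 0 {
        // We didn't capture any tokens.
        return (ret, None);
    }
    // Defer building the token list until someone actually asks for it.
    let lazy = LazyTokens {
        make: Box::new(move || snapshot[start..start + num_calls].to_vec()),
    };
    (ret, Some(lazy))
}

fn main() {
    let mut cursor = Cursor {
        toks: ["#", "[", "inline", "]"].iter().map(|s| Tok(s.to_string())).collect(),
        pos: 0,
    };

    // A callback that consumes tokens gets them back as `Some(...)`.
    let (count, tokens) = capture(&mut cursor, |c| {
        let mut n = 0;
        while c.next().is_some() {
            n += 1;
        }
        n
    });
    assert_eq!(count, 4);
    assert_eq!(tokens.unwrap().create().len(), 4);

    // A callback that consumes nothing yields `None`.
    let (_unit, tokens) = capture(&mut cursor, |_c| ());
    assert!(tokens.is_none());
}
```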