Set tokens on AST node in collect_tokens

A new `HasTokens` trait is introduced, which is used to move logic from
the callers of `collect_tokens` into the body of `collect_tokens`.

In addition to reducing duplication, this paves the way for PR #80689,
which needs to perform additional logic during token collection.
Aaron Hill 2021-01-13 16:28:57 -05:00
parent 9bc8b00b4a
commit a961e6785c
7 changed files with 101 additions and 147 deletions
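
The diff below shows only the parser-side half of the change. From the way `collect_tokens` now calls `ret.finalize_tokens(...)`, the new trait in `rustc_ast` presumably looks roughly like the sketch below; the exact definition and the impls for each node type live in the other changed files and may differ in detail:

pub trait HasTokens {
    /// Called by `collect_tokens` once the lazy stream has been built,
    /// letting each AST node type decide how to store its own tokens.
    fn finalize_tokens(&mut self, tokens: LazyTokenStream);
}

// Illustrative impl: nodes such as `Expr` already carry a
// `tokens: Option<LazyTokenStream>` field, so an impl plausibly
// just stashes the stream there.
impl HasTokens for Expr {
    fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
        self.tokens = Some(tokens);
    }
}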

@@ -19,8 +19,8 @@ use rustc_ast::token::{self, DelimToken, Token, TokenKind};
 use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
 use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_ast::DUMMY_NODE_ID;
-use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
-use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
+use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, HasTokens};
+use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
 use rustc_ast::{Visibility, VisibilityKind};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
@@ -1234,10 +1234,10 @@ impl<'a> Parser<'a> {
     /// This restriction shouldn't be an issue in practice,
     /// since this function is used to record the tokens for
     /// a parsed AST item, which always has matching delimiters.
-    pub fn collect_tokens<R>(
+    pub fn collect_tokens<R: HasTokens>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
-    ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
+    ) -> PResult<'a, R> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = TokenCursor {
             frame: self.token_cursor.frame.clone(),
@@ -1249,7 +1249,7 @@ impl<'a> Parser<'a> {
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
-        let ret = f(self)?;
+        let mut ret = f(self)?;

         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
@@ -1319,7 +1319,8 @@ impl<'a> Parser<'a> {
             trailing_semi: false,
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
-        Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
+        ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
+        Ok(ret)
     }

     /// `::{` or `::*`
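
For call sites, the net effect is that the `(R, Option<LazyTokenStream>)` tuple and the manual token assignment disappear. A sketch of a hypothetical caller, before and after (the `parse_expr` call is illustrative, not taken from this diff):

// Before: each caller unpacked the tuple and attached the tokens itself.
let (mut expr, tokens) = self.collect_tokens(|this| this.parse_expr())?;
expr.tokens = tokens;

// After: `collect_tokens` invokes `finalize_tokens` internally, so the
// caller simply receives the finished node.
let expr = self.collect_tokens(|this| this.parse_expr())?;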