
Refactor token collection to capture trailing token immediately

Commit: ccfc292999
Parent: dc1eee2f25
Author: Aaron Hill
Date:   2021-01-14 10:42:01 -05:00
5 changed files with 124 additions and 118 deletions


@@ -61,6 +61,11 @@ pub enum ForceCollect {
     No,
 }
 
+pub enum TrailingToken {
+    None,
+    Semi,
+}
+
 /// Like `maybe_whole_expr`, but for things other than expressions.
 #[macro_export]
 macro_rules! maybe_whole {
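
For orientation, here is a minimal standalone sketch (not rustc code; all names are illustrative) of what the new enum expresses: a parse callback can report that one more token, currently only `;`, belongs to what it just parsed, even though the callback did not consume that token itself:

    #[derive(Debug, PartialEq)]
    enum TrailingToken {
        None,
        Semi,
    }

    // Toy "parse a statement" over a char slice: consumes up to (but not
    // including) a trailing `;`, and reports whether that `;` is present.
    fn parse_stmt(src: &[char], pos: &mut usize) -> (String, TrailingToken) {
        let mut stmt = String::new();
        while *pos < src.len() && src[*pos] != ';' {
            stmt.push(src[*pos]);
            *pos += 1;
        }
        let trailing =
            if *pos < src.len() { TrailingToken::Semi } else { TrailingToken::None };
        (stmt, trailing)
    }

    fn main() {
        let src: Vec<char> = "let x = 1;".chars().collect();
        let mut pos = 0;
        let (stmt, trailing) = parse_stmt(&src, &mut pos);
        assert_eq!(stmt, "let x = 1");
        // The `;` itself was not consumed; the report tells the caller
        // to count it as part of the statement's tokens anyway.
        assert_eq!(trailing, TrailingToken::Semi);
    }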
@@ -1225,6 +1230,13 @@ impl<'a> Parser<'a> {
         }
     }
 
+    pub fn collect_tokens<R: HasTokens>(
+        &mut self,
+        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
+    ) -> PResult<'a, R> {
+        self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
+    }
+
 /// Records all tokens consumed by the provided callback,
 /// including the current token. These tokens are collected
 /// into a `LazyTokenStream`, and returned along with the result
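
This hunk keeps the old `collect_tokens` entry point as a thin wrapper that supplies `TrailingToken::None`. A simplified sketch of that adapter pattern in isolation, with `PResult`/`HasTokens` replaced by a plain `Result` (assumed, simplified signatures, not rustc's real types):

    #[allow(dead_code)]
    enum TrailingToken {
        None,
        Semi,
    }

    struct Parser;

    impl Parser {
        // General form: the callback reports a trailing token with its result.
        fn collect_tokens_trailing_token<R>(
            &mut self,
            f: impl FnOnce(&mut Self) -> Result<(R, TrailingToken), String>,
        ) -> Result<R, String> {
            let (ret, _trailing) = f(self)?;
            // ... token-stream bookkeeping elided ...
            Ok(ret)
        }

        // Old form: callers that never capture a trailing token keep their
        // existing signature; `TrailingToken::None` is supplied here.
        fn collect_tokens<R>(
            &mut self,
            f: impl FnOnce(&mut Self) -> Result<R, String>,
        ) -> Result<R, String> {
            self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
        }
    }

    fn main() {
        let mut p = Parser;
        let n = p.collect_tokens(|_| Ok(42)).unwrap();
        assert_eq!(n, 42);
    }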
@@ -1241,9 +1253,9 @@ impl<'a> Parser<'a> {
     /// This restriction shouldn't be an issue in practice,
     /// since this function is used to record the tokens for
     /// a parsed AST item, which always has matching delimiters.
-    pub fn collect_tokens<R: HasTokens>(
+    pub fn collect_tokens_trailing_token<R: HasTokens>(
         &mut self,
-        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
+        f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
     ) -> PResult<'a, R> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = TokenCursor {
@@ -1256,7 +1268,7 @@
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
 
-        let mut ret = f(self)?;
+        let (mut ret, trailing_token) = f(self)?;
 
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
@@ -1275,15 +1287,10 @@
             cursor_snapshot: TokenCursor,
             num_calls: usize,
             desugar_doc_comments: bool,
-            trailing_semi: bool,
             append_unglued_token: Option<TreeAndSpacing>,
         }
 
         impl CreateTokenStream for LazyTokenStreamImpl {
             fn create_token_stream(&self) -> TokenStream {
-                let mut num_calls = self.num_calls;
-                if self.trailing_semi {
-                    num_calls += 1;
-                }
                 // The token produced by the final call to `next` or `next_desugared`
                 // was not actually consumed by the callback. The combination
                 // of chaining the initial token and using `take` produces the desired
@@ -1291,39 +1298,33 @@
                 // and omit the final token otherwise.
                 let mut cursor_snapshot = self.cursor_snapshot.clone();
                 let tokens = std::iter::once(self.start_token.clone())
-                    .chain((0..num_calls).map(|_| {
+                    .chain((0..self.num_calls).map(|_| {
                         if self.desugar_doc_comments {
                             cursor_snapshot.next_desugared()
                         } else {
                             cursor_snapshot.next()
                         }
                     }))
-                    .take(num_calls);
+                    .take(self.num_calls);
 
                 make_token_stream(tokens, self.append_unglued_token.clone())
             }
-            fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
-                if self.trailing_semi {
-                    panic!("Called `add_trailing_semi` twice!");
-                }
-                if self.append_unglued_token.is_some() {
-                    panic!(
-                        "Cannot call `add_trailing_semi` when we have an unglued token {:?}",
-                        self.append_unglued_token
-                    );
-                }
-                let mut new = self.clone();
-                new.trailing_semi = true;
-                Box::new(new)
-            }
         }
 
+        let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
+        match trailing_token {
+            TrailingToken::None => {}
+            TrailingToken::Semi => {
+                assert_eq!(self.token.kind, token::Semi);
+                num_calls += 1;
+            }
+        }
+
         let lazy_impl = LazyTokenStreamImpl {
             start_token,
-            num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls,
+            num_calls,
             cursor_snapshot,
             desugar_doc_comments: self.desugar_doc_comments,
-            trailing_semi: false,
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
         ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
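
This hunk carries the core change: rather than patching the call count lazily inside `create_token_stream` via the removed `trailing_semi` flag, the count is fixed up once, eagerly, at the moment the callback returns. A standalone model of that accounting, assuming a toy cursor that counts its `next` calls (illustrative names, not rustc's types):

    #[allow(dead_code)]
    enum TrailingToken {
        None,
        Semi,
    }

    struct Cursor {
        tokens: Vec<char>,
        num_next_calls: usize,
    }

    impl Cursor {
        fn next(&mut self) -> Option<char> {
            let tok = self.tokens.get(self.num_next_calls).copied();
            self.num_next_calls += 1;
            tok
        }
    }

    fn main() {
        let mut cursor = Cursor { tokens: "ab;".chars().collect(), num_next_calls: 0 };
        let snapshot = cursor.num_next_calls;

        // The callback consumes `a` and `b`, stops at `;`, and reports it.
        cursor.next();
        cursor.next();
        let trailing = TrailingToken::Semi;

        // Eager fixup, mirroring the diff: count the calls the callback
        // made, then add one for the reported-but-unconsumed trailing token.
        let mut num_calls = cursor.num_next_calls - snapshot;
        match trailing {
            TrailingToken::None => {}
            TrailingToken::Semi => {
                // rustc additionally asserts the current token really is `;`.
                num_calls += 1;
            }
        }
        assert_eq!(num_calls, 3); // `a`, `b`, plus the trailing `;`
    }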
@@ -1427,9 +1428,9 @@ macro_rules! maybe_collect_tokens {
         if matches!($force_collect, ForceCollect::Yes)
             || $crate::parser::attr::maybe_needs_tokens($attrs)
         {
-            $self.collect_tokens($f)
+            $self.collect_tokens_trailing_token($f)
         } else {
-            $f($self)
+            Ok($f($self)?.0)
         }
     };
 }
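
Since every callback now returns a `(result, TrailingToken)` pair, the non-collecting fast path simply drops the second element with `.0`. A function-based sketch of the macro's two arms under the new contract (simplified, hypothetical types, not the real macro):

    #[allow(dead_code)]
    enum TrailingToken {
        None,
        Semi,
    }

    type PResult<T> = Result<T, String>;

    struct Parser {
        force_collect: bool,
    }

    impl Parser {
        fn collect_tokens_trailing_token<R>(
            &mut self,
            f: impl FnOnce(&mut Self) -> PResult<(R, TrailingToken)>,
        ) -> PResult<R> {
            // The real version records a LazyTokenStream here; elided.
            Ok(f(self)?.0)
        }

        // Function stand-in for `maybe_collect_tokens!`: both arms accept
        // the same tuple-returning callback; the non-collecting arm just
        // discards the TrailingToken report with `.0`.
        fn maybe_collect_tokens<R>(
            &mut self,
            f: impl FnOnce(&mut Self) -> PResult<(R, TrailingToken)>,
        ) -> PResult<R> {
            if self.force_collect {
                self.collect_tokens_trailing_token(f)
            } else {
                Ok(f(self)?.0)
            }
        }
    }

    fn main() {
        let mut p = Parser { force_collect: false };
        let item = p.maybe_collect_tokens(|_| Ok(("item", TrailingToken::None))).unwrap();
        assert_eq!(item, "item");
    }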