Remove TreeAndSpacing.

A `TokenStream` contains a `Lrc<Vec<(TokenTree, Spacing)>>`. But this is
not quite right. `Spacing` makes sense for `TokenTree::Token`, but does
not make sense for `TokenTree::Delimited`, because a
`TokenTree::Delimited` cannot be joined with another `TokenTree`.
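
To make the problem concrete, here is a minimal sketch of the pre-change shape. The stub types (`Span`, `TokenKind`, `Token`, `DelimSpan`, `Delimiter`) are placeholders standing in for the real `rustc_ast` definitions, and `Lrc` is approximated with `Arc`; only the overall layout matters:
```rust
use std::sync::Arc as Lrc; // rustc's `Lrc` is (roughly) an `Arc` alias

// Placeholder stubs for the real rustc_ast types; only their names matter here.
pub struct Span;
pub struct TokenKind; // stands in for `token::Semi`, `token::Pound`, ...
pub struct Token { pub kind: TokenKind, pub span: Span }
pub struct DelimSpan;
pub enum Delimiter { Parenthesis, Brace, Bracket, Invisible }

#[derive(Clone, Copy)]
pub enum Spacing { Alone, Joint }

// Pre-change shape: spacing is stored alongside *every* tree in the stream,
// even though it is only meaningful for the `Token` variant.
pub enum TokenTree {
    Token(Token),
    Delimited(DelimSpan, Delimiter, TokenStream),
}
pub type TreeAndSpacing = (TokenTree, Spacing);
pub struct TokenStream(Lrc<Vec<TreeAndSpacing>>);
```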

This commit fixes this problem, by adding `Spacing` to `TokenTree::Token`,
changing `TokenStream` to contain a `Lrc<Vec<TokenTree>>`, and removing the
`TreeAndSpacing` typedef.
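
Using the same stub types (and replacing the two definitions in the sketch above), the post-change shape is roughly:
```rust
// Post-change shape: `Spacing` lives inside the `Token` variant, the stream
// holds plain trees, and the `TreeAndSpacing` tuple disappears.
pub enum TokenTree {
    Token(Token, Spacing),
    Delimited(DelimSpan, Delimiter, TokenStream),
}
pub struct TokenStream(Lrc<Vec<TokenTree>>);
```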

The commit removes these two impls:
- `impl From<TokenTree> for TokenStream`
- `impl From<TokenTree> for TreeAndSpacing`

These were useful, but also resulted in code with many `.into()` calls
that was hard to read, particularly for anyone not highly familiar with
the relevant types. This commit makes some other changes to compensate:
- `TokenTree::token()` becomes `TokenTree::token_{alone,joint}()`.
- `TokenStream::token_{alone,joint}()` are added.
- `TokenStream::delimited()` is added.

As a result, code like this:
```rust
TokenTree::token(token::Semi, stmt.span).into()
```
becomes this:
```rust
TokenStream::token_alone(token::Semi, stmt.span)
```
This makes the type of the result, and its spacing, clearer.
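
For orientation, the new constructors amount to roughly the following, sketched on top of the simplified post-change types above (the real methods live in `rustc_ast::tokenstream` and may differ in detail):
```rust
impl Token {
    fn new(kind: TokenKind, span: Span) -> Token {
        Token { kind, span }
    }
}

impl TokenTree {
    /// A token that is not joined to the following token.
    pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token::new(kind, span), Spacing::Alone)
    }

    /// A token that is joined to the following token (e.g. the two `&`s in `&&`).
    pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token::new(kind, span), Spacing::Joint)
    }
}

impl TokenStream {
    /// A stream containing a single, unjoined token.
    pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
        TokenStream(Lrc::new(vec![TokenTree::token_alone(kind, span)]))
    }

    /// A stream containing a single token joined to the following token.
    pub fn token_joint(kind: TokenKind, span: Span) -> TokenStream {
        TokenStream(Lrc::new(vec![TokenTree::token_joint(kind, span)]))
    }

    /// A stream containing a single delimited group.
    pub fn delimited(span: DelimSpan, delim: Delimiter, tts: TokenStream) -> TokenStream {
        TokenStream(Lrc::new(vec![TokenTree::Delimited(span, delim, tts)]))
    }
}
```
With these, the `TokenStream::token_alone(token::Semi, stmt.span)` call shown above builds its one-token stream directly, with the spacing spelled out at the call site.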

These changes also simplify `Cursor` and `CursorRef`, because they no longer
need to distinguish between `next` and `next_with_spacing`.
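
A rough picture of the cursor side, again using the stub types rather than the real `rustc_ast::tokenstream` cursors: because every tree now carries its own `Spacing`, a single `next`-style method is enough, and callers that care about spacing simply destructure the returned tree.
```rust
// Hypothetical, simplified cursor over a borrowed token stream.
pub struct CursorRef<'t> {
    stream: &'t TokenStream,
    index: usize,
}

impl<'t> CursorRef<'t> {
    // One method replaces the old `next` / `next_with_spacing` pair: the
    // returned `TokenTree::Token(_, spacing)` already says whether the token
    // is joined to its successor.
    pub fn next_ref(&mut self) -> Option<&'t TokenTree> {
        let tree = self.stream.0.get(self.index)?;
        self.index += 1;
        Some(tree)
    }
}
```
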
Author: Nicholas Nethercote
Date:   2022-07-28 10:31:04 +10:00
Commit: 332dffb1f9 (parent bdf520fd41)

23 changed files with 317 additions and 307 deletions


```diff
@@ -268,13 +268,13 @@ impl TokenCursor {
             // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
             // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
             // removed.
-            if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() {
+            if let Some(tree) = self.frame.tree_cursor.next_ref() {
                 match tree {
-                    &TokenTree::Token(ref token) => match (desugar_doc_comments, token) {
+                    &TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
                         (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
                             return self.desugar(attr_style, data, span);
                         }
-                        _ => return (token.clone(), *spacing),
+                        _ => return (token.clone(), spacing),
                     },
                     &TokenTree::Delimited(sp, delim, ref tts) => {
                         // Set `open_delim` to true here because we deal with it immediately.
@@ -318,12 +318,14 @@ impl TokenCursor {
             delim_span,
             Delimiter::Bracket,
             [
-                TokenTree::token(token::Ident(sym::doc, false), span),
-                TokenTree::token(token::Eq, span),
-                TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), span),
+                TokenTree::token_alone(token::Ident(sym::doc, false), span),
+                TokenTree::token_alone(token::Eq, span),
+                TokenTree::token_alone(
+                    TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
+                    span,
+                ),
             ]
-            .iter()
-            .cloned()
+            .into_iter()
             .collect::<TokenStream>(),
         );
@@ -332,14 +334,16 @@ impl TokenCursor {
             TokenCursorFrame::new(
                 None,
                 if attr_style == AttrStyle::Inner {
-                    [TokenTree::token(token::Pound, span), TokenTree::token(token::Not, span), body]
-                        .iter()
-                        .cloned()
-                        .collect::<TokenStream>()
+                    [
+                        TokenTree::token_alone(token::Pound, span),
+                        TokenTree::token_alone(token::Not, span),
+                        body,
+                    ]
+                    .into_iter()
+                    .collect::<TokenStream>()
                 } else {
-                    [TokenTree::token(token::Pound, span), body]
-                        .iter()
-                        .cloned()
+                    [TokenTree::token_alone(token::Pound, span), body]
+                        .into_iter()
                         .collect::<TokenStream>()
                 },
             ),
@@ -1042,7 +1046,7 @@ impl<'a> Parser<'a> {
         if all_normal {
             return match frame.tree_cursor.look_ahead(dist - 1) {
                 Some(tree) => match tree {
-                    TokenTree::Token(token) => looker(token),
+                    TokenTree::Token(token, _) => looker(token),
                     TokenTree::Delimited(dspan, delim, _) => {
                         looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                     }
@@ -1226,7 +1230,7 @@ impl<'a> Parser<'a> {
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
                 self.bump();
-                TokenTree::Token(self.prev_token.clone())
+                TokenTree::Token(self.prev_token.clone(), Spacing::Alone)
             }
         }
     }
@@ -1245,7 +1249,7 @@ impl<'a> Parser<'a> {
         loop {
             match self.token.kind {
                 token::Eof | token::CloseDelim(..) => break,
-                _ => result.push(self.parse_token_tree().into()),
+                _ => result.push(self.parse_token_tree()),
             }
         }
         TokenStream::new(result)
```