Auto merge of #99887 - nnethercote:rm-TreeAndSpacing, r=petrochenkov
Remove `TreeAndSpacing`.

A `TokenStream` contains a `Lrc<Vec<(TokenTree, Spacing)>>`. But this is not quite right. `Spacing` makes sense for `TokenTree::Token`, but does not make sense for `TokenTree::Delimited`, because a `TokenTree::Delimited` cannot be joined with another `TokenTree`.

This commit fixes this problem by adding `Spacing` to `TokenTree::Token`, changing `TokenStream` to contain a `Lrc<Vec<TokenTree>>`, and removing the `TreeAndSpacing` typedef.

The commit removes these two impls:
- `impl From<TokenTree> for TokenStream`
- `impl From<TokenTree> for TreeAndSpacing`

These were useful, but also resulted in code with many `.into()` calls that was hard to read, particularly for anyone not highly familiar with the relevant types.

This commit makes some other changes to compensate:
- `TokenTree::token()` becomes `TokenTree::token_{alone,joint}()`.
- `TokenStream::token_{alone,joint}()` are added.
- `TokenStream::delimited` is added.

This results in code like this:
```rust
TokenTree::token(token::Semi, stmt.span).into()
```
changing to this:
```rust
TokenStream::token_alone(token::Semi, stmt.span)
```
This makes the type of the result, and its spacing, clearer.

These changes also simplify `Cursor` and `CursorRef`, because they no longer need to distinguish between `next` and `next_with_spacing`.

r? `@petrochenkov`
This commit is contained in:
commit 1202bbaf48

23 changed files with 317 additions and 307 deletions
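To make the shape of the change concrete, here is a deliberately simplified, self-contained sketch of the before/after data model. Everything below is a stand-in (plain `String`s instead of real `Token`s, no `Lrc`, no spans) rather than the actual `rustc_ast` definitions; the point is only that `Spacing` now travels inside `TokenTree::Token`, so a stream is a plain `Vec<TokenTree>` and the old `From<TokenTree>` conversions are unnecessary.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone, // not immediately followed by another operator token
    Joint, // may be glued to the following token (e.g. `=` `=` -> `==`)
}

#[derive(Clone, Debug)]
enum TokenTree {
    Token(String, Spacing),    // the real variant stores a `Token`, not a `String`
    Delimited(Vec<TokenTree>), // a delimited group can never be joined, so no `Spacing`
}

#[derive(Clone, Debug, Default)]
struct TokenStream(Vec<TokenTree>); // was conceptually `Vec<(TokenTree, Spacing)>`

impl TokenTree {
    fn token_alone(tok: &str) -> TokenTree {
        TokenTree::Token(tok.to_string(), Spacing::Alone)
    }
    fn token_joint(tok: &str) -> TokenTree {
        TokenTree::Token(tok.to_string(), Spacing::Joint)
    }
}

impl TokenStream {
    // The constructor names both the result type and the spacing explicitly,
    // replacing the old `TokenTree::token(..).into()` pattern.
    fn token_alone(tok: &str) -> TokenStream {
        TokenStream(vec![TokenTree::token_alone(tok)])
    }
}

fn main() {
    let semi = TokenStream::token_alone(";");
    let group = TokenTree::Delimited(vec![
        TokenTree::token_joint("="),
        TokenTree::token_alone("="),
    ]);
    assert_eq!(semi.0.len(), 1);
    println!("{semi:?}\n{group:?}");
}
```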
```diff
@@ -1,11 +1,7 @@
 use super::{StringReader, UnmatchedBrace};
 
 use rustc_ast::token::{self, Delimiter, Token};
-use rustc_ast::tokenstream::{
-    DelimSpan,
-    Spacing::{self, *},
-    TokenStream, TokenTree, TreeAndSpacing,
-};
+use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::PResult;
```
```diff
@@ -77,7 +73,7 @@ impl<'a> TokenTreesReader<'a> {
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndSpacing> {
+    fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
         let sm = self.string_reader.sess.source_map();
 
         match self.token.kind {
```
```diff
@@ -223,7 +219,7 @@ impl<'a> TokenTreesReader<'a> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(delim_span, delim, tts).into())
+                Ok(TokenTree::Delimited(delim_span, delim, tts))
             }
             token::CloseDelim(delim) => {
                 // An unexpected closing delimiter (i.e., there is no
```
```diff
@@ -258,12 +254,12 @@ impl<'a> TokenTreesReader<'a> {
                 Err(err)
             }
             _ => {
-                let tt = TokenTree::Token(self.token.take());
+                let tok = self.token.take();
                 let mut spacing = self.bump();
                 if !self.token.is_op() {
-                    spacing = Alone;
+                    spacing = Spacing::Alone;
                 }
-                Ok((tt, spacing))
+                Ok(TokenTree::Token(tok, spacing))
             }
         }
     }
```
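The hunk above is where the lexer attaches `Spacing` directly when it builds a `TokenTree::Token`. As a rough illustration of the rule it encodes (a token stays `Joint` only if the token immediately following it is itself an operator token), here is a standalone sketch; `is_op` and `spacing_for` are hypothetical stand-ins for `Token::is_op` and the reader's bookkeeping, not real rustc functions.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

// Hypothetical stand-in for `Token::is_op`.
fn is_op(tok: &str) -> bool {
    matches!(tok, "=" | "<" | ">" | "+" | "-" | "!" | "&" | "|")
}

// `no_gap_before_next` plays the role of the value returned by `self.bump()`;
// `next_tok` plays the role of `self.token` after the bump.
fn spacing_for(no_gap_before_next: bool, next_tok: &str) -> Spacing {
    let mut spacing = if no_gap_before_next { Spacing::Joint } else { Spacing::Alone };
    if !is_op(next_tok) {
        // Mirrors `if !self.token.is_op() { spacing = Spacing::Alone; }`.
        spacing = Spacing::Alone;
    }
    spacing
}

fn main() {
    assert_eq!(spacing_for(true, "="), Spacing::Joint);  // `<` glued tight to `=` stays Joint
    assert_eq!(spacing_for(true, "x"), Spacing::Alone);  // `<` right before an identifier is Alone
    assert_eq!(spacing_for(false, "="), Spacing::Alone); // whitespace in between means Alone
}
```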
```diff
@@ -277,20 +273,20 @@ impl<'a> TokenTreesReader<'a> {
 
 #[derive(Default)]
 struct TokenStreamBuilder {
-    buf: Vec<TreeAndSpacing>,
+    buf: Vec<TokenTree>,
 }
 
 impl TokenStreamBuilder {
-    fn push(&mut self, (tree, joint): TreeAndSpacing) {
-        if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last()
-            && let TokenTree::Token(token) = &tree
+    fn push(&mut self, tree: TokenTree) {
+        if let Some(TokenTree::Token(prev_token, Spacing::Joint)) = self.buf.last()
+            && let TokenTree::Token(token, joint) = &tree
             && let Some(glued) = prev_token.glue(token)
         {
             self.buf.pop();
-            self.buf.push((TokenTree::Token(glued), joint));
+            self.buf.push(TokenTree::Token(glued, *joint));
             return;
         }
-        self.buf.push((tree, joint))
+        self.buf.push(tree);
     }
 
     fn into_token_stream(self) -> TokenStream {
```
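`TokenStreamBuilder::push` above now matches on the `Spacing` stored in the last pushed tree itself: if that token was `Joint` and can be glued to the incoming token (e.g. two adjacent `=` tokens becoming `==`), the pair is replaced by one glued token carrying the incoming token's spacing. A rough standalone sketch of that idea, with `glue` and string tokens as made-up stand-ins for `Token::glue` and real tokens (the real method uses `let`-chains, which this stable-Rust sketch restructures):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

#[derive(Clone, Debug, PartialEq)]
enum TokenTree {
    Token(String, Spacing),
    Delimited(Vec<TokenTree>),
}

// Hypothetical stand-in for `Token::glue`: only knows that `=` + `=` is `==`.
fn glue(prev: &str, next: &str) -> Option<String> {
    (prev == "=" && next == "=").then(|| "==".to_string())
}

#[derive(Default)]
struct TokenStreamBuilder {
    buf: Vec<TokenTree>,
}

impl TokenStreamBuilder {
    fn push(&mut self, tree: TokenTree) {
        // Can the trailing `Joint` token be glued to the incoming one?
        let glued = match (self.buf.last(), &tree) {
            (Some(TokenTree::Token(prev, Spacing::Joint)), TokenTree::Token(tok, spacing)) => {
                glue(prev, tok).map(|g| TokenTree::Token(g, *spacing))
            }
            _ => None,
        };
        match glued {
            Some(glued) => {
                // Replace the trailing token with the glued token, which
                // inherits the incoming token's spacing.
                self.buf.pop();
                self.buf.push(glued);
            }
            None => self.buf.push(tree),
        }
    }
}

fn main() {
    let mut b = TokenStreamBuilder::default();
    b.push(TokenTree::Token("=".into(), Spacing::Joint));
    b.push(TokenTree::Token("=".into(), Spacing::Alone));
    assert_eq!(b.buf, vec![TokenTree::Token("==".into(), Spacing::Alone)]);
    b.push(TokenTree::Delimited(vec![]));
    assert_eq!(b.buf.len(), 2);
}
```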
```diff
@@ -1664,8 +1664,8 @@ impl<'a> Parser<'a> {
             let body = self.parse_token_tree(); // `MacBody`
             // Convert `MacParams MacBody` into `{ MacParams => MacBody }`.
             let bspan = body.span();
-            let arrow = TokenTree::token(token::FatArrow, pspan.between(bspan)); // `=>`
-            let tokens = TokenStream::new(vec![params.into(), arrow.into(), body.into()]);
+            let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>`
+            let tokens = TokenStream::new(vec![params, arrow, body]);
             let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
             P(MacArgs::Delimited(dspan, MacDelimiter::Brace, tokens))
         } else {
```
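In the macro-definition parsing above, `params`, `arrow`, and `body` are already `TokenTree`s, so they can be passed straight to `TokenStream::new` with no `.into()` conversions. A toy version of that call with simplified stand-in types (not the real `rustc_ast` API):

```rust
#[derive(Clone, Debug)]
enum Spacing {
    Alone, // `Joint` omitted; this snippet only needs `Alone`
}

#[derive(Clone, Debug)]
enum TokenTree {
    Token(String, Spacing),
    Delimited(Vec<TokenTree>),
}

#[derive(Clone, Debug)]
struct TokenStream(Vec<TokenTree>);

impl TokenStream {
    // Mirrors the call shape in the hunk above: a plain vector of trees.
    fn new(trees: Vec<TokenTree>) -> TokenStream {
        TokenStream(trees)
    }
}

fn main() {
    // Roughly `{ MacParams => MacBody }`: three trees, no `.into()` calls.
    let params = TokenTree::Delimited(vec![]);
    let arrow = TokenTree::Token("=>".to_string(), Spacing::Alone);
    let body = TokenTree::Delimited(vec![]);
    let tokens = TokenStream::new(vec![params, arrow, body]);
    assert_eq!(tokens.0.len(), 3);
}
```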
```diff
@@ -268,13 +268,13 @@ impl TokenCursor {
             // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
             // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
             // removed.
-            if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() {
+            if let Some(tree) = self.frame.tree_cursor.next_ref() {
                 match tree {
-                    &TokenTree::Token(ref token) => match (desugar_doc_comments, token) {
+                    &TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
                         (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
                             return self.desugar(attr_style, data, span);
                         }
-                        _ => return (token.clone(), *spacing),
+                        _ => return (token.clone(), spacing),
                     },
                     &TokenTree::Delimited(sp, delim, ref tts) => {
                         // Set `open_delim` to true here because we deal with it immediately.
```
```diff
@@ -318,12 +318,14 @@ impl TokenCursor {
             delim_span,
             Delimiter::Bracket,
             [
-                TokenTree::token(token::Ident(sym::doc, false), span),
-                TokenTree::token(token::Eq, span),
-                TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), span),
+                TokenTree::token_alone(token::Ident(sym::doc, false), span),
+                TokenTree::token_alone(token::Eq, span),
+                TokenTree::token_alone(
+                    TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
+                    span,
+                ),
             ]
-            .iter()
-            .cloned()
+            .into_iter()
             .collect::<TokenStream>(),
         );
 
```
```diff
@@ -332,14 +334,16 @@ impl TokenCursor {
                 TokenCursorFrame::new(
                     None,
                     if attr_style == AttrStyle::Inner {
-                        [TokenTree::token(token::Pound, span), TokenTree::token(token::Not, span), body]
-                            .iter()
-                            .cloned()
-                            .collect::<TokenStream>()
+                        [
+                            TokenTree::token_alone(token::Pound, span),
+                            TokenTree::token_alone(token::Not, span),
+                            body,
+                        ]
+                        .into_iter()
+                        .collect::<TokenStream>()
                     } else {
-                        [TokenTree::token(token::Pound, span), body]
-                            .iter()
-                            .cloned()
+                        [TokenTree::token_alone(token::Pound, span), body]
+                            .into_iter()
                             .collect::<TokenStream>()
                     },
                 ),
```
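The two hunks above swap `.iter().cloned()` for `.into_iter()`: the arrays now hold owned `TokenTree`s and can be collected directly into a `TokenStream`. A minimal sketch of that collection pattern, again with simplified stand-in types rather than the real ones (by-value array `into_iter` assumes edition 2021 or later):

```rust
use std::iter::FromIterator;

#[derive(Clone, Copy, Debug)]
enum Spacing {
    Alone, // `Joint` omitted; not needed here
}

#[derive(Clone, Debug)]
enum TokenTree {
    Token(String, Spacing),
}

#[derive(Debug, Default)]
struct TokenStream(Vec<TokenTree>);

// With owned `TokenTree`s, an array of them collects directly:
// `[a, b].into_iter().collect::<TokenStream>()`, no `.iter().cloned()` dance.
impl FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> TokenStream {
        TokenStream(iter.into_iter().collect())
    }
}

fn main() {
    let pound = TokenTree::Token("#".to_string(), Spacing::Alone);
    let not = TokenTree::Token("!".to_string(), Spacing::Alone);
    let stream = [pound, not].into_iter().collect::<TokenStream>();
    assert_eq!(stream.0.len(), 2);
}
```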
```diff
@@ -1042,7 +1046,7 @@ impl<'a> Parser<'a> {
         if all_normal {
             return match frame.tree_cursor.look_ahead(dist - 1) {
                 Some(tree) => match tree {
-                    TokenTree::Token(token) => looker(token),
+                    TokenTree::Token(token, _) => looker(token),
                     TokenTree::Delimited(dspan, delim, _) => {
                         looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                     }
```
```diff
@@ -1226,7 +1230,7 @@ impl<'a> Parser<'a> {
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
                 self.bump();
-                TokenTree::Token(self.prev_token.clone())
+                TokenTree::Token(self.prev_token.clone(), Spacing::Alone)
             }
         }
     }
```
```diff
@@ -1245,7 +1249,7 @@ impl<'a> Parser<'a> {
         loop {
             match self.token.kind {
                 token::Eof | token::CloseDelim(..) => break,
-                _ => result.push(self.parse_token_tree().into()),
+                _ => result.push(self.parse_token_tree()),
             }
         }
         TokenStream::new(result)
```
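Finally, because each `TokenTree::Token` now carries its own `Spacing`, a cursor over a token stream needs only one `next`-style method, which is the `Cursor`/`CursorRef` simplification mentioned in the commit message. A hypothetical, much-reduced cursor showing that shape (not the real `rustc_ast` cursors):

```rust
#[derive(Clone, Copy, Debug)]
enum Spacing {
    Alone,
    Joint,
}

#[derive(Clone, Debug)]
enum TokenTree {
    Token(String, Spacing),
    Delimited(Vec<TokenTree>),
}

/// A by-reference cursor over a slice of token trees. The caller gets the
/// spacing "for free" by matching on the returned tree.
struct CursorRef<'a> {
    trees: &'a [TokenTree],
    index: usize,
}

impl<'a> CursorRef<'a> {
    fn new(trees: &'a [TokenTree]) -> Self {
        CursorRef { trees, index: 0 }
    }

    // One method is enough; previously a separate `next_with_spacing_ref`
    // was needed to also hand back the `Spacing` stored alongside the tree.
    fn next_ref(&mut self) -> Option<&'a TokenTree> {
        let tree = self.trees.get(self.index)?;
        self.index += 1;
        Some(tree)
    }
}

fn main() {
    let trees = vec![
        TokenTree::Token("=".to_string(), Spacing::Joint),
        TokenTree::Token("=".to_string(), Spacing::Alone),
        TokenTree::Delimited(vec![]),
    ];
    let mut cursor = CursorRef::new(&trees);
    while let Some(tree) = cursor.next_ref() {
        if let TokenTree::Token(tok, spacing) = tree {
            println!("{tok} ({spacing:?})");
        }
    }
}
```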