Rollup merge of #65261 - nnethercote:rm-Option-from-TokenStream, r=petrochenkov
Remove `Option` from `TokenStream`. A code simplification.

r? @petrochenkov
commit 6d28ed1ae6
10 changed files with 106 additions and 150 deletions
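The change is easiest to see in the `TokenStream` definition itself (the full hunk appears further down). Below is a rough, self-contained sketch of the before/after shape, using `std::sync::Arc` in place of rustc's `Lrc` and a dummy `TreeAndJoint`; it is illustrative, not the real rustc code:

```rust
use std::sync::Arc as Lrc;

type TreeAndJoint = (&'static str, bool); // stand-in for (TokenTree, IsJoint)

// Before: an empty stream could be `None`, avoiding an allocation.
#[allow(dead_code)]
struct OldTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);

// After: always a vector; `derive(Default)` supplies the empty stream, so every
// `TokenStream::empty()` call site in the diff becomes `TokenStream::default()`.
#[derive(Clone, Debug, Default)]
struct TokenStream(Lrc<Vec<TreeAndJoint>>);

fn main() {
    let empty = TokenStream::default();
    assert!(empty.0.is_empty());
}
```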
@@ -551,7 +551,7 @@ impl MetaItem {
 impl MetaItemKind {
     pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
-            MetaItemKind::Word => TokenStream::empty(),
+            MetaItemKind::Word => TokenStream::default(),
             MetaItemKind::NameValue(ref lit) => {
                 let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
@@ -676,12 +676,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
             }
             Some(TokenTree::Token(..)) => {}
-            None => return TokenStream::empty(),
+            None => return TokenStream::default(),
         }
         self.cx.span_err(span, "custom attribute invocations must be \
             of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
             followed by a delimiter token");
-        TokenStream::empty()
+        TokenStream::default()
     }

     fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {
@@ -95,7 +95,7 @@ pub(super) fn transcribe(
 ) -> TokenStream {
     // Nothing for us to transcribe...
     if src.is_empty() {
-        return TokenStream::empty();
+        return TokenStream::default();
     }

     // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
@@ -15,7 +15,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
     fn mac_placeholder() -> ast::Mac {
         ast::Mac {
             path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
-            tts: TokenStream::empty().into(),
+            tts: TokenStream::default().into(),
             delim: ast::MacDelimiter::Brace,
             span: DUMMY_SP,
             prior_type_ascription: None,
@@ -32,12 +32,12 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
         attrs: ThinVec::new(),
         kind: ast::ExprKind::Mac(mac_placeholder()),
     });
-    let ty = P(ast::Ty {
+    let ty = || P(ast::Ty {
         id,
         kind: ast::TyKind::Mac(mac_placeholder()),
         span,
     });
-    let pat = P(ast::Pat {
+    let pat = || P(ast::Pat {
         id,
         kind: ast::PatKind::Mac(mac_placeholder()),
         span,
@@ -83,7 +83,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
             body: expr_placeholder(),
             guard: None,
             id,
-            pat,
+            pat: pat(),
             span,
             is_placeholder: true,
         }
@@ -105,7 +105,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
             id,
             ident,
             is_shorthand: false,
-            pat,
+            pat: pat(),
             span,
             is_placeholder: true,
         }
@@ -124,9 +124,9 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
             ast::Param {
                 attrs: Default::default(),
                 id,
-                pat,
+                pat: pat(),
                 span,
-                ty,
+                ty: ty(),
                 is_placeholder: true,
             }
         ]),
@@ -136,7 +136,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
             id,
             ident: None,
             span,
-            ty,
+            ty: ty(),
             vis,
             is_placeholder: true,
         }
@@ -394,7 +394,7 @@ impl server::Types for Rustc<'_> {

 impl server::TokenStream for Rustc<'_> {
     fn new(&mut self) -> Self::TokenStream {
-        TokenStream::empty()
+        TokenStream::default()
     }
     fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()
@@ -610,10 +610,8 @@ pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
 }

 pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
-    visit_opt(tts, |tts| {
-        let tts = Lrc::make_mut(tts);
-        visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
-    })
+    let tts = Lrc::make_mut(tts);
+    visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
 }

 // Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
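With the `Option` gone, `noop_visit_tts` can destructure the stream directly and lean on `Lrc::make_mut` for copy-on-write mutation (`Lrc` is rustc's alias for `Rc`/`Arc`). A minimal standalone sketch of that pattern, using `std::sync::Arc` and plain strings rather than rustc's visitor types:

```rust
use std::sync::Arc;

// Visit every element of a shared vector, cloning the vector only if it is shared.
fn visit_all(tts: &mut Arc<Vec<String>>, mut visit: impl FnMut(&mut String)) {
    let tts = Arc::make_mut(tts); // copy-on-write: clones the Vec only when the Arc is shared
    for tree in tts.iter_mut() {
        visit(tree);
    }
}

fn main() {
    let mut stream = Arc::new(vec!["a".to_string(), "b".to_string()]);
    let alias = Arc::clone(&stream); // a second owner forces a clone on the first write
    visit_all(&mut stream, |t| t.push('!'));
    assert_eq!(*stream, vec!["a!".to_string(), "b!".to_string()]);
    assert_eq!(*alias, vec!["a".to_string(), "b".to_string()]); // the alias is untouched
}
```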
@@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
                 };
                 TokenStream::from_streams(smallvec![eq.into(), tokens])
             } else {
-                TokenStream::empty()
+                TokenStream::default()
             };
             ast::AttrItem { path, tokens }
         })
@@ -1273,7 +1273,7 @@ impl<'a> Parser<'a> {
                 // This can happen due to a bad interaction of two unrelated recovery mechanisms with
                 // mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(`
                 // (#62881).
-                return Ok((ret?, TokenStream::new(vec![])));
+                return Ok((ret?, TokenStream::default()));
             } else {
                 &mut self.token_cursor.stack[prev].last_token
             };
@@ -1288,7 +1288,7 @@ impl<'a> Parser<'a> {
                 // This can happen due to a bad interaction of two unrelated recovery mechanisms
                 // with mismatched delimiters *and* recovery lookahead on the likely typo
                 // `pub ident(` (#62895, different but similar to the case above).
-                return Ok((ret?, TokenStream::new(vec![])));
+                return Ok((ret?, TokenStream::default()));
             }
         };

@@ -136,13 +136,8 @@ impl TokenTree {
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
-///
-/// The use of `Option` is an optimization that avoids the need for an
-/// allocation when the stream is empty. However, it is not guaranteed that an
-/// empty stream is represented with `None`; it may be represented as a `Some`
-/// around an empty `Vec`.
-#[derive(Clone, Debug)]
-pub struct TokenStream(pub Option<Lrc<Vec<TreeAndJoint>>>);
+#[derive(Clone, Debug, Default)]
+pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);

 pub type TreeAndJoint = (TokenTree, IsJoint);

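The doc comment removed above explains why the `Option` existed: it let an empty stream skip an allocation. With the plain `Lrc<Vec<..>>` field, `#[derive(Default)]` works because `Lrc<Vec<T>>` itself implements `Default` (a ref-counted empty `Vec`); the cost is one small ref-count allocation per empty stream, the optimization the removed comment described giving up. A tiny illustrative sketch, again with `Arc` standing in for `Lrc`:

```rust
use std::sync::Arc;

#[derive(Default)]
struct Stream(Arc<Vec<u8>>);

fn main() {
    // The derived `default()` is the same as wrapping an empty Vec by hand.
    // Unlike the old `None` representation it does allocate the Arc itself.
    let a = Stream::default();
    let b = Stream(Arc::new(Vec::new()));
    assert!(a.0.is_empty() && b.0.is_empty());
    assert_eq!(*a.0, *b.0);
}
```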
@@ -163,36 +158,34 @@ impl TokenStream {
     /// separating the two arguments with a comma for diagnostic suggestions.
     pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
         // Used to suggest if a user writes `foo!(a b);`
-        if let Some(ref stream) = self.0 {
-            let mut suggestion = None;
-            let mut iter = stream.iter().enumerate().peekable();
-            while let Some((pos, ts)) = iter.next() {
-                if let Some((_, next)) = iter.peek() {
-                    let sp = match (&ts, &next) {
-                        (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
-                        ((TokenTree::Token(token_left), NonJoint),
-                         (TokenTree::Token(token_right), _))
-                        if ((token_left.is_ident() && !token_left.is_reserved_ident())
-                            || token_left.is_lit()) &&
-                            ((token_right.is_ident() && !token_right.is_reserved_ident())
-                            || token_right.is_lit()) => token_left.span,
-                        ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
-                        _ => continue,
-                    };
-                    let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
-                    suggestion = Some((pos, comma, sp));
-                }
-            }
-            if let Some((pos, comma, sp)) = suggestion {
-                let mut new_stream = vec![];
-                let parts = stream.split_at(pos + 1);
-                new_stream.extend_from_slice(parts.0);
-                new_stream.push(comma);
-                new_stream.extend_from_slice(parts.1);
-                return Some((TokenStream::new(new_stream), sp));
+        let mut suggestion = None;
+        let mut iter = self.0.iter().enumerate().peekable();
+        while let Some((pos, ts)) = iter.next() {
+            if let Some((_, next)) = iter.peek() {
+                let sp = match (&ts, &next) {
+                    (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                    ((TokenTree::Token(token_left), NonJoint),
+                     (TokenTree::Token(token_right), _))
+                    if ((token_left.is_ident() && !token_left.is_reserved_ident())
+                        || token_left.is_lit()) &&
+                        ((token_right.is_ident() && !token_right.is_reserved_ident())
+                        || token_right.is_lit()) => token_left.span,
+                    ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
+                    _ => continue,
+                };
+                let sp = sp.shrink_to_hi();
+                let comma = (TokenTree::token(token::Comma, sp), NonJoint);
+                suggestion = Some((pos, comma, sp));
             }
         }
+        if let Some((pos, comma, sp)) = suggestion {
+            let mut new_stream = vec![];
+            let parts = self.0.split_at(pos + 1);
+            new_stream.extend_from_slice(parts.0);
+            new_stream.push(comma);
+            new_stream.extend_from_slice(parts.1);
+            return Some((TokenStream::new(new_stream), sp));
+        }
         None
     }
 }
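For readers skimming the `add_comma` hunk above: the logic is unchanged, it just iterates `self.0` directly instead of first unwrapping an `Option`. The idea is to scan adjacent token pairs and, when two plain tokens abut with no comma (as in `foo!(a b)`), return a copy of the stream with a comma spliced in plus the span to attach the suggestion to. A toy, self-contained version of that splice using strings rather than rustc tokens (all names here are made up for illustration):

```rust
// Suggest a comma between two adjacent non-comma "tokens".
fn add_comma(tokens: &[&str]) -> Option<(Vec<String>, usize)> {
    let mut suggestion = None;
    let mut iter = tokens.iter().enumerate().peekable();
    while let Some((pos, tok)) = iter.next() {
        if let Some((_, next)) = iter.peek() {
            if *tok != "," && **next != "," {
                suggestion = Some(pos); // remember where a comma could go
            }
        }
    }
    let pos = suggestion?;
    let (left, right) = tokens.split_at(pos + 1);
    let mut new_stream: Vec<String> = Vec::new();
    new_stream.extend(left.iter().map(|s| s.to_string()));
    new_stream.push(",".to_string());
    new_stream.extend(right.iter().map(|s| s.to_string()));
    Some((new_stream, pos))
}

fn main() {
    let (fixed, at) = add_comma(&["a", "b"]).unwrap();
    assert_eq!(fixed, ["a", ",", "b"]);
    assert_eq!(at, 0);
}
```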
@@ -224,28 +217,21 @@ impl PartialEq<TokenStream> for TokenStream {
 }

 impl TokenStream {
-    pub fn len(&self) -> usize {
-        if let Some(ref slice) = self.0 {
-            slice.len()
-        } else {
-            0
-        }
-    }
-
-    pub fn empty() -> TokenStream {
-        TokenStream(None)
+    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
+        TokenStream(Lrc::new(streams))
     }

     pub fn is_empty(&self) -> bool {
-        match self.0 {
-            None => true,
-            Some(ref stream) => stream.is_empty(),
-        }
+        self.0.is_empty()
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.len()
     }

     pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
         match streams.len() {
-            0 => TokenStream::empty(),
+            0 => TokenStream::default(),
             1 => streams.pop().unwrap(),
             _ => {
                 // We are going to extend the first stream in `streams` with
@@ -269,41 +255,24 @@ impl TokenStream {
                 // Get the first stream. If it's `None`, create an empty
                 // stream.
                 let mut iter = streams.drain();
-                let mut first_stream_lrc = match iter.next().unwrap().0 {
-                    Some(first_stream_lrc) => first_stream_lrc,
-                    None => Lrc::new(vec![]),
-                };
+                let mut first_stream_lrc = iter.next().unwrap().0;

                 // Append the elements to the first stream, after reserving
                 // space for them.
                 let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
                 first_vec_mut.reserve(num_appends);
                 for stream in iter {
-                    if let Some(stream) = stream.0 {
-                        first_vec_mut.extend(stream.iter().cloned());
-                    }
+                    first_vec_mut.extend(stream.0.iter().cloned());
                 }

                 // Create the final `TokenStream`.
-                match first_vec_mut.len() {
-                    0 => TokenStream(None),
-                    _ => TokenStream(Some(first_stream_lrc)),
-                }
+                TokenStream(first_stream_lrc)
             }
         }
     }

-    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
-        match streams.len() {
-            0 => TokenStream(None),
-            _ => TokenStream(Some(Lrc::new(streams))),
-        }
-    }
-
     pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
-        if let Some(stream) = self.0 {
-            vec.extend(stream.iter().cloned());
-        }
+        vec.extend(self.0.iter().cloned());
     }

     pub fn trees(&self) -> Cursor {
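The simplified `from_streams` above no longer needs the `None`/`Some` dance: it takes the first stream's `Lrc`, makes it mutable in place (cloning only if it is shared), reserves room, and appends the rest. A self-contained sketch of the same pattern, with `Arc<Vec<u32>>` standing in for the real token stream type:

```rust
use std::sync::Arc;

fn from_streams(streams: Vec<Arc<Vec<u32>>>) -> Arc<Vec<u32>> {
    match streams.len() {
        0 => Arc::default(), // plays the role of `TokenStream::default()`
        1 => streams.into_iter().next().unwrap(),
        _ => {
            let num_appends: usize = streams.iter().skip(1).map(|s| s.len()).sum();
            let mut iter = streams.into_iter();
            let mut first = iter.next().unwrap();
            let first_mut = Arc::make_mut(&mut first); // clone only if shared
            first_mut.reserve(num_appends);
            for stream in iter {
                first_mut.extend(stream.iter().cloned());
            }
            first
        }
    }
}

fn main() {
    let out = from_streams(vec![Arc::new(vec![1, 2]), Arc::new(vec![3]), Arc::new(vec![4, 5])]);
    assert_eq!(*out, vec![1, 2, 3, 4, 5]);
}
```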
@@ -370,24 +339,22 @@ impl TokenStream {
     }

     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .enumerate()
-                    .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .enumerate()
+                .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
+                .collect()
+        ))
     }

     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
+                .collect()
+        ))
     }
 }

@@ -405,44 +372,43 @@ impl TokenStreamBuilder {

         // If `self` is not empty and the last tree within the last stream is a
         // token tree marked with `Joint`...
-        if let Some(TokenStream(Some(ref mut last_stream_lrc))) = self.0.last_mut() {
+        if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
             if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {

                 // ...and `stream` is not empty and the first tree within it is
                 // a token tree...
-                if let TokenStream(Some(ref mut stream_lrc)) = stream {
+                let TokenStream(ref mut stream_lrc) = stream;
                     if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {

                         // ...and the two tokens can be glued together...
                         if let Some(glued_tok) = last_token.glue(&token) {

                             // ...then do so, by overwriting the last token
                             // tree in `self` and removing the first token tree
                             // from `stream`. This requires using `make_mut()`
                             // on the last stream in `self` and on `stream`,
                             // and in practice this doesn't cause cloning 99.9%
                             // of the time.

                             // Overwrite the last token tree with the merged
                             // token.
                             let last_vec_mut = Lrc::make_mut(last_stream_lrc);
                             *last_vec_mut.last_mut().unwrap() =
                                 (TokenTree::Token(glued_tok), *is_joint);

                             // Remove the first token tree from `stream`. (This
                             // is almost always the only tree in `stream`.)
                             let stream_vec_mut = Lrc::make_mut(stream_lrc);
                             stream_vec_mut.remove(0);

                             // Don't push `stream` if it's empty -- that could
                             // block subsequent token gluing, by getting
                             // between two token trees that should be glued
                             // together.
                             if !stream.is_empty() {
                                 self.0.push(stream);
-                            }
-                            return;
                             }
+                            return;
                         }
                     }
                 }
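Most of the `push` hunk above is unchanged commentary; the behaviour it documents is the gluing step: if the last token already pushed is marked `Joint`, try to merge it with the first token of the incoming stream, and skip pushing the incoming stream entirely if that empties it. A much-simplified, self-contained illustration of that shape (single characters as tokens, a hard-coded notion of "joint", nothing from rustc's API):

```rust
// Toy builder: pieces are strings of one-character tokens. A piece ending in
// '=' or '>' is treated as joint with whatever follows, so "=" + "=" ends up
// as "==" inside the previous piece.
#[derive(Default)]
struct Builder(Vec<String>);

impl Builder {
    fn push(&mut self, mut piece: String) {
        if let Some(last) = self.0.last_mut() {
            let joint = matches!(last.chars().last(), Some('=') | Some('>'));
            if joint && !piece.is_empty() {
                // Glue: append to the previous piece and drop the first token
                // of the incoming one.
                let first = piece.remove(0);
                last.push(first);
                if piece.is_empty() {
                    return; // don't push an empty piece; it would block later gluing
                }
            }
        }
        self.0.push(piece);
    }
}

fn main() {
    let mut b = Builder::default();
    b.push("x".to_string());
    b.push("=".to_string());
    b.push("=".to_string());
    assert_eq!(b.0, ["x", "=="]);
}
```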
@@ -475,16 +441,11 @@ impl Cursor {
     }

     pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => {
-                if self.index < stream.len() {
-                    self.index += 1;
-                    Some(stream[self.index - 1].clone())
-                } else {
-                    None
-                }
-            }
+        if self.index < self.stream.len() {
+            self.index += 1;
+            Some(self.stream.0[self.index - 1].clone())
+        } else {
+            None
         }
     }

|
@ -493,16 +454,13 @@ impl Cursor {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
let index = self.index;
|
let index = self.index;
|
||||||
let stream = mem::replace(&mut self.stream, TokenStream(None));
|
let stream = mem::take(&mut self.stream);
|
||||||
*self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
|
*self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
|
||||||
self.index = index;
|
self.index = index;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
|
pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
|
||||||
match self.stream.0 {
|
self.stream.0[self.index ..].get(n).map(|(tree, _)| tree.clone())
|
||||||
None => None,
|
|
||||||
Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
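The `Cursor` hunks also swap `mem::replace(&mut self.stream, TokenStream(None))` for `mem::take`, which is exactly what the new `Default` impl enables: `mem::take(&mut x)` is shorthand for `mem::replace(&mut x, Default::default())`. A quick standalone illustration:

```rust
use std::mem;

#[derive(Default, Debug, PartialEq)]
struct Stream(Vec<u32>);

fn main() {
    let mut s = Stream(vec![1, 2, 3]);

    // Move the value out, leaving an empty (default) stream behind.
    let taken = mem::take(&mut s);
    assert_eq!(taken, Stream(vec![1, 2, 3]));
    assert_eq!(s, Stream::default());

    // Equivalent spelling using `mem::replace` with an explicit default.
    let replaced = mem::replace(&mut s, Default::default());
    assert_eq!(replaced, Stream::default()); // `s` was already empty
}
```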
@@ -20,7 +20,7 @@ fn plugin_macro_def(name: Name, span: Span) -> P<Item> {
         attr::mk_word_item(Ident::new(sym::rustc_builtin_macro, span)));

     let parens: TreeAndJoint = TokenTree::Delimited(
-        DelimSpan::from_single(span), token::Paren, TokenStream::empty()
+        DelimSpan::from_single(span), token::Paren, TokenStream::default()
     ).into();
     let trees = vec![parens.clone(), TokenTree::token(token::FatArrow, span).into(), parens];
