Simplify RefTokenTreeCursor::look_ahead.
It's only ever used with a lookahead of 0, so this commit removes the lookahead and renames it `peek`.
parent 0bf6e82c54
commit 3575e7943b
4 changed files with 10 additions and 10 deletions
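To illustrate what the rename means for callers, here is a minimal standalone sketch. The `Cursor` type and its char items are hypothetical stand-ins for illustration only; rustc's actual RefTokenTreeCursor wraps a TokenStream and yields &TokenTree.

// Simplified stand-in for RefTokenTreeCursor (assumption: a cursor over
// plain chars instead of rustc's TokenStream/TokenTree types).
struct Cursor<'a> {
    items: &'a [char],
    index: usize,
}

impl<'a> Cursor<'a> {
    fn new(items: &'a [char]) -> Self {
        Cursor { items, index: 0 }
    }

    // Previously `look_ahead(&self, n: usize)`; since every caller passed
    // `n == 0`, the parameter is dropped and the method renamed to `peek`.
    fn peek(&self) -> Option<&char> {
        self.items.get(self.index)
    }

    // Returns the current item and advances the cursor.
    fn next(&mut self) -> Option<&'a char> {
        let item = self.items.get(self.index);
        if item.is_some() {
            self.index += 1;
        }
        item
    }
}

fn main() {
    let toks = ['a', ',', 'b'];
    let mut cursor = Cursor::new(&toks);
    // Call sites change from `cursor.look_ahead(0)` to `cursor.peek()`.
    while cursor.peek().is_some() {
        let _ = cursor.next();
    }
    assert!(cursor.peek().is_none());
}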
@@ -678,8 +678,8 @@ impl<'t> RefTokenTreeCursor<'t> {
         RefTokenTreeCursor { stream, index: 0 }
     }
 
-    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
-        self.stream.0.get(self.index + n)
+    pub fn peek(&self) -> Option<&TokenTree> {
+        self.stream.0.get(self.index)
     }
 }
 
@@ -74,7 +74,7 @@ impl MetaVarExpr {
                 }
             };
             result.push(element);
-            if iter.look_ahead(0).is_none() {
+            if iter.peek().is_none() {
                 break;
             }
             if !try_eat_comma(&mut iter) {
@@ -166,7 +166,7 @@ fn parse_count<'psess>(
     eat_dollar(iter, psess, span)?;
     let ident = parse_ident(iter, psess, span)?;
     let depth = if try_eat_comma(iter) {
-        if iter.look_ahead(0).is_none() {
+        if iter.peek().is_none() {
             return Err(psess.dcx().struct_span_err(
                 span,
                 "`count` followed by a comma must have an associated index indicating its depth",
@@ -252,7 +252,7 @@ fn parse_token<'psess, 't>(
 /// Tries to move the iterator forward returning `true` if there is a comma. If not, then the
 /// iterator is not modified and the result is `false`.
 fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool {
-    if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.look_ahead(0) {
+    if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.peek() {
         let _ = iter.next();
         return true;
     }
@@ -262,7 +262,7 @@ fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool {
 /// Tries to move the iterator forward returning `true` if there is a dollar sign. If not, then the
 /// iterator is not modified and the result is `false`.
 fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool {
-    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) {
+    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.peek() {
         let _ = iter.next();
         return true;
     }
@@ -275,7 +275,7 @@ fn eat_dollar<'psess>(
     psess: &'psess ParseSess,
     span: Span,
 ) -> PResult<'psess, ()> {
-    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) {
+    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.peek() {
         let _ = iter.next();
         return Ok(());
     }
@@ -86,7 +86,7 @@ fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
     while let Some(curr) = cursor.next() {
         if !prev_is_dollar
             && let Some(span) = is_crate_keyword(curr)
-            && let Some(next) = cursor.look_ahead(0)
+            && let Some(next) = cursor.peek()
             && is_token(next, &TokenKind::PathSep)
         {
             return Some(span);
@@ -1190,7 +1190,7 @@ impl<'a> MacroParser<'a> {
     // (`(` ... `)` `=>` `{` ... `}`)*
     fn parse(&mut self) -> Option<Macro> {
         let mut branches = vec![];
-        while self.toks.look_ahead(0).is_some() {
+        while self.toks.peek().is_some() {
            branches.push(self.parse_branch()?);
        }
 
@@ -1237,7 +1237,7 @@ impl<'a> MacroParser<'a> {
                 span,
             },
             _,
-        )) = self.toks.look_ahead(0)
+        )) = self.toks.peek()
         {
             hi = span.hi();
             self.toks.next();