Rollup merge of #134161 - nnethercote:overhaul-token-cursors, r=spastorino
Overhaul token cursors. Some nice cleanups here. r? `@davidtwco`
commit 477f222b02
20 changed files with 199 additions and 210 deletions
@@ -1,7 +1,6 @@
 //! Functions dealing with attributes and meta items.
 
 use std::fmt::Debug;
-use std::iter;
 use std::sync::atomic::{AtomicU32, Ordering};
 
 use rustc_index::bit_set::GrowableBitSet;
@@ -16,7 +15,9 @@ use crate::ast::{
 };
 use crate::ptr::P;
 use crate::token::{self, CommentKind, Delimiter, Token};
-use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenTree};
+use crate::tokenstream::{
+    DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenStreamIter, TokenTree,
+};
 use crate::util::comments;
 use crate::util::literal::escape_string_symbol;
 
@@ -365,12 +366,9 @@ impl MetaItem {
         }
     }
 
-    fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
-    where
-        I: Iterator<Item = &'a TokenTree>,
-    {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItem> {
         // FIXME: Share code with `parse_path`.
-        let tt = tokens.next().map(|tt| TokenTree::uninterpolate(tt));
+        let tt = iter.next().map(|tt| TokenTree::uninterpolate(tt));
         let path = match tt.as_deref() {
             Some(&TokenTree::Token(
                 Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span },
@@ -378,9 +376,9 @@ impl MetaItem {
             )) => 'arm: {
                 let mut segments = if let &token::Ident(name, _) = kind {
                     if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
-                        tokens.peek()
+                        iter.peek()
                     {
-                        tokens.next();
+                        iter.next();
                         thin_vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
                         break 'arm Path::from_ident(Ident::new(name, span));
@@ -390,16 +388,16 @@ impl MetaItem {
                 };
                 loop {
                     if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) =
-                        tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
+                        iter.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
                     {
                         segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
                     if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
-                        tokens.peek()
+                        iter.peek()
                     {
-                        tokens.next();
+                        iter.next();
                     } else {
                         break;
                     }
@@ -420,8 +418,8 @@ impl MetaItem {
             }
             _ => return None,
         };
-        let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi());
-        let kind = MetaItemKind::from_tokens(tokens)?;
+        let list_closing_paren_pos = iter.peek().map(|tt| tt.span().hi());
+        let kind = MetaItemKind::from_tokens(iter)?;
         let hi = match &kind {
             MetaItemKind::NameValue(lit) => lit.span.hi(),
             MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(path.span.hi()),
@@ -438,12 +436,12 @@ impl MetaItem {
 impl MetaItemKind {
     // public because it can be called in the hir
     pub fn list_from_tokens(tokens: TokenStream) -> Option<ThinVec<MetaItemInner>> {
-        let mut tokens = tokens.trees().peekable();
+        let mut iter = tokens.iter();
         let mut result = ThinVec::new();
-        while tokens.peek().is_some() {
-            let item = MetaItemInner::from_tokens(&mut tokens)?;
+        while iter.peek().is_some() {
+            let item = MetaItemInner::from_tokens(&mut iter)?;
             result.push(item);
-            match tokens.next() {
+            match iter.next() {
                 None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => {}
                 _ => return None,
             }
@@ -451,12 +449,10 @@ impl MetaItemKind {
         Some(result)
     }
 
-    fn name_value_from_tokens<'a>(
-        tokens: &mut impl Iterator<Item = &'a TokenTree>,
-    ) -> Option<MetaItemKind> {
-        match tokens.next() {
+    fn name_value_from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
+        match iter.next() {
             Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
-                MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
+                MetaItemKind::name_value_from_tokens(&mut inner_tokens.iter())
             }
             Some(TokenTree::Token(token, _)) => {
                 MetaItemLit::from_token(token).map(MetaItemKind::NameValue)
@@ -465,19 +461,17 @@ impl MetaItemKind {
         }
     }
 
-    fn from_tokens<'a>(
-        tokens: &mut iter::Peekable<impl Iterator<Item = &'a TokenTree>>,
-    ) -> Option<MetaItemKind> {
-        match tokens.peek() {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
+        match iter.peek() {
             Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => {
                 let inner_tokens = inner_tokens.clone();
-                tokens.next();
+                iter.next();
                 MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List)
             }
             Some(TokenTree::Delimited(..)) => None,
             Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => {
-                tokens.next();
-                MetaItemKind::name_value_from_tokens(tokens)
+                iter.next();
+                MetaItemKind::name_value_from_tokens(iter)
             }
             _ => Some(MetaItemKind::Word),
         }
@@ -593,22 +587,19 @@ impl MetaItemInner {
         self.meta_item().is_some()
     }
 
-    fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemInner>
-    where
-        I: Iterator<Item = &'a TokenTree>,
-    {
-        match tokens.peek() {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemInner> {
+        match iter.peek() {
             Some(TokenTree::Token(token, _)) if let Some(lit) = MetaItemLit::from_token(token) => {
-                tokens.next();
+                iter.next();
                 return Some(MetaItemInner::Lit(lit));
             }
             Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
-                tokens.next();
-                return MetaItemInner::from_tokens(&mut inner_tokens.trees().peekable());
+                iter.next();
+                return MetaItemInner::from_tokens(&mut inner_tokens.iter());
            }
            _ => {}
        }
-        MetaItem::from_tokens(tokens).map(MetaItemInner::MetaItem)
+        MetaItem::from_tokens(iter).map(MetaItemInner::MetaItem)
     }
 }
 
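All of the attribute parsers above move from threading `iter::Peekable<impl Iterator<Item = &'a TokenTree>>` through every helper to passing a single concrete `TokenStreamIter<'_>` that can peek by itself. A minimal standalone sketch of the resulting loop shape, in the spirit of `list_from_tokens` (chars stand in for token trees; `Cursor` is an illustrative stand-in for `TokenStreamIter`, not the real type):

    // `Cursor` mimics the peek/next surface the new code relies on.
    struct Cursor<'a> {
        toks: &'a [char],
        index: usize,
    }

    impl<'a> Cursor<'a> {
        // Non-consuming look at the current item.
        fn peek(&self) -> Option<&'a char> {
            self.toks.get(self.index)
        }
        // Consuming advance.
        fn next(&mut self) -> Option<&'a char> {
            let t = self.toks.get(self.index)?;
            self.index += 1;
            Some(t)
        }
    }

    // Parse a comma-separated list such as `a,b`, returning None on a
    // malformed list -- the same `while peek / match next` shape as
    // `list_from_tokens` above.
    fn list_from_chars(toks: &[char]) -> Option<Vec<char>> {
        let mut iter = Cursor { toks, index: 0 };
        let mut result = Vec::new();
        while iter.peek().is_some() {
            let item = *iter.next()?;
            if item == ',' {
                return None; // empty list element
            }
            result.push(item);
            match iter.next() {
                None | Some(&',') => {}
                _ => return None,
            }
        }
        Some(result)
    }

    fn main() {
        assert_eq!(list_from_chars(&['a', ',', 'b']), Some(vec!['a', 'b']));
        assert_eq!(list_from_chars(&[',']), None);
    }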
@@ -99,7 +99,7 @@ where
     CTX: crate::HashStableContext,
 {
     fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
-        for sub_tt in self.trees() {
+        for sub_tt in self.iter() {
             sub_tt.hash_stable(hcx, hasher);
         }
     }
@@ -406,7 +406,7 @@ impl Eq for TokenStream {}
 
 impl PartialEq<TokenStream> for TokenStream {
     fn eq(&self, other: &TokenStream) -> bool {
-        self.trees().eq(other.trees())
+        self.iter().eq(other.iter())
     }
 }
 
@@ -423,24 +423,24 @@ impl TokenStream {
         self.0.len()
     }
 
-    pub fn trees(&self) -> RefTokenTreeCursor<'_> {
-        RefTokenTreeCursor::new(self)
+    pub fn get(&self, index: usize) -> Option<&TokenTree> {
+        self.0.get(index)
     }
 
-    pub fn into_trees(self) -> TokenTreeCursor {
-        TokenTreeCursor::new(self)
+    pub fn iter(&self) -> TokenStreamIter<'_> {
+        TokenStreamIter::new(self)
     }
 
     /// Compares two `TokenStream`s, checking equality without regarding span information.
     pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
-        let mut t1 = self.trees();
-        let mut t2 = other.trees();
-        for (t1, t2) in iter::zip(&mut t1, &mut t2) {
-            if !t1.eq_unspanned(t2) {
+        let mut iter1 = self.iter();
+        let mut iter2 = other.iter();
+        for (tt1, tt2) in iter::zip(&mut iter1, &mut iter2) {
+            if !tt1.eq_unspanned(tt2) {
                 return false;
             }
         }
-        t1.next().is_none() && t2.next().is_none()
+        iter1.next().is_none() && iter2.next().is_none()
     }
 
     /// Create a token stream containing a single token with alone spacing. The
@@ -509,7 +509,7 @@ impl TokenStream {
     #[must_use]
     pub fn flattened(&self) -> TokenStream {
         fn can_skip(stream: &TokenStream) -> bool {
-            stream.trees().all(|tree| match tree {
+            stream.iter().all(|tree| match tree {
                 TokenTree::Token(token, _) => !matches!(
                     token.kind,
                     token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..)
@@ -522,7 +522,7 @@ impl TokenStream {
             return self.clone();
         }
 
-        self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
+        self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
     }
 
     // If `vec` is not empty, try to glue `tt` onto its last token. The return
@@ -665,25 +665,26 @@ impl TokenStream {
     }
 }
 
-/// By-reference iterator over a [`TokenStream`], that produces `&TokenTree`
-/// items.
 #[derive(Clone)]
-pub struct RefTokenTreeCursor<'t> {
+pub struct TokenStreamIter<'t> {
     stream: &'t TokenStream,
     index: usize,
 }
 
-impl<'t> RefTokenTreeCursor<'t> {
+impl<'t> TokenStreamIter<'t> {
     fn new(stream: &'t TokenStream) -> Self {
-        RefTokenTreeCursor { stream, index: 0 }
+        TokenStreamIter { stream, index: 0 }
     }
 
-    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
-        self.stream.0.get(self.index + n)
+    // Peeking could be done via `Peekable`, but most iterators need peeking,
+    // and this is simple and avoids the need to use `peekable` and `Peekable`
+    // at all the use sites.
+    pub fn peek(&self) -> Option<&'t TokenTree> {
+        self.stream.0.get(self.index)
     }
 }
 
-impl<'t> Iterator for RefTokenTreeCursor<'t> {
+impl<'t> Iterator for TokenStreamIter<'t> {
     type Item = &'t TokenTree;
 
     fn next(&mut self) -> Option<&'t TokenTree> {
@@ -694,39 +695,6 @@ impl<'t> Iterator for RefTokenTreeCursor<'t> {
     }
 }
 
-/// Owning by-value iterator over a [`TokenStream`], that produces `&TokenTree`
-/// items.
-///
-/// Doesn't impl `Iterator` because Rust doesn't permit an owning iterator to
-/// return `&T` from `next`; the need for an explicit lifetime in the `Item`
-/// associated type gets in the way. Instead, use `next_ref` (which doesn't
-/// involve associated types) for getting individual elements, or
-/// `RefTokenTreeCursor` if you really want an `Iterator`, e.g. in a `for`
-/// loop.
-#[derive(Clone, Debug)]
-pub struct TokenTreeCursor {
-    pub stream: TokenStream,
-    index: usize,
-}
-
-impl TokenTreeCursor {
-    fn new(stream: TokenStream) -> Self {
-        TokenTreeCursor { stream, index: 0 }
-    }
-
-    #[inline]
-    pub fn next_ref(&mut self) -> Option<&TokenTree> {
-        self.stream.0.get(self.index).map(|tree| {
-            self.index += 1;
-            tree
-        })
-    }
-
-    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
-        self.stream.0.get(self.index + n)
-    }
-}
-
 #[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub struct DelimSpan {
     pub open: Span,
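The heart of the change is in this file: `RefTokenTreeCursor` is renamed to `TokenStreamIter`, it gains a built-in `peek` (replacing both `look_ahead(0)` and `Peekable` wrappers at call sites), and the owning `TokenTreeCursor` leaves the shared `tokenstream` module (a private replacement appears in the parser later in this diff). A self-contained sketch of the new iterator's shape, assuming only std (`Tree` and `Stream` are illustrative stand-ins for `TokenTree` and `TokenStream`):

    #[derive(Debug, PartialEq)]
    struct Tree(u32);

    struct Stream(Vec<Tree>);

    // By-reference iterator with built-in peeking, mirroring `TokenStreamIter`.
    #[derive(Clone)]
    struct StreamIter<'t> {
        stream: &'t Stream,
        index: usize,
    }

    impl<'t> StreamIter<'t> {
        fn new(stream: &'t Stream) -> Self {
            StreamIter { stream, index: 0 }
        }

        // Peek without advancing; no `Peekable` adapter needed at use sites.
        fn peek(&self) -> Option<&'t Tree> {
            self.stream.0.get(self.index)
        }
    }

    impl<'t> Iterator for StreamIter<'t> {
        type Item = &'t Tree;

        fn next(&mut self) -> Option<&'t Tree> {
            let tree = self.stream.0.get(self.index)?;
            self.index += 1;
            Some(tree)
        }
    }

    fn main() {
        let stream = Stream(vec![Tree(1), Tree(2)]);
        let mut iter = StreamIter::new(&stream);
        assert_eq!(iter.peek(), Some(&Tree(1))); // peeking does not consume
        assert_eq!(iter.next(), Some(&Tree(1)));
        assert_eq!(iter.next(), Some(&Tree(2)));
        assert!(iter.next().is_none());
    }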
@@ -725,7 +725,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
     // E.g. we have seen cases where a proc macro can handle `a :: b` but not
     // `a::b`. See #117433 for some examples.
     fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
-        let mut iter = tts.trees().peekable();
+        let mut iter = tts.iter().peekable();
         while let Some(tt) = iter.next() {
             let spacing = self.print_tt(tt, convert_dollar_crate);
             if let Some(next) = iter.peek() {
@@ -18,7 +18,7 @@ pub(crate) fn expand_concat_idents<'cx>(
     }
 
     let mut res_str = String::new();
-    for (i, e) in tts.trees().enumerate() {
+    for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match e {
                 TokenTree::Token(Token { kind: token::Comma, .. }, _) => {}
@@ -9,9 +9,9 @@ pub(crate) fn expand_trace_macros(
     sp: Span,
     tt: TokenStream,
 ) -> MacroExpanderResult<'static> {
-    let mut cursor = tt.trees();
+    let mut iter = tt.iter();
     let mut err = false;
-    let value = match &cursor.next() {
+    let value = match iter.next() {
         Some(TokenTree::Token(token, _)) if token.is_keyword(kw::True) => true,
         Some(TokenTree::Token(token, _)) if token.is_keyword(kw::False) => false,
         _ => {
@@ -19,7 +19,7 @@ pub(crate) fn expand_trace_macros(
             false
         }
     };
-    err |= cursor.next().is_some();
+    err |= iter.next().is_some();
     if err {
         cx.dcx().emit_err(errors::TraceMacros { span: sp });
     } else {
@@ -1,5 +1,5 @@
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, Lit, Token, TokenKind};
-use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{TokenStream, TokenStreamIter, TokenTree};
 use rustc_ast::{LitIntType, LitKind};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Applicability, PResult};
@@ -38,14 +38,14 @@ impl MetaVarExpr {
         outer_span: Span,
         psess: &'psess ParseSess,
     ) -> PResult<'psess, MetaVarExpr> {
-        let mut tts = input.trees();
-        let ident = parse_ident(&mut tts, psess, outer_span)?;
-        let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = tts.next() else {
+        let mut iter = input.iter();
+        let ident = parse_ident(&mut iter, psess, outer_span)?;
+        let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = iter.next() else {
             let msg = "meta-variable expression parameter must be wrapped in parentheses";
             return Err(psess.dcx().struct_span_err(ident.span, msg));
         };
-        check_trailing_token(&mut tts, psess)?;
-        let mut iter = args.trees();
+        check_trailing_token(&mut iter, psess)?;
+        let mut iter = args.iter();
         let rslt = match ident.as_str() {
             "concat" => {
                 let mut result = Vec::new();
@@ -73,7 +73,7 @@ impl MetaVarExpr {
                     }
                 };
                 result.push(element);
-                if iter.look_ahead(0).is_none() {
+                if iter.peek().is_none() {
                     break;
                 }
                 if !try_eat_comma(&mut iter) {
@@ -142,7 +142,7 @@ pub(crate) enum MetaVarExprConcatElem {
 
 // Checks if there are any remaining tokens. For example, `${ignore(ident ... a b c ...)}`
 fn check_trailing_token<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
 ) -> PResult<'psess, ()> {
     if let Some(tt) = iter.next() {
@@ -158,14 +158,14 @@ fn check_trailing_token<'psess>(
 
 /// Parse a meta-variable `count` expression: `count(ident[, depth])`
 fn parse_count<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
     span: Span,
 ) -> PResult<'psess, MetaVarExpr> {
     eat_dollar(iter, psess, span)?;
     let ident = parse_ident(iter, psess, span)?;
     let depth = if try_eat_comma(iter) {
-        if iter.look_ahead(0).is_none() {
+        if iter.peek().is_none() {
            return Err(psess.dcx().struct_span_err(
                span,
                "`count` followed by a comma must have an associated index indicating its depth",
@@ -180,7 +180,7 @@ fn parse_count<'psess>(
 
 /// Parses the depth used by index(depth) and len(depth).
 fn parse_depth<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
    psess: &'psess ParseSess,
    span: Span,
 ) -> PResult<'psess, usize> {
@@ -203,7 +203,7 @@ fn parse_depth<'psess>(
 
 /// Parses an generic ident
 fn parse_ident<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
     fallback_span: Span,
 ) -> PResult<'psess, Ident> {
@@ -235,7 +235,7 @@ fn parse_ident_from_token<'psess>(
 }
 
 fn parse_token<'psess, 't>(
-    iter: &mut RefTokenTreeCursor<'t>,
+    iter: &mut TokenStreamIter<'t>,
     psess: &'psess ParseSess,
     fallback_span: Span,
 ) -> PResult<'psess, &'t Token> {
@@ -250,8 +250,8 @@ fn parse_token<'psess, 't>(
 
 /// Tries to move the iterator forward returning `true` if there is a comma. If not, then the
 /// iterator is not modified and the result is `false`.
-fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool {
-    if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.look_ahead(0) {
+fn try_eat_comma(iter: &mut TokenStreamIter<'_>) -> bool {
+    if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.peek() {
         let _ = iter.next();
         return true;
     }
@@ -260,8 +260,8 @@ fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool {
 
 /// Tries to move the iterator forward returning `true` if there is a dollar sign. If not, then the
 /// iterator is not modified and the result is `false`.
-fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool {
-    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) {
+fn try_eat_dollar(iter: &mut TokenStreamIter<'_>) -> bool {
+    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.peek() {
         let _ = iter.next();
         return true;
     }
@@ -270,12 +270,11 @@ fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool {
 
 /// Expects that the next item is a dollar sign.
 fn eat_dollar<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
     span: Span,
 ) -> PResult<'psess, ()> {
-    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) {
-        let _ = iter.next();
+    if try_eat_dollar(iter) {
         return Ok(());
     }
     Err(psess.dcx().struct_span_err(
@@ -1,4 +1,5 @@
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, NonterminalKind, Token};
+use rustc_ast::tokenstream::TokenStreamIter;
 use rustc_ast::{NodeId, tokenstream};
 use rustc_ast_pretty::pprust;
 use rustc_feature::Features;
@@ -48,25 +49,25 @@ pub(super) fn parse(
 
     // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
     // additional trees if need be.
-    let mut trees = input.trees().peekable();
-    while let Some(tree) = trees.next() {
+    let mut iter = input.iter();
+    while let Some(tree) = iter.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(tree, &mut trees, parsing_patterns, sess, node_id, features, edition);
+        let tree = parse_tree(tree, &mut iter, parsing_patterns, sess, node_id, features, edition);
         match tree {
             TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
                 // Not consuming the next token immediately, as it may not be a colon
-                let span = match trees.peek() {
+                let span = match iter.peek() {
                     Some(&tokenstream::TokenTree::Token(
                         Token { kind: token::Colon, span: colon_span },
                         _,
                     )) => {
                         // Consume the colon first
-                        trees.next();
+                        iter.next();
 
                         // It's ok to consume the next tree no matter how,
                         // since if it's not a token then it will be an invalid declaration.
-                        match trees.next() {
+                        match iter.next() {
                             Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                 Some((fragment, _)) => {
                                     let span = token.span.with_lo(start_sp.lo());
@@ -142,14 +143,14 @@ fn maybe_emit_macro_metavar_expr_concat_feature(features: &Features, sess: &Sess
 /// # Parameters
 ///
 /// - `tree`: the tree we wish to convert.
-/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to finish
+/// - `outer_iter`: an iterator over trees. We may need to read more tokens from it in order to finish
 ///   converting `tree`
 /// - `parsing_patterns`: same as [parse].
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
 /// - `features`: language features so we can do feature gating.
 fn parse_tree<'a>(
     tree: &'a tokenstream::TokenTree,
-    outer_trees: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+    outer_iter: &mut TokenStreamIter<'a>,
     parsing_patterns: bool,
     sess: &Session,
     node_id: NodeId,
@@ -162,15 +163,16 @@ fn parse_tree<'a>(
         &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span: dollar_span }, _) => {
             // FIXME: Handle `Invisible`-delimited groups in a more systematic way
             // during parsing.
-            let mut next = outer_trees.next();
-            let mut trees: Box<dyn Iterator<Item = &tokenstream::TokenTree>>;
-            match next {
+            let mut next = outer_iter.next();
+            let mut iter_storage;
+            let mut iter: &mut TokenStreamIter<'_> = match next {
                 Some(tokenstream::TokenTree::Delimited(.., delim, tts)) if delim.skip() => {
-                    trees = Box::new(tts.trees());
-                    next = trees.next();
+                    iter_storage = tts.iter();
+                    next = iter_storage.next();
+                    &mut iter_storage
                 }
-                _ => trees = Box::new(outer_trees),
-            }
+                _ => outer_iter,
+            };
 
             match next {
                 // `tree` is followed by a delimited set of token trees.
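Besides the signature changes, the hunk above swaps the old `Box<dyn Iterator<Item = &TokenTree>>` for a plain local: storage for the nested iterator is declared before the `match`, and the match itself evaluates to a `&mut` pointing either at that storage or at the caller's iterator. A standalone sketch of the same borrow pattern under illustrative types (the negative-number "group" encoding is invented for the demo):

    fn read_after_marker(outer_iter: &mut std::vec::IntoIter<i32>) -> Vec<i32> {
        let mut next = outer_iter.next();
        // Reserved slot for a possible nested iterator; only initialized on
        // the branch that needs it, exactly like `iter_storage` above.
        let mut iter_storage;
        let iter: &mut std::vec::IntoIter<i32> = match next {
            // A negative first element stands in for a skippable delimited
            // group holding |n| sevens.
            Some(n) if n < 0 => {
                iter_storage = vec![7; (-n) as usize].into_iter();
                next = iter_storage.next();
                &mut iter_storage
            }
            _ => outer_iter,
        };

        // From here on, `next` and `iter` are used uniformly regardless of
        // which stream they came from -- no boxing, no trait object.
        let mut out = Vec::new();
        while let Some(n) = next {
            out.push(n);
            next = iter.next();
        }
        out
    }

    fn main() {
        assert_eq!(read_after_marker(&mut vec![1, 2].into_iter()), vec![1, 2]);
        // The group is entered and drained transparently.
        assert_eq!(read_after_marker(&mut vec![-2].into_iter()), vec![7, 7]);
    }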
@@ -229,7 +231,7 @@ fn parse_tree<'a>(
                     let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
                     // Get the Kleene operator and optional separator
                     let (separator, kleene) =
-                        parse_sep_and_kleene_op(&mut trees, delim_span.entire(), sess);
+                        parse_sep_and_kleene_op(&mut iter, delim_span.entire(), sess);
                     // Count the number of captured "names" (i.e., named metavars)
                     let num_captures =
                         if parsing_patterns { count_metavar_decls(&sequence) } else { 0 };
@@ -312,11 +314,11 @@ fn kleene_op(token: &Token) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<'a>(
-    input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+fn parse_kleene_op(
+    iter: &mut TokenStreamIter<'_>,
     span: Span,
 ) -> Result<Result<(KleeneOp, Span), Token>, Span> {
-    match input.next() {
+    match iter.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
             Some(op) => Ok(Ok((op, token.span))),
             None => Ok(Err(token.clone())),
@@ -333,22 +335,22 @@ fn parse_kleene_op<'a>(
 /// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
 /// stream of tokens in an invocation of a macro.
 ///
-/// This function will take some input iterator `input` corresponding to `span` and a parsing
-/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
+/// This function will take some input iterator `iter` corresponding to `span` and a parsing
+/// session `sess`. If the next one (or possibly two) tokens in `iter` correspond to a Kleene
 /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
 /// error with the appropriate span is emitted to `sess` and a dummy value is returned.
-fn parse_sep_and_kleene_op<'a>(
-    input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+fn parse_sep_and_kleene_op(
+    iter: &mut TokenStreamIter<'_>,
     span: Span,
     sess: &Session,
 ) -> (Option<Token>, KleeneToken) {
     // We basically look at two token trees here, denoted as #1 and #2 below
-    let span = match parse_kleene_op(input, span) {
+    let span = match parse_kleene_op(iter, span) {
         // #1 is a `?`, `+`, or `*` KleeneOp
         Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),
 
         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
+        Ok(Err(token)) => match parse_kleene_op(iter, token.span) {
            // #2 is the `?` Kleene op, which does not take a separator (error)
            Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
                // Error!
@@ -111,9 +111,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
         // Estimate the capacity as `stream.len()` rounded up to the next power
         // of two to limit the number of required reallocations.
         let mut trees = Vec::with_capacity(stream.len().next_power_of_two());
-        let mut cursor = stream.trees();
+        let mut iter = stream.iter();
 
-        while let Some(tree) = cursor.next() {
+        while let Some(tree) = iter.next() {
             let (Token { kind, span }, joint) = match tree.clone() {
                 tokenstream::TokenTree::Delimited(span, _, delim, tts) => {
                     let delimiter = pm::Delimiter::from_internal(delim);
@@ -1828,7 +1828,7 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: &TokenStream) {
         // Check if the preceding token is `$`, because we want to allow `$async`, etc.
         let mut prev_dollar = false;
-        for tt in tokens.trees() {
+        for tt in tokens.iter() {
             match tt {
                 // Only report non-raw idents.
                 TokenTree::Token(token, _) => {
@@ -84,7 +84,7 @@ impl Expr2024 {
         let mut prev_colon = false;
         let mut prev_identifier = false;
         let mut prev_dollar = false;
-        for tt in tokens.trees() {
+        for tt in tokens.iter() {
             debug!(
                 "check_tokens: {:?} - colon {prev_dollar} - ident {prev_identifier} - colon {prev_colon}",
                 tt
@@ -35,7 +35,6 @@ where
 {
     fn visit_block_start(&mut self, _state: &A::Domain) {}
 
-    /// // njn: grep for "before", "primary", etc.
     /// Called after the "early" effect of the given statement is applied to `state`.
     fn visit_after_early_statement_effect(
         &mut self,
@@ -8,6 +8,7 @@ use ast::token::IdentIsRaw;
 use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::tokenstream::TokenTree;
 use rustc_ast::util::case::Case;
 use rustc_ast::util::classify;
 use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par};
@@ -2392,7 +2393,8 @@ impl<'a> Parser<'a> {
         }
 
         if self.token == TokenKind::Semi
-            && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
+            && let Some(last) = self.token_cursor.stack.last()
+            && let Some(TokenTree::Delimited(_, _, Delimiter::Parenthesis, _)) = last.curr()
             && self.may_recover()
         {
             // It is likely that the closure body is a block but where the
@@ -24,9 +24,7 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{
     self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
 };
-use rustc_ast::tokenstream::{
-    AttrsTarget, DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree, TokenTreeCursor,
-};
+use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
@@ -272,21 +270,48 @@ struct CaptureState {
     seen_attrs: IntervalSet<AttrId>,
 }
 
-/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
+#[derive(Clone, Debug)]
+struct TokenTreeCursor {
+    stream: TokenStream,
+    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
+    /// this can be any token tree. In `TokenCursor::stack`, this is always a
+    /// `TokenTree::Delimited`.
+    index: usize,
+}
+
+impl TokenTreeCursor {
+    #[inline]
+    fn new(stream: TokenStream) -> Self {
+        TokenTreeCursor { stream, index: 0 }
+    }
+
+    #[inline]
+    fn curr(&self) -> Option<&TokenTree> {
+        self.stream.get(self.index)
+    }
+
+    #[inline]
+    fn bump(&mut self) {
+        self.index += 1;
+    }
+}
+
+/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
 /// use this type to emit them as a linear sequence. But a linear sequence is
 /// what the parser expects, for the most part.
 #[derive(Clone, Debug)]
 struct TokenCursor {
-    // Cursor for the current (innermost) token stream. The delimiters for this
-    // token stream are found in `self.stack.last()`; when that is `None` then
-    // we are in the outermost token stream which never has delimiters.
-    tree_cursor: TokenTreeCursor,
+    // Cursor for the current (innermost) token stream. The index within the
+    // cursor can point to any token tree in the stream (or one past the end).
+    // The delimiters for this token stream are found in `self.stack.last()`;
+    // if that is `None` we are in the outermost token stream which never has
+    // delimiters.
+    curr: TokenTreeCursor,
 
-    // Token streams surrounding the current one. The delimiters for stack[n]'s
-    // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
-    // because it's the outermost token stream which never has delimiters.
-    stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
+    // Token streams surrounding the current one. The index within each cursor
+    // always points to a `TokenTree::Delimited`.
+    stack: Vec<TokenTreeCursor>,
 }
 
 impl TokenCursor {
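The parser now keeps its own private `TokenTreeCursor` built around a non-consuming `curr` plus an explicit `bump`, where the deleted `rustc_ast` version exposed a consuming `next_ref`. The point of the split is that the current tree can be inspected (or cloned) any number of times before the cursor commits to advancing. A tiny standalone sketch (names illustrative):

    #[derive(Clone, Debug, PartialEq)]
    struct Tree(&'static str);

    #[derive(Clone, Debug)]
    struct TreeCursor {
        stream: Vec<Tree>,
        index: usize,
    }

    impl TreeCursor {
        // Non-consuming: repeated calls see the same tree.
        fn curr(&self) -> Option<&Tree> {
            self.stream.get(self.index)
        }

        // Advancing is a separate, explicit step.
        fn bump(&mut self) {
            self.index += 1;
        }
    }

    fn main() {
        let mut cursor = TreeCursor { stream: vec![Tree("a"), Tree("b")], index: 0 };
        assert_eq!(cursor.curr(), Some(&Tree("a")));
        assert_eq!(cursor.curr(), Some(&Tree("a"))); // still not consumed
        cursor.bump();
        assert_eq!(cursor.curr(), Some(&Tree("b")));
        cursor.bump();
        assert!(cursor.curr().is_none());
    }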
@@ -301,32 +326,33 @@ impl TokenCursor {
         // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
         // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
         // below can be removed.
-        if let Some(tree) = self.tree_cursor.next_ref() {
+        if let Some(tree) = self.curr.curr() {
             match tree {
                 &TokenTree::Token(ref token, spacing) => {
                     debug_assert!(!matches!(
                         token.kind,
                         token::OpenDelim(_) | token::CloseDelim(_)
                     ));
-                    return (token.clone(), spacing);
+                    let res = (token.clone(), spacing);
+                    self.curr.bump();
+                    return res;
                 }
                 &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
-                    let trees = tts.clone().into_trees();
-                    self.stack.push((
-                        mem::replace(&mut self.tree_cursor, trees),
-                        sp,
-                        spacing,
-                        delim,
-                    ));
+                    let trees = TokenTreeCursor::new(tts.clone());
+                    self.stack.push(mem::replace(&mut self.curr, trees));
                     if !delim.skip() {
                         return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                     }
                     // No open delimiter to return; continue on to the next iteration.
                 }
             };
-        } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
+        } else if let Some(parent) = self.stack.pop() {
             // We have exhausted this token stream. Move back to its parent token stream.
-            self.tree_cursor = tree_cursor;
+            let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
+                panic!("parent should be Delimited")
+            };
+            self.curr = parent;
+            self.curr.bump(); // move past the `Delimited`
             if !delim.skip() {
                 return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
             }
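The invariant that makes the rewritten `TokenCursor::next` work: when descending into a `TokenTree::Delimited`, the parent cursor is pushed without being bumped, so it still points at the `Delimited` node; popping it back can then read the close-delimiter data from `parent.curr()` instead of carrying `(DelimSpan, DelimSpacing, Delimiter)` tuples on the stack. A self-contained sketch of that traversal, with chars standing in for tokens and delimiters (illustrative only, not the rustc types):

    #[derive(Clone)]
    enum Tree {
        Token(char),
        Delimited(char, char, Vec<Tree>), // (open, close, children)
    }

    struct Cursor {
        stream: Vec<Tree>,
        index: usize,
    }

    impl Cursor {
        fn curr(&self) -> Option<&Tree> {
            self.stream.get(self.index)
        }
        fn bump(&mut self) {
            self.index += 1;
        }
    }

    struct Walker {
        curr: Cursor,       // innermost stream
        stack: Vec<Cursor>, // suspended parents, each pointing at a `Delimited`
    }

    impl Walker {
        fn next(&mut self) -> Option<char> {
            if let Some(tree) = self.curr.curr() {
                match tree.clone() {
                    Tree::Token(c) => {
                        self.curr.bump();
                        Some(c)
                    }
                    Tree::Delimited(open, _, children) => {
                        let inner = Cursor { stream: children, index: 0 };
                        // Suspend the parent *without* bumping it: it keeps
                        // pointing at the `Delimited` node.
                        self.stack.push(std::mem::replace(&mut self.curr, inner));
                        Some(open)
                    }
                }
            } else if let Some(parent) = self.stack.pop() {
                // Recover the close delimiter from the parent's current tree.
                let Some(&Tree::Delimited(_, close, _)) = parent.curr() else {
                    panic!("parent should be Delimited");
                };
                self.curr = parent;
                self.curr.bump(); // now move past the `Delimited`
                Some(close)
            } else {
                None
            }
        }
    }

    fn main() {
        let stream = vec![
            Tree::Token('a'),
            Tree::Delimited('(', ')', vec![Tree::Token('b')]),
            Tree::Token('c'),
        ];
        let mut w = Walker { curr: Cursor { stream, index: 0 }, stack: Vec::new() };
        let mut out = String::new();
        while let Some(c) = w.next() {
            out.push(c);
        }
        assert_eq!(out, "a(b)c");
    }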
@@ -465,7 +491,7 @@ impl<'a> Parser<'a> {
             capture_cfg: false,
             restrictions: Restrictions::empty(),
             expected_tokens: Vec::new(),
-            token_cursor: TokenCursor { tree_cursor: stream.into_trees(), stack: Vec::new() },
+            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
             num_bump_calls: 0,
             break_last_token: 0,
             unmatched_angle_bracket_count: 0,
@@ -1191,7 +1217,7 @@ impl<'a> Parser<'a> {
         if dist == 1 {
             // The index is zero because the tree cursor's index always points
             // to the next token to be gotten.
-            match self.token_cursor.tree_cursor.look_ahead(0) {
+            match self.token_cursor.curr.curr() {
                 Some(tree) => {
                     // Indexing stayed within the current token tree.
                     match tree {
@@ -1201,12 +1227,13 @@ impl<'a> Parser<'a> {
                                return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                            }
                        }
-                    };
+                    }
                 }
                 None => {
                     // The tree cursor lookahead went (one) past the end of the
                     // current token tree. Try to return a close delimiter.
-                    if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
+                    if let Some(last) = self.token_cursor.stack.last()
+                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                         && !delim.skip()
                     {
                         // We are not in the outermost token stream, so we have
@@ -1398,9 +1425,10 @@ impl<'a> Parser<'a> {
     pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
             token::OpenDelim(..) => {
-                // Grab the tokens within the delimiters.
-                let stream = self.token_cursor.tree_cursor.stream.clone();
-                let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
+                // Clone the `TokenTree::Delimited` that we are currently
+                // within. That's what we are going to return.
+                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
+                debug_assert_matches!(tree, TokenTree::Delimited(..));
 
                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1420,7 +1448,7 @@ impl<'a> Parser<'a> {
 
                 // Consume close delimiter
                 self.bump();
-                TokenTree::Delimited(span, spacing, delim, stream)
+                tree
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
@@ -2286,7 +2286,7 @@ fn bad_path_expr_1() {
 fn string_to_tts_macro() {
     create_default_session_globals_then(|| {
         let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
-        let tts = &stream.trees().collect::<Vec<_>>()[..];
+        let tts = &stream.iter().collect::<Vec<_>>()[..];
 
         match tts {
             [
@@ -2298,14 +2298,14 @@ fn string_to_tts_macro() {
                 TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
                 TokenTree::Delimited(.., macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
-                let tts = &macro_tts.trees().collect::<Vec<_>>();
+                let tts = &macro_tts.iter().collect::<Vec<_>>();
                 match &tts[..] {
                     [
                         TokenTree::Delimited(.., first_delim, first_tts),
                         TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
                         TokenTree::Delimited(.., second_delim, second_tts),
                     ] if macro_delim == &Delimiter::Parenthesis => {
-                        let tts = &first_tts.trees().collect::<Vec<_>>();
+                        let tts = &first_tts.iter().collect::<Vec<_>>();
                         match &tts[..] {
                             [
                                 TokenTree::Token(Token { kind: token::Dollar, .. }, _),
@@ -2317,7 +2317,7 @@ fn string_to_tts_macro() {
                             }
                             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                         }
-                        let tts = &second_tts.trees().collect::<Vec<_>>();
+                        let tts = &second_tts.iter().collect::<Vec<_>>();
                         match &tts[..] {
                             [
                                 TokenTree::Token(Token { kind: token::Dollar, .. }, _),
@@ -2545,7 +2545,7 @@ fn ttdelim_span() {
         .unwrap();
 
     let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
-    let span = mac.args.tokens.trees().last().unwrap().span();
+    let span = mac.args.tokens.iter().last().unwrap().span();
 
     match psess.source_map().span_to_snippet(span) {
         Ok(s) => assert_eq!(&s[..], "{ body }"),
@@ -23,8 +23,8 @@ fn test_concat() {
         let mut eq_res = TokenStream::default();
         eq_res.push_stream(test_fst);
         eq_res.push_stream(test_snd);
-        assert_eq!(test_res.trees().count(), 5);
-        assert_eq!(eq_res.trees().count(), 5);
+        assert_eq!(test_res.iter().count(), 5);
+        assert_eq!(eq_res.iter().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
     })
 }
@@ -33,7 +33,7 @@ fn test_concat() {
 fn test_to_from_bijection() {
     create_default_session_globals_then(|| {
         let test_start = string_to_ts("foo::bar(baz)");
-        let test_end = test_start.trees().cloned().collect();
+        let test_end = test_start.iter().cloned().collect();
         assert_eq!(test_start, test_end)
     })
 }
@@ -105,6 +105,6 @@ fn test_dotdotdot() {
         stream.push_tree(TokenTree::token_joint(token::Dot, sp(1, 2)));
         stream.push_tree(TokenTree::token_alone(token::Dot, sp(2, 3)));
         assert!(stream.eq_unspanned(&string_to_ts("...")));
-        assert_eq!(stream.trees().count(), 1);
+        assert_eq!(stream.iter().count(), 1);
     })
 }
@@ -2604,7 +2604,7 @@ fn filter_tokens_from_list(
 ) -> Vec<TokenTree> {
     let mut tokens = Vec::with_capacity(args_tokens.len());
     let mut skip_next_comma = false;
-    for token in args_tokens.trees() {
+    for token in args_tokens.iter() {
         match token {
             TokenTree::Token(Token { kind: TokenKind::Comma, .. }, _) if skip_next_comma => {
                 skip_next_comma = false;
@@ -131,7 +131,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
     use State::*;
 
     let mut state = Start;
-    for tt in tts.trees() {
+    for tt in tts.iter() {
         let (needs_space, next_state) = match &tt {
             TokenTree::Token(tt, _) => match (state, &tt.kind) {
                 (Dollar, token::Ident(..)) => (false, DollarIdent),
@@ -15,7 +15,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, attr: &Attribute) {
     }
 
     if let AttrArgs::Delimited(args) = &normal_attr.item.args
-        && let mut tt_iter = args.tokens.trees()
+        && let mut tt_iter = args.tokens.iter()
         && let Some(TokenTree::Token(
             Token {
                 kind: TokenKind::Ident(sym::expected, _),
@@ -82,11 +82,11 @@ fn is_macro_export(attr: &Attribute) -> bool {

 fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
     let mut prev_is_dollar = false;
-    let mut cursor = tts.trees();
-    while let Some(curr) = cursor.next() {
+    let mut iter = tts.iter();
+    while let Some(curr) = iter.next() {
         if !prev_is_dollar
             && let Some(span) = is_crate_keyword(curr)
-            && let Some(next) = cursor.look_ahead(0)
+            && let Some(next) = iter.peek()
             && is_token(next, &TokenKind::PathSep)
         {
             return Some(span);

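[Editor's note] The clippy hunk above shows the whole shape of this migration in one place: a cursor obtained from tts.trees() becomes a TokenStreamIter obtained from tts.iter(), and single-token lookahead moves from look_ahead(0) to peek(). Below is a minimal, self-contained sketch of the same peek-while-iterating pattern using std's Peekable; the Tok type and find_unhygienic_crate helper are hypothetical stand-ins, not rustc or clippy types.

// Standalone analogue of contains_unhygienic_crate_reference: find a `crate`
// identifier that is followed by `::` and not preceded by `$`.
#[derive(Debug, PartialEq)]
enum Tok {
    Dollar,
    Ident(&'static str),
    PathSep,
}

fn find_unhygienic_crate(toks: &[Tok]) -> Option<usize> {
    let mut prev_is_dollar = false;
    // `peekable()` gives the same next()/peek() pair the new TokenStreamIter exposes.
    let mut iter = toks.iter().enumerate().peekable();
    while let Some((i, curr)) = iter.next() {
        if !prev_is_dollar
            && *curr == Tok::Ident("crate")
            && matches!(iter.peek(), Some((_, Tok::PathSep)))
        {
            return Some(i);
        }
        prev_is_dollar = *curr == Tok::Dollar;
    }
    None
}

fn main() {
    // `$crate::foo` is hygienic, so nothing is reported.
    let hygienic = [Tok::Dollar, Tok::Ident("crate"), Tok::PathSep, Tok::Ident("foo")];
    assert_eq!(find_unhygienic_crate(&hygienic), None);

    // A bare `crate::foo` is flagged at its position.
    let unhygienic = [Tok::Ident("crate"), Tok::PathSep, Tok::Ident("foo")];
    assert_eq!(find_unhygienic_crate(&unhygienic), Some(0));
}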
@@ -13,7 +13,7 @@ use std::collections::HashMap;
 use std::panic::{AssertUnwindSafe, catch_unwind};

 use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{TokenStream, TokenStreamIter, TokenTree};
 use rustc_ast::{ast, ptr};
 use rustc_ast_pretty::pprust;
 use rustc_span::{
@@ -443,7 +443,7 @@ pub(crate) fn rewrite_macro_def(
     }

     let ts = def.body.tokens.clone();
-    let mut parser = MacroParser::new(ts.trees());
+    let mut parser = MacroParser::new(ts.iter());
     let parsed_def = match parser.parse() {
         Some(def) => def,
         None => return snippet,
@@ -794,7 +794,7 @@ impl MacroArgParser {
         self.buf.clear();
     }

-    fn add_meta_variable(&mut self, iter: &mut RefTokenTreeCursor<'_>) -> Option<()> {
+    fn add_meta_variable(&mut self, iter: &mut TokenStreamIter<'_>) -> Option<()> {
         match iter.next() {
             Some(&TokenTree::Token(
                 Token {
@@ -826,7 +826,7 @@ impl MacroArgParser {
         &mut self,
         inner: Vec<ParsedMacroArg>,
         delim: Delimiter,
-        iter: &mut RefTokenTreeCursor<'_>,
+        iter: &mut TokenStreamIter<'_>,
     ) -> Option<()> {
         let mut buffer = String::new();
         let mut first = true;
@@ -926,7 +926,7 @@ impl MacroArgParser {

     /// Returns a collection of parsed macro def's arguments.
     fn parse(mut self, tokens: TokenStream) -> Option<Vec<ParsedMacroArg>> {
-        let mut iter = tokens.trees();
+        let mut iter = tokens.iter();

         while let Some(tok) = iter.next() {
             match tok {
@@ -1063,7 +1063,7 @@ fn format_macro_args(
 }

 fn span_for_token_stream(token_stream: &TokenStream) -> Option<Span> {
-    token_stream.trees().next().map(|tt| tt.span())
+    token_stream.iter().next().map(|tt| tt.span())
 }

 // We should insert a space if the next token is a:
@@ -1179,18 +1179,18 @@ pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> D
 // A very simple parser that just parses a macros 2.0 definition into its branches.
 // Currently we do not attempt to parse any further than that.
 struct MacroParser<'a> {
-    toks: RefTokenTreeCursor<'a>,
+    iter: TokenStreamIter<'a>,
 }

 impl<'a> MacroParser<'a> {
-    const fn new(toks: RefTokenTreeCursor<'a>) -> Self {
-        Self { toks }
+    const fn new(iter: TokenStreamIter<'a>) -> Self {
+        Self { iter }
     }

     // (`(` ... `)` `=>` `{` ... `}`)*
     fn parse(&mut self) -> Option<Macro> {
         let mut branches = vec![];
-        while self.toks.look_ahead(1).is_some() {
+        while self.iter.peek().is_some() {
             branches.push(self.parse_branch()?);
         }

@@ -1199,13 +1199,13 @@ impl<'a> MacroParser<'a> {

     // `(` ... `)` `=>` `{` ... `}`
     fn parse_branch(&mut self) -> Option<MacroBranch> {
-        let tok = self.toks.next()?;
+        let tok = self.iter.next()?;
         let (lo, args_paren_kind) = match tok {
             TokenTree::Token(..) => return None,
             &TokenTree::Delimited(delimited_span, _, d, _) => (delimited_span.open.lo(), d),
         };
         let args = TokenStream::new(vec![tok.clone()]);
-        match self.toks.next()? {
+        match self.iter.next()? {
             TokenTree::Token(
                 Token {
                     kind: TokenKind::FatArrow,
@@ -1215,7 +1215,7 @@ impl<'a> MacroParser<'a> {
             ) => {}
             _ => return None,
         }
-        let (mut hi, body, whole_body) = match self.toks.next()? {
+        let (mut hi, body, whole_body) = match self.iter.next()? {
             TokenTree::Token(..) => return None,
             TokenTree::Delimited(delimited_span, ..) => {
                 let data = delimited_span.entire().data();
@@ -1237,10 +1237,10 @@ impl<'a> MacroParser<'a> {
                 span,
             },
             _,
-        )) = self.toks.look_ahead(0)
+        )) = self.iter.peek()
         {
             hi = span.hi();
-            self.toks.next();
+            self.iter.next();
         }
         Some(MacroBranch {
             span: mk_sp(lo, hi),
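[Editor's note] In rustfmt's MacroParser, the commit keeps only the renamed iterator: the cursor's look_ahead calls become peek(), and next() is unchanged. Below is a sketch of how such an iterator can offer a non-advancing peek() on top of a borrowed slice; this is an illustrative stand-in under the assumption that the real TokenStreamIter walks a TokenStream rather than a slice, and the Iter name is hypothetical.

// A minimal iterator with peek(), analogous in shape to TokenStreamIter.
struct Iter<'a, T> {
    slice: &'a [T],
}

impl<'a, T> Iter<'a, T> {
    fn new(slice: &'a [T]) -> Self {
        Self { slice }
    }

    // View the next element without advancing (the role look_ahead(0) played).
    fn peek(&self) -> Option<&'a T> {
        self.slice.first()
    }
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        let (first, rest) = self.slice.split_first()?;
        self.slice = rest;
        Some(first)
    }
}

fn main() {
    let mut it = Iter::new(&[1, 2, 3]);
    assert_eq!(it.peek(), Some(&1)); // peek does not advance
    assert_eq!(it.next(), Some(&1));
    assert_eq!(it.next(), Some(&2));
    assert_eq!(it.peek(), Some(&3));
    assert_eq!(it.next(), Some(&3));
    assert_eq!(it.next(), None);
}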