
use CursorRef more, to avoid cloning Trees
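In short: TokenStream::trees() now returns a borrowing CursorRef<'_> instead of cloning the stream into an owning Cursor, and callers that really need ownership switch to into_trees(). Below is a minimal standalone sketch of that split, using hypothetical stand-in types (Tree, Stream) rather than the rustc definitions:

// Standalone sketch of the idea, not the rustc types: `Tree` and `Stream`
// are hypothetical stand-ins for TokenTree and TokenStream.
#[derive(Clone)]
struct Tree(String);

struct Stream(Vec<Tree>);

impl Stream {
    // Borrowing cursor: iterate by reference, no clone of the backing Vec.
    fn trees(&self) -> impl Iterator<Item = &Tree> {
        self.0.iter()
    }

    // Owning cursor: consumes the stream; callers that actually need
    // ownership call this instead of cloning behind the scenes.
    fn into_trees(self) -> impl Iterator<Item = Tree> {
        self.0.into_iter()
    }
}

fn main() {
    let stream = Stream(vec![Tree("foo".into()), Tree("bar".into())]);

    // Read-only pass: borrow the trees, don't clone them.
    assert!(stream.trees().all(|t| !t.0.is_empty()));

    // Ownership is paid for only where it is actually needed.
    let owned: Vec<Tree> = stream.into_trees().collect();
    assert_eq!(owned.len(), 2);
}

The point of the sketch is only the ownership split: read-only passes borrow, and a move (or explicit clone) happens only where a caller keeps the trees.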

klensy 2022-05-16 18:58:15 +03:00
parent 10b3a0d209
commit cc5f3e21ac
10 changed files with 35 additions and 24 deletions

View file

@@ -552,7 +552,7 @@ impl MetaItemKind {
     ) -> Option<MetaItemKind> {
         match tokens.next() {
             Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
-                MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
+                MetaItemKind::name_value_from_tokens(&mut inner_tokens.into_trees())
             }
             Some(TokenTree::Token(token)) => {
                 Lit::from_token(&token).ok().map(MetaItemKind::NameValue)

View file

@@ -442,8 +442,8 @@ impl TokenStream {
         }
     }

-    pub fn trees(&self) -> Cursor {
-        self.clone().into_trees()
+    pub fn trees(&self) -> CursorRef<'_> {
+        CursorRef::new(self)
     }

     pub fn into_trees(self) -> Cursor {
@@ -538,12 +538,21 @@ pub struct CursorRef<'t> {
 }

 impl<'t> CursorRef<'t> {
+    fn new(stream: &'t TokenStream) -> Self {
+        CursorRef { stream, index: 0 }
+    }
+
+    #[inline]
     fn next_with_spacing(&mut self) -> Option<&'t TreeAndSpacing> {
         self.stream.0.get(self.index).map(|tree| {
             self.index += 1;
             tree
         })
     }
+
+    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
+        self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
+    }
 }

 impl<'t> Iterator for CursorRef<'t> {
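For illustration only, a hedged sketch of the borrowing-cursor pattern this hunk adds, written against a plain slice with a hypothetical CursorRef type rather than the rustc one: look_ahead peeks without advancing, and next yields references tied to the underlying data, so scanning never clones.

// Hypothetical, simplified CursorRef over a plain slice; an illustration of
// the pattern, not the rustc type.
struct CursorRef<'t, T> {
    items: &'t [T],
    index: usize,
}

impl<'t, T> CursorRef<'t, T> {
    fn new(items: &'t [T]) -> Self {
        CursorRef { items, index: 0 }
    }

    // Peek `n` items past the current position without consuming anything.
    fn look_ahead(&self, n: usize) -> Option<&'t T> {
        self.items.get(self.index + n)
    }
}

impl<'t, T> Iterator for CursorRef<'t, T> {
    type Item = &'t T;

    fn next(&mut self) -> Option<&'t T> {
        let item = self.items.get(self.index)?;
        self.index += 1;
        Some(item)
    }
}

fn main() {
    let tokens = ["count", "(", "ident", ")"];
    let mut cursor = CursorRef::new(&tokens);

    // Lookahead does not advance the cursor...
    assert_eq!(cursor.look_ahead(0), Some(&"count"));
    // ...so `next` still yields the same element afterwards.
    assert_eq!(cursor.next(), Some(&"count"));
    assert_eq!(cursor.look_ahead(1), Some(&"ident"));
}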

View file

@@ -550,9 +550,9 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
     fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
         let mut iter = tts.trees().peekable();
         while let Some(tt) = iter.next() {
-            self.print_tt(&tt, convert_dollar_crate);
+            self.print_tt(tt, convert_dollar_crate);
             if let Some(next) = iter.peek() {
-                if tt_prepend_space(next, &tt) {
+                if tt_prepend_space(next, tt) {
                     self.space();
                 }
             }

View file

@@ -400,7 +400,7 @@ impl<'a> StripUnconfigured<'a> {
             // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
             // for `attr` when we expand it to `#[attr]`
-            let mut orig_trees = orig_tokens.trees();
+            let mut orig_trees = orig_tokens.into_trees();
             let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }) = orig_trees.next().unwrap() else {
                 panic!("Bad tokens for attribute {:?}", attr);
             };

View file

@@ -1,5 +1,5 @@
 use rustc_ast::token::{self, Delimiter};
-use rustc_ast::tokenstream::{Cursor, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{CursorRef, TokenStream, TokenTree};
 use rustc_ast::{LitIntType, LitKind};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Applicability, PResult};
@@ -71,12 +71,14 @@ impl MetaVarExpr {
 }

 // Checks if there are any remaining tokens. For example, `${ignore(ident ... a b c ...)}`
-fn check_trailing_token<'sess>(iter: &mut Cursor, sess: &'sess ParseSess) -> PResult<'sess, ()> {
+fn check_trailing_token<'sess>(
+    iter: &mut CursorRef<'_>,
+    sess: &'sess ParseSess,
+) -> PResult<'sess, ()> {
     if let Some(tt) = iter.next() {
-        let mut diag = sess.span_diagnostic.struct_span_err(
-            tt.span(),
-            &format!("unexpected token: {}", pprust::tt_to_string(&tt)),
-        );
+        let mut diag = sess
+            .span_diagnostic
+            .struct_span_err(tt.span(), &format!("unexpected token: {}", pprust::tt_to_string(tt)));
         diag.span_note(tt.span(), "meta-variable expression must not have trailing tokens");
         Err(diag)
     } else {
@@ -86,7 +88,7 @@ fn check_trailing_token<'sess>(iter: &mut Cursor, sess: &'sess ParseSess) -> PRe
 /// Parse a meta-variable `count` expression: `count(ident[, depth])`
 fn parse_count<'sess>(
-    iter: &mut Cursor,
+    iter: &mut CursorRef<'_>,
     sess: &'sess ParseSess,
     span: Span,
 ) -> PResult<'sess, MetaVarExpr> {
@@ -97,7 +99,7 @@ fn parse_count<'sess>(
 /// Parses the depth used by index(depth) and length(depth).
 fn parse_depth<'sess>(
-    iter: &mut Cursor,
+    iter: &mut CursorRef<'_>,
     sess: &'sess ParseSess,
     span: Span,
 ) -> PResult<'sess, usize> {
@@ -110,7 +112,7 @@ fn parse_depth<'sess>(
             "meta-variable expression depth must be a literal"
         ));
     };
-    if let Ok(lit_kind) = LitKind::from_lit_token(lit)
+    if let Ok(lit_kind) = LitKind::from_lit_token(*lit)
         && let LitKind::Int(n_u128, LitIntType::Unsuffixed) = lit_kind
         && let Ok(n_usize) = usize::try_from(n_u128)
     {
@@ -124,7 +126,7 @@ fn parse_depth<'sess>(
 /// Parses an generic ident
 fn parse_ident<'sess>(
-    iter: &mut Cursor,
+    iter: &mut CursorRef<'_>,
     sess: &'sess ParseSess,
     span: Span,
 ) -> PResult<'sess, Ident> {
@@ -132,7 +134,7 @@ fn parse_ident<'sess>(
     if let Some((elem, false)) = token.ident() {
         return Ok(elem);
     }
-    let token_str = pprust::token_to_string(&token);
+    let token_str = pprust::token_to_string(token);
     let mut err = sess.span_diagnostic.struct_span_err(
         span,
         &format!("expected identifier, found `{}`", &token_str)
@@ -150,7 +152,7 @@ fn parse_ident<'sess>(
 /// Tries to move the iterator forward returning `true` if there is a comma. If not, then the
 /// iterator is not modified and the result is `false`.
-fn try_eat_comma(iter: &mut Cursor) -> bool {
+fn try_eat_comma(iter: &mut CursorRef<'_>) -> bool {
     if let Some(TokenTree::Token(token::Token { kind: token::Comma, .. })) = iter.look_ahead(0) {
         let _ = iter.next();
         return true;

View file

@@ -48,7 +48,7 @@ pub(super) fn parse(
     // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
     // additional trees if need be.
-    let mut trees = input.trees();
+    let mut trees = input.into_trees();
     while let Some(tree) = trees.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).

View file

@@ -61,7 +61,7 @@ fn bad_path_expr_1() {
 fn string_to_tts_macro() {
     create_default_session_globals_then(|| {
         let tts: Vec<_> =
-            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
+            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).into_trees().collect();
         let tts: &[TokenTree] = &tts[..];

         match tts {
@@ -293,7 +293,7 @@ fn ttdelim_span() {
         .unwrap();

         let tts: Vec<_> = match expr.kind {
-            ast::ExprKind::MacCall(ref mac) => mac.args.inner_tokens().trees().collect(),
+            ast::ExprKind::MacCall(ref mac) => mac.args.inner_tokens().into_trees().collect(),
             _ => panic!("not a macro"),
         };

View file

@@ -484,7 +484,7 @@ impl server::TokenStream for Rustc<'_, '_> {
         tree.to_internal()
     }

     fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
-        TokenStreamIter { cursor: stream.trees(), stack: vec![] }
+        TokenStreamIter { cursor: stream.into_trees(), stack: vec![] }
     }
 }

View file

@@ -35,7 +35,7 @@ fn test_concat() {
 fn test_to_from_bijection() {
     create_default_session_globals_then(|| {
         let test_start = string_to_ts("foo::bar(baz)");
-        let test_end = test_start.trees().collect();
+        let test_end = test_start.trees().cloned().collect();
         assert_eq!(test_start, test_end)
     })
 }

View file

@@ -108,7 +108,7 @@ impl<'a> FlattenNonterminals<'a> {
         fn can_skip(stream: &TokenStream) -> bool {
             stream.trees().all(|tree| match tree {
                 TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
-                TokenTree::Delimited(_, _, inner) => can_skip(&inner),
+                TokenTree::Delimited(_, _, inner) => can_skip(inner),
             })
         }