Remove ThinTokenStream.
`TokenStream` is now almost identical to `ThinTokenStream`. This commit removes the latter, replacing it with the former.
parent 28966e1a7a
commit ce0d9949b8
11 changed files with 23 additions and 72 deletions
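In practice the change is mechanical: `TokenTree::Delimited` now carries a `TokenStream` directly instead of a `ThinTokenStream`, so call sites drop the `.stream()` conversion and either use the stream in place or `clone()` it. The following is a minimal, self-contained sketch of the shape of that migration, using simplified stand-in types rather than the real `libsyntax` definitions:

// Sketch only (not the real libsyntax types): before this commit the
// delimited variant held a ThinTokenStream and call sites went through
// `.stream()`; afterwards it holds a TokenStream directly.
#[derive(Clone, Debug)]
struct TokenStream(Vec<String>);

#[derive(Clone, Debug)]
enum TokenTree {
    Token(String),
    // Before: Delimited(DelimSpan, DelimToken, ThinTokenStream)
    // After:  Delimited(DelimSpan, DelimToken, TokenStream)
    Delimited(TokenStream),
}

fn print_tts(tts: &TokenStream) {
    for tok in &tts.0 {
        print!("{} ", tok);
    }
}

fn walk(tt: &TokenTree) {
    match tt {
        TokenTree::Token(tok) => print!("{} ", tok),
        // Old call sites read `print_tts(&tts.stream())`; the conversion
        // back to a full stream is no longer needed.
        TokenTree::Delimited(tts) => print_tts(tts),
    }
}

fn main() {
    let inner = TokenStream(vec!["a".into(), "+".into(), "b".into()]);
    walk(&TokenTree::Delimited(inner));
    println!();
}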
@@ -258,7 +258,7 @@ for tokenstream::TokenTree {
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
                 std_hash::Hash::hash(&delim, hasher);
-                for sub_tt in tts.stream().trees() {
+                for sub_tt in tts.trees() {
                     sub_tt.hash_stable(hcx, hasher);
                 }
             }

@@ -1540,7 +1540,7 @@ impl KeywordIdents {
                     _ => {},
                 }
                 TokenTree::Delimited(_, _, tts) => {
-                    self.check_tokens(cx, tts.stream())
+                    self.check_tokens(cx, tts)
                 },
             }
         }

@@ -15,7 +15,7 @@ use rustc_target::spec::abi::Abi;
 use source_map::{dummy_spanned, respan, Spanned};
 use symbol::{keywords, Symbol};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{ThinTokenStream, TokenStream};
+use tokenstream::TokenStream;
 use ThinVec;
 
 use rustc_data_structures::fx::FxHashSet;

@@ -1216,7 +1216,7 @@ pub type Mac = Spanned<Mac_>;
 pub struct Mac_ {
     pub path: Path,
     pub delim: MacDelimiter,
-    pub tts: ThinTokenStream,
+    pub tts: TokenStream,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]

@@ -1228,13 +1228,13 @@ pub enum MacDelimiter {
 
 impl Mac_ {
     pub fn stream(&self) -> TokenStream {
-        self.tts.stream()
+        self.tts.clone()
     }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct MacroDef {
-    pub tokens: ThinTokenStream,
+    pub tokens: TokenStream,
     pub legacy: bool,
 }
 

@@ -565,7 +565,7 @@ impl MetaItemKind {
             }
             Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
                 tokens.next();
-                tts.stream()
+                tts.clone()
             }
             _ => return Some(MetaItemKind::Word),
         };

@@ -748,7 +748,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
         },
         TokenTree::Delimited(span, delim, ref tts) => {
             let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false);
-            stmts.extend(statements_mk_tts(cx, tts.stream()));
+            stmts.extend(statements_mk_tts(cx, tts.clone()));
             stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false));
             stmts
         }

@@ -598,7 +598,7 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
         TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
             DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
             delim,
-            fld.fold_tts(tts.stream()).into(),
+            fld.fold_tts(tts).into(),
         ),
     }
 }

@@ -811,7 +811,7 @@ mod tests {
                 )
                 if name_macro_rules.name == "macro_rules"
                 && name_zip.name == "zip" => {
-                    let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
+                    let tts = &macro_tts.trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                         (
                             3,

@@ -820,7 +820,7 @@ mod tests {
                             Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                         )
                         if macro_delim == token::Paren => {
-                            let tts = &first_tts.stream().trees().collect::<Vec<_>>();
+                            let tts = &first_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,

@@ -830,7 +830,7 @@ mod tests {
                                 if first_delim == token::Paren && ident.name == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                             }
-                            let tts = &second_tts.stream().trees().collect::<Vec<_>>();
+                            let tts = &second_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,

@@ -46,7 +46,7 @@ use print::pprust;
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};
 
 use std::borrow::Cow;

@@ -285,12 +285,12 @@ enum LastToken {
 }
 
 impl TokenCursorFrame {
-    fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
+    fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
         TokenCursorFrame {
             delim: delim,
             span: sp,
             open_delim: delim == token::NoDelim,
-            tree_cursor: tts.stream().into_trees(),
+            tree_cursor: tts.clone().into_trees(),
             close_delim: delim == token::NoDelim,
             last_token: LastToken::Was(None),
         }

@@ -2325,7 +2325,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
         let delim = match self.token {
             token::OpenDelim(delim) => delim,
             _ => {

@@ -2345,7 +2345,7 @@ impl<'a> Parser<'a> {
             token::Brace => MacDelimiter::Brace,
             token::NoDelim => self.bug("unexpected no delimiter"),
         };
-        Ok((delim, tts.stream().into()))
+        Ok((delim, tts.into()))
     }
 
     /// At the bottom (top?) of the precedence hierarchy,

@@ -4633,7 +4633,7 @@ impl<'a> Parser<'a> {
         let ident = self.parse_ident()?;
         let tokens = if self.check(&token::OpenDelim(token::Brace)) {
             match self.parse_token_tree() {
-                TokenTree::Delimited(_, _, tts) => tts.stream(),
+                TokenTree::Delimited(_, _, tts) => tts,
                 _ => unreachable!(),
             }
         } else if self.check(&token::OpenDelim(token::Paren)) {

@@ -807,7 +807,7 @@ pub trait PrintState<'a> {
             TokenTree::Delimited(_, delim, tts) => {
                 self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
                 self.writer().space()?;
-                self.print_tts(tts.stream())?;
+                self.print_tts(tts)?;
                 self.writer().space()?;
                 self.writer().word(token_to_string(&token::CloseDelim(delim)))
             },

@@ -41,7 +41,7 @@ pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(DelimSpan, DelimToken, ThinTokenStream),
+    Delimited(DelimSpan, DelimToken, TokenStream),
 }
 
 impl TokenTree {

@@ -62,8 +62,7 @@ impl TokenTree {
             (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
             (&TokenTree::Delimited(_, delim, ref tts),
              &TokenTree::Delimited(_, delim2, ref tts2)) => {
-                delim == delim2 &&
-                tts.stream().eq_unspanned(&tts2.stream())
+                delim == delim2 && tts.eq_unspanned(&tts2)
             }
             (_, _) => false,
         }

@@ -81,8 +80,7 @@ impl TokenTree {
             }
             (&TokenTree::Delimited(_, delim, ref tts),
              &TokenTree::Delimited(_, delim2, ref tts2)) => {
-                delim == delim2 &&
-                tts.stream().probably_equal_for_proc_macro(&tts2.stream())
+                delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
             }
             (_, _) => false,
         }

@@ -492,41 +490,6 @@ impl Cursor {
     }
 }
 
-/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation.
-/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
-/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
-#[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
-
-impl ThinTokenStream {
-    pub fn stream(&self) -> TokenStream {
-        self.clone().into()
-    }
-}
-
-impl From<TokenStream> for ThinTokenStream {
-    fn from(stream: TokenStream) -> ThinTokenStream {
-        ThinTokenStream(match stream {
-            TokenStream::Empty => None,
-            TokenStream::Stream(stream) => Some(stream),
-        })
-    }
-}
-
-impl From<ThinTokenStream> for TokenStream {
-    fn from(stream: ThinTokenStream) -> TokenStream {
-        stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty)
-    }
-}
-
-impl Eq for ThinTokenStream {}
-
-impl PartialEq<ThinTokenStream> for ThinTokenStream {
-    fn eq(&self, other: &ThinTokenStream) -> bool {
-        TokenStream::from(self.clone()) == TokenStream::from(other.clone())
-    }
-}
-
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.write_str(&pprust::tokens_to_string(self.clone()))

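The doc comment deleted just above records why the thin wrapper existed at all: `TokenTree::Delimited` contains further token trees, so the variant needs indirection both to break the recursive type and to stay small. Once the stream type is itself a single nullable pointer to a shared vector, it provides that indirection on its own. A rough, self-contained sketch of that size argument, with simplified stand-ins (`Rc` in place of `Lrc`, plain trees in place of `TreeAndJoint`), not the real definitions:

use std::mem::size_of;
use std::rc::Rc;

// Sketch only: simplified stand-ins for the libsyntax types.
#[derive(Clone, Debug)]
enum TokenTree {
    Token(char),
    // The Rc-backed, optional vector gives the indirection that breaks the
    // recursion (a TokenTree contains TokenTrees) and keeps this variant one
    // pointer wide -- the role ThinTokenStream used to play.
    Delimited(TokenStream),
}

#[derive(Clone, Debug)]
struct TokenStream(Option<Rc<Vec<TokenTree>>>);

impl TokenStream {
    fn empty() -> Self {
        TokenStream(None)
    }

    fn new(trees: Vec<TokenTree>) -> Self {
        if trees.is_empty() { Self::empty() } else { TokenStream(Some(Rc::new(trees))) }
    }
}

fn main() {
    // With a pointer-sized TokenStream there is no need for a second,
    // "thin" stream type just to keep TokenTree small.
    println!("TokenStream: {} bytes", size_of::<TokenStream>());
    println!("TokenTree:   {} bytes", size_of::<TokenTree>());

    let inner = TokenStream::new(vec![TokenTree::Token('a'), TokenTree::Token('b')]);
    let tree = TokenTree::Delimited(inner);
    println!("{:?}", tree);
}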
@@ -545,18 +508,6 @@ impl Decodable for TokenStream {
     }
 }
 
-impl Encodable for ThinTokenStream {
-    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
-        TokenStream::from(self.clone()).encode(encoder)
-    }
-}
-
-impl Decodable for ThinTokenStream {
-    fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
-        TokenStream::decode(decoder).map(Into::into)
-    }
-}
-
 #[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub struct DelimSpan {
     pub open: Span,

@@ -832,7 +832,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
 pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
     match tt {
         TokenTree::Token(_, tok) => visitor.visit_token(tok),
-        TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()),
+        TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
     }
 }
 