Remove tokenstream::Delimited
Because it's an extra type layer that doesn't really help; in a couple of places it actively gets in the way, and overall removing it makes the code nicer. It does, however, move `tokenstream::TokenTree` further away from the `TokenTree` in `quote.rs`.

More importantly, this change reduces the size of `TokenStream` from 48 bytes to 40 bytes on x86-64, which is enough to slightly reduce instruction counts on numerous benchmarks, the best by 1.5%.

Note that `open_tt` and `close_tt` have gone from being methods on `Delimited` to associated methods of `TokenTree`.
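The diff below applies this change mechanically across the compiler. As a rough, self-contained illustration of its shape (the types here -- `Delim`, `Tts`, the two `TokenTree` enums -- are simplified stand-ins, not the real `syntax::tokenstream` definitions shown in the diff), the struct layer is folded into the enum variant, and use sites destructure the delimiter and contents in the pattern instead of reaching through `delimited.delim` / `delimited.tts`:

// Minimal sketch with stand-in types; not the actual rustc definitions.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Delim { Paren, Bracket, Brace }

type Tts = Vec<u32>; // stands in for ThinTokenStream

// Before: the delimiter and contents sit behind an extra struct layer.
struct Delimited { delim: Delim, tts: Tts }
enum TokenTreeOld {
    Token(u32),
    Delimited(Delimited),
}

// After: the variant carries both fields directly.
enum TokenTreeNew {
    Token(u32),
    Delimited(Delim, Tts),
}

fn delim_old(tt: &TokenTreeOld) -> Option<Delim> {
    match tt {
        // Old style: bind the struct, then read its fields.
        TokenTreeOld::Delimited(delimited) => Some(delimited.delim),
        TokenTreeOld::Token(_) => None,
    }
}

fn delim_new(tt: &TokenTreeNew) -> Option<Delim> {
    match tt {
        // New style: destructure the delimiter straight out of the variant.
        TokenTreeNew::Delimited(delim, _tts) => Some(*delim),
        TokenTreeNew::Token(_) => None,
    }
}

fn main() {
    let old = TokenTreeOld::Delimited(Delimited { delim: Delim::Bracket, tts: vec![1, 2] });
    let new = TokenTreeNew::Delimited(Delim::Bracket, vec![1, 2]);
    assert_eq!(delim_old(&old), delim_new(&new));
    println!("both report {:?}", delim_new(&new));
}

In the real code the former `Delimited::open_tt`/`close_tt` helpers become the associated functions `TokenTree::open_tt(span, delim)` and `TokenTree::close_tt(span, delim)`, and the payoff is the 48-to-40-byte reduction of `TokenStream`, which the new `static_assert!` in `tokenstream.rs` (visible in the diff) pins on x86-64.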
parent b755501043
commit 1fe2c03240

18 changed files with 182 additions and 203 deletions
@@ -72,7 +72,7 @@ use syntax::ptr::P;
 use syntax::source_map::{self, respan, CompilerDesugaringKind, Spanned};
 use syntax::std_inject;
 use syntax::symbol::{keywords, Symbol};
-use syntax::tokenstream::{Delimited, TokenStream, TokenTree};
+use syntax::tokenstream::{TokenStream, TokenTree};
 use syntax::parse::token::Token;
 use syntax::visit::{self, Visitor};
 use syntax_pos::{Span, MultiSpan};
@@ -1088,12 +1088,10 @@ impl<'a> LoweringContext<'a> {
     fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
         match tree {
             TokenTree::Token(span, token) => self.lower_token(token, span),
-            TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
+            TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                 span,
-                Delimited {
-                    delim: delimited.delim,
-                    tts: self.lower_token_stream(delimited.tts.into()).into(),
-                },
+                delim,
+                self.lower_token_stream(tts.into()).into(),
             ).into(),
         }
     }
@@ -265,10 +265,10 @@ for tokenstream::TokenTree {
                 span.hash_stable(hcx, hasher);
                 hash_token(token, hcx, hasher);
             }
-            tokenstream::TokenTree::Delimited(span, ref delimited) => {
+            tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
-                std_hash::Hash::hash(&delimited.delim, hasher);
-                for sub_tt in delimited.stream().trees() {
+                std_hash::Hash::hash(&delim, hasher);
+                for sub_tt in tts.stream().trees() {
                     sub_tt.hash_stable(hcx, hasher);
                 }
             }
@@ -1540,8 +1540,8 @@ impl KeywordIdents {
                     }
                     _ => {},
                 }
-                TokenTree::Delimited(_, ref delim) => {
-                    self.check_tokens(cx, delim.tts.clone().into())
+                TokenTree::Delimited(_, _, tts) => {
+                    self.check_tokens(cx, tts.stream())
                 },
             }
         }
@@ -1235,7 +1235,7 @@ pub enum MacDelimiter {

 impl Mac_ {
     pub fn stream(&self) -> TokenStream {
-        self.tts.clone().into()
+        self.tts.stream()
     }
 }

@@ -34,7 +34,7 @@ use parse::token::{self, Token};
 use ptr::P;
 use symbol::Symbol;
 use ThinVec;
-use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
+use tokenstream::{TokenStream, TokenTree, DelimSpan};
 use GLOBALS;

 use std::iter;
@@ -549,10 +549,11 @@ impl MetaItemKind {
                     }
                     tokens.push(item.node.tokens());
                 }
-                TokenTree::Delimited(DelimSpan::from_single(span), Delimited {
-                    delim: token::Paren,
-                    tts: TokenStream::concat(tokens).into(),
-                }).into()
+                TokenTree::Delimited(
+                    DelimSpan::from_single(span),
+                    token::Paren,
+                    TokenStream::concat(tokens).into(),
+                ).into()
             }
         }
     }
@@ -570,9 +571,9 @@ impl MetaItemKind {
                     None
                 };
             }
-            Some(TokenTree::Delimited(_, ref delimited)) if delimited.delim == token::Paren => {
+            Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
                 tokens.next();
-                delimited.stream()
+                tts.stream()
             }
             _ => return Some(MetaItemKind::Word),
         };
@@ -622,9 +622,9 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
    fn extract_proc_macro_attr_input(&self, tokens: TokenStream, span: Span) -> TokenStream {
        let mut trees = tokens.trees();
        match trees.next() {
-           Some(TokenTree::Delimited(_, delim)) => {
+           Some(TokenTree::Delimited(_, _, tts)) => {
                if trees.next().is_none() {
-                   return delim.tts.into()
+                   return tts.into()
                }
            }
            Some(TokenTree::Token(..)) => {}
@@ -36,7 +36,7 @@ pub mod rt {
     use symbol::Symbol;
     use ThinVec;

-    use tokenstream::{self, DelimSpan, TokenTree, TokenStream};
+    use tokenstream::{DelimSpan, TokenTree, TokenStream};

     pub use parse::new_parser_from_tts;
     pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName};
@@ -246,9 +246,9 @@ pub mod rt {
             inner.push(self.tokens.clone());

             let delim_span = DelimSpan::from_single(self.span);
-            r.push(TokenTree::Delimited(delim_span, tokenstream::Delimited {
-                delim: token::Bracket, tts: TokenStream::concat(inner).into()
-            }));
+            r.push(TokenTree::Delimited(
+                delim_span, token::Bracket, TokenStream::concat(inner).into()
+            ));
             r
         }
     }
@@ -262,10 +262,9 @@ pub mod rt {

     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DelimSpan::dummy(), tokenstream::Delimited {
-                delim: token::Paren,
-                tts: TokenStream::empty().into(),
-            })]
+            vec![
+                TokenTree::Delimited(DelimSpan::dummy(), token::Paren, TokenStream::empty().into())
+            ]
         }
     }

@@ -382,8 +381,6 @@ pub mod rt {

// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
-    use tokenstream::Delimited;
-
    let mut results = Vec::new();
    let mut result = Vec::new();
    let mut open_span = DUMMY_SP;
@@ -395,10 +392,11 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
            }
            TokenTree::Token(span, token::CloseDelim(delim)) => {
                let delim_span = DelimSpan::from_pair(open_span, span);
-                let tree = TokenTree::Delimited(delim_span, Delimited {
+                let tree = TokenTree::Delimited(
+                    delim_span,
                    delim,
-                    tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
-                });
+                    result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
+                );
                result = results.pop().unwrap();
                result.push(tree);
            }
@@ -758,10 +756,10 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
                                              vec![e_tok]);
            vec![cx.stmt_expr(e_push)]
        },
-        TokenTree::Delimited(span, ref delimed) => {
-            let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span.open), false);
-            stmts.extend(statements_mk_tts(cx, delimed.stream()));
-            stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span.close), false));
+        TokenTree::Delimited(span, delim, ref tts) => {
+            let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false);
+            stmts.extend(statements_mk_tts(cx, tts.stream()));
+            stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false));
            stmts
        }
    }
@@ -280,17 +280,17 @@ where
        // `tree` is a `$` token. Look at the next token in `trees`
        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
            // `tree` is followed by a delimited set of token trees. This indicates the beginning
-            // of a repetition sequence in the macro (e.g., `$(pat)*`).
-            Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
+            // of a repetition sequence in the macro (e.g. `$(pat)*`).
+            Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
                // Must have `(` not `{` or `[`
-                if delimited.delim != token::Paren {
-                    let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
+                if delim != token::Paren {
+                    let tok = pprust::token_to_string(&token::OpenDelim(delim));
                    let msg = format!("expected `(`, found `{}`", tok);
                    sess.span_diagnostic.span_err(span.entire(), &msg);
                }
                // Parse the contents of the sequence itself
                let sequence = parse(
-                    delimited.tts.into(),
+                    tts.into(),
                    expect_matchers,
                    sess,
                    features,
@@ -354,12 +354,12 @@ where

        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
        // descend into the delimited set and further parse it.
-        tokenstream::TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
+        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
            span,
            Lrc::new(Delimited {
-                delim: delimited.delim,
+                delim: delim,
                tts: parse(
-                    delimited.tts.into(),
+                    tts.into(),
                    expect_matchers,
                    sess,
                    features,
@@ -17,7 +17,7 @@ use fold::noop_fold_tt;
 use parse::token::{self, Token, NtTT};
 use smallvec::SmallVec;
 use syntax_pos::DUMMY_SP;
-use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
+use tokenstream::{TokenStream, TokenTree, DelimSpan};

 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -105,10 +105,11 @@ pub fn transcribe(cx: &ExtCtxt,
                if result_stack.is_empty() {
                    return TokenStream::concat(result);
                }
-                let tree = TokenTree::Delimited(span, Delimited {
-                    delim: forest.delim,
-                    tts: TokenStream::concat(result).into(),
-                });
+                let tree = TokenTree::Delimited(
+                    span,
+                    forest.delim,
+                    TokenStream::concat(result).into(),
+                );
                result = result_stack.pop().unwrap();
                result.push(tree.into());
            }
@@ -605,12 +605,10 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
    match tt {
        TokenTree::Token(span, tok) =>
            TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
-        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(
+        TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
            DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
-            Delimited {
-                tts: fld.fold_tts(delimed.stream()).into(),
-                delim: delimed.delim,
-            }
+            delim,
+            fld.fold_tts(tts.stream()).into(),
        ),
    }
}
@@ -11,7 +11,7 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use tokenstream::{Delimited, DelimSpan, TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};

 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -155,10 +155,11 @@ impl<'a> StringReader<'a> {
                    _ => {}
                }

-                Ok(TokenTree::Delimited(delim_span, Delimited {
+                Ok(TokenTree::Delimited(
+                    delim_span,
                    delim,
-                    tts: tts.into(),
-                }).into())
+                    tts.into(),
+                ).into())
            },
            token::CloseDelim(_) => {
                // An unexpected closing delimiter (i.e., there is no
@@ -764,7 +764,7 @@ mod tests {
     use attr::first_attr_value_str_by_name;
     use parse;
     use print::pprust::item_to_string;
-    use tokenstream::{self, DelimSpan, TokenTree};
+    use tokenstream::{DelimSpan, TokenTree};
     use util::parser_testing::string_to_stream;
     use util::parser_testing::{string_to_expr, string_to_item};
     use with_globals;
@@ -795,42 +795,41 @@ mod tests {
                Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
                Some(&TokenTree::Token(_, token::Not)),
                Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
-                Some(&TokenTree::Delimited(_, ref macro_delimed)),
+                Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
            )
            if name_macro_rules.name == "macro_rules"
            && name_zip.name == "zip" => {
-                let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
+                let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
                match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                    (
                        3,
-                        Some(&TokenTree::Delimited(_, ref first_delimed)),
+                        Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
                        Some(&TokenTree::Token(_, token::FatArrow)),
-                        Some(&TokenTree::Delimited(_, ref second_delimed)),
+                        Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                    )
-                    if macro_delimed.delim == token::Paren => {
-                        let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
+                    if macro_delim == token::Paren => {
+                        let tts = &first_tts.stream().trees().collect::<Vec<_>>();
                        match (tts.len(), tts.get(0), tts.get(1)) {
                            (
                                2,
                                Some(&TokenTree::Token(_, token::Dollar)),
                                Some(&TokenTree::Token(_, token::Ident(ident, false))),
                            )
-                            if first_delimed.delim == token::Paren && ident.name == "a" => {},
-                            _ => panic!("value 3: {:?}", *first_delimed),
+                            if first_delim == token::Paren && ident.name == "a" => {},
+                            _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                        }
-                        let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
+                        let tts = &second_tts.stream().trees().collect::<Vec<_>>();
                        match (tts.len(), tts.get(0), tts.get(1)) {
                            (
                                2,
                                Some(&TokenTree::Token(_, token::Dollar)),
                                Some(&TokenTree::Token(_, token::Ident(ident, false))),
                            )
-                            if second_delimed.delim == token::Paren
-                            && ident.name == "a" => {},
-                            _ => panic!("value 4: {:?}", *second_delimed),
+                            if second_delim == token::Paren && ident.name == "a" => {},
+                            _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                        }
                    },
-                    _ => panic!("value 2: {:?}", *macro_delimed),
+                    _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
                }
            },
            _ => panic!("value: {:?}",tts),
@@ -848,26 +847,24 @@ mod tests {
            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
            TokenTree::Delimited(
                DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
-                tokenstream::Delimited {
-                    delim: token::DelimToken::Paren,
-                    tts: TokenStream::concat(vec![
-                        TokenTree::Token(sp(6, 7),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
-                        TokenTree::Token(sp(8, 9), token::Colon).into(),
-                        TokenTree::Token(sp(10, 13),
-                                         token::Ident(Ident::from_str("i32"), false)).into(),
-                    ]).into(),
-                }).into(),
+                token::DelimToken::Paren,
+                TokenStream::concat(vec![
+                    TokenTree::Token(sp(6, 7),
+                                     token::Ident(Ident::from_str("b"), false)).into(),
+                    TokenTree::Token(sp(8, 9), token::Colon).into(),
+                    TokenTree::Token(sp(10, 13),
+                                     token::Ident(Ident::from_str("i32"), false)).into(),
+                ]).into(),
+            ).into(),
            TokenTree::Delimited(
                DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
-                tokenstream::Delimited {
-                    delim: token::DelimToken::Brace,
-                    tts: TokenStream::concat(vec![
-                        TokenTree::Token(sp(17, 18),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
-                        TokenTree::Token(sp(18, 19), token::Semi).into(),
-                    ]).into(),
-                }).into()
+                token::DelimToken::Brace,
+                TokenStream::concat(vec![
+                    TokenTree::Token(sp(17, 18),
+                                     token::Ident(Ident::from_str("b"), false)).into(),
+                    TokenTree::Token(sp(18, 19), token::Semi).into(),
+                ]).into(),
+            ).into()
        ]);

        assert_eq!(tts, expected);
@@ -48,13 +48,14 @@ use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
 use parse::{self, SeqSep, classify, token};
 use parse::lexer::TokenAndSpan;
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use parse::token::DelimToken;
 use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, Delimited, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};

 use std::borrow::Cow;
@@ -293,13 +294,13 @@ enum LastToken {
 }

 impl TokenCursorFrame {
-    fn new(sp: DelimSpan, delimited: &Delimited) -> Self {
+    fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
        TokenCursorFrame {
-            delim: delimited.delim,
+            delim: delim,
            span: sp,
-            open_delim: delimited.delim == token::NoDelim,
-            tree_cursor: delimited.stream().into_trees(),
-            close_delim: delimited.delim == token::NoDelim,
+            open_delim: delim == token::NoDelim,
+            tree_cursor: tts.stream().into_trees(),
+            close_delim: delim == token::NoDelim,
            last_token: LastToken::Was(None),
        }
    }
@@ -310,14 +311,12 @@ impl TokenCursor {
        loop {
            let tree = if !self.frame.open_delim {
                self.frame.open_delim = true;
-                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
-                    .open_tt(self.frame.span.open)
+                TokenTree::open_tt(self.frame.span.open, self.frame.delim)
            } else if let Some(tree) = self.frame.tree_cursor.next() {
                tree
            } else if !self.frame.close_delim {
                self.frame.close_delim = true;
-                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
-                    .close_tt(self.frame.span.close)
+                TokenTree::close_tt(self.frame.span.close, self.frame.delim)
            } else if let Some(frame) = self.stack.pop() {
                self.frame = frame;
                continue
@@ -332,8 +331,8 @@ impl TokenCursor {

        match tree {
            TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
-            TokenTree::Delimited(sp, ref delimited) => {
-                let frame = TokenCursorFrame::new(sp, delimited);
+            TokenTree::Delimited(sp, delim, tts) => {
+                let frame = TokenCursorFrame::new(sp, delim, &tts);
                self.stack.push(mem::replace(&mut self.frame, frame));
            }
        }
@@ -362,25 +361,28 @@ impl TokenCursor {
        }

        let delim_span = DelimSpan::from_single(sp);
-        let body = TokenTree::Delimited(delim_span, Delimited {
-            delim: token::Bracket,
-            tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
-                  TokenTree::Token(sp, token::Eq),
-                  TokenTree::Token(sp, token::Literal(
-                      token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
-                .iter().cloned().collect::<TokenStream>().into(),
-        });
+        let body = TokenTree::Delimited(
+            delim_span,
+            token::Bracket,
+            [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
+             TokenTree::Token(sp, token::Eq),
+             TokenTree::Token(sp, token::Literal(
+                 token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+            ]
+            .iter().cloned().collect::<TokenStream>().into(),
+        );

-        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(delim_span, &Delimited {
-            delim: token::NoDelim,
-            tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
+        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
+            delim_span,
+            token::NoDelim,
+            &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
                    .iter().cloned().collect::<TokenStream>().into()
            } else {
                [TokenTree::Token(sp, token::Pound), body]
                    .iter().cloned().collect::<TokenStream>().into()
            },
-        })));
+        )));

        self.next()
    }
@@ -561,10 +563,11 @@ impl<'a> Parser<'a> {
            root_module_name: None,
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
-                frame: TokenCursorFrame::new(DelimSpan::dummy(), &Delimited {
-                    delim: token::NoDelim,
-                    tts: tokens.into(),
-                }),
+                frame: TokenCursorFrame::new(
+                    DelimSpan::dummy(),
+                    token::NoDelim,
+                    &tokens.into(),
+                ),
                stack: Vec::new(),
            },
            desugar_doc_comments,
@@ -1238,7 +1241,7 @@ impl<'a> Parser<'a> {
        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
            Some(tree) => match tree {
                TokenTree::Token(_, tok) => tok,
-                TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
+                TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
            },
            None => token::CloseDelim(self.token_cursor.frame.delim),
        })
@@ -1251,7 +1254,7 @@ impl<'a> Parser<'a> {

        match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
            Some(TokenTree::Token(span, _)) => span,
-            Some(TokenTree::Delimited(span, _)) => span.entire(),
+            Some(TokenTree::Delimited(span, ..)) => span.entire(),
            None => self.look_ahead_span(dist - 1),
        }
    }
@@ -2317,8 +2320,8 @@ impl<'a> Parser<'a> {
                return Err(err)
            }
        };
-        let delimited = match self.parse_token_tree() {
-            TokenTree::Delimited(_, delimited) => delimited,
+        let tts = match self.parse_token_tree() {
+            TokenTree::Delimited(_, _, tts) => tts,
            _ => unreachable!(),
        };
        let delim = match delim {
@@ -2327,7 +2330,7 @@ impl<'a> Parser<'a> {
            token::Brace => MacDelimiter::Brace,
            token::NoDelim => self.bug("unexpected no delimiter"),
        };
-        Ok((delim, delimited.stream().into()))
+        Ok((delim, tts.stream().into()))
    }

    /// At the bottom (top?) of the precedence hierarchy,
@@ -2892,10 +2895,11 @@ impl<'a> Parser<'a> {
                                        self.token_cursor.stack.pop().unwrap());
                self.span = frame.span.entire();
                self.bump();
-                TokenTree::Delimited(frame.span, Delimited {
-                    delim: frame.delim,
-                    tts: frame.tree_cursor.original_stream().into(),
-                })
+                TokenTree::Delimited(
+                    frame.span,
+                    frame.delim,
+                    frame.tree_cursor.original_stream().into(),
+                )
            },
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
@@ -4609,7 +4613,7 @@ impl<'a> Parser<'a> {
        let ident = self.parse_ident()?;
        let tokens = if self.check(&token::OpenDelim(token::Brace)) {
            match self.parse_token_tree() {
-                TokenTree::Delimited(_, ref delimited) => delimited.stream(),
+                TokenTree::Delimited(_, _, tts) => tts.stream(),
                _ => unreachable!(),
            }
        } else if self.check(&token::OpenDelim(token::Paren)) {
@@ -818,16 +818,13 @@ fn prepend_attrs(sess: &ParseSess,

        brackets.push(attr.tokens.clone());

-        let tokens = tokenstream::Delimited {
-            delim: DelimToken::Bracket,
-            tts: brackets.build().into(),
-        };
        // The span we list here for `#` and for `[ ... ]` are both wrong in
        // that it encompasses more than each token, but it hopefully is "good
        // enough" for now at least.
        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
        let delim_span = DelimSpan::from_single(attr.span);
-        builder.push(tokenstream::TokenTree::Delimited(delim_span, tokens));
+        builder.push(tokenstream::TokenTree::Delimited(
+            delim_span, DelimToken::Bracket, brackets.build().into()));
    }
    builder.push(tokens.clone());
    Some(builder.build())
@@ -815,12 +815,12 @@ pub trait PrintState<'a> {
                    _ => Ok(())
                }
            }
-            TokenTree::Delimited(_, ref delimed) => {
-                self.writer().word(token_to_string(&delimed.open_token()))?;
+            TokenTree::Delimited(_, delim, tts) => {
+                self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
                self.writer().space()?;
-                self.print_tts(delimed.stream())?;
+                self.print_tts(tts.stream())?;
                self.writer().space()?;
-                self.writer().word(token_to_string(&delimed.close_token()))
+                self.writer().word(token_to_string(&token::CloseDelim(delim)))
            },
        }
    }
@@ -34,52 +34,6 @@ use util::RcVec;
 use std::borrow::Cow;
 use std::{fmt, iter, mem};

-/// A delimited sequence of token trees
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-pub struct Delimited {
-    /// The type of delimiter
-    pub delim: DelimToken,
-    /// The delimited sequence of token trees
-    pub tts: ThinTokenStream,
-}
-
-impl Delimited {
-    /// Returns the opening delimiter as a token.
-    pub fn open_token(&self) -> token::Token {
-        token::OpenDelim(self.delim)
-    }
-
-    /// Returns the closing delimiter as a token.
-    pub fn close_token(&self) -> token::Token {
-        token::CloseDelim(self.delim)
-    }
-
-    /// Returns the opening delimiter as a token tree.
-    pub fn open_tt(&self, span: Span) -> TokenTree {
-        let open_span = if span.is_dummy() {
-            span
-        } else {
-            span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
-        };
-        TokenTree::Token(open_span, self.open_token())
-    }
-
-    /// Returns the closing delimiter as a token tree.
-    pub fn close_tt(&self, span: Span) -> TokenTree {
-        let close_span = if span.is_dummy() {
-            span
-        } else {
-            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
-        };
-        TokenTree::Token(close_span, self.close_token())
-    }
-
-    /// Returns the token trees inside the delimiters.
-    pub fn stream(&self) -> TokenStream {
-        self.tts.clone().into()
-    }
-}
-
 /// When the main rust parser encounters a syntax-extension invocation, it
 /// parses the arguments to the invocation as a token-tree. This is a very
 /// loose structure, such that all sorts of different AST-fragments can
@@ -97,7 +51,7 @@ pub enum TokenTree {
    /// A single token
    Token(Span, token::Token),
    /// A delimited sequence of token trees
-    Delimited(DelimSpan, Delimited),
+    Delimited(DelimSpan, DelimToken, ThinTokenStream),
}

impl TokenTree {
@@ -116,9 +70,10 @@ impl TokenTree {
    pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
        match (self, other) {
            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
-            (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
-                dl.delim == dl2.delim &&
-                dl.stream().eq_unspanned(&dl2.stream())
+            (&TokenTree::Delimited(_, delim, ref tts),
+             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+                delim == delim2 &&
+                tts.stream().eq_unspanned(&tts2.stream())
            }
            (_, _) => false,
        }
@@ -134,9 +89,10 @@ impl TokenTree {
            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => {
                tk.probably_equal_for_proc_macro(tk2)
            }
-            (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
-                dl.delim == dl2.delim &&
-                dl.stream().probably_equal_for_proc_macro(&dl2.stream())
+            (&TokenTree::Delimited(_, delim, ref tts),
+             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+                delim == delim2 &&
+                tts.stream().probably_equal_for_proc_macro(&tts2.stream())
            }
            (_, _) => false,
        }
@@ -146,7 +102,7 @@ impl TokenTree {
    pub fn span(&self) -> Span {
        match *self {
            TokenTree::Token(sp, _) => sp,
-            TokenTree::Delimited(sp, _) => sp.entire(),
+            TokenTree::Delimited(sp, ..) => sp.entire(),
        }
    }

@@ -154,7 +110,7 @@ impl TokenTree {
    pub fn set_span(&mut self, span: Span) {
        match *self {
            TokenTree::Token(ref mut sp, _) => *sp = span,
-            TokenTree::Delimited(ref mut sp, _) => *sp = DelimSpan::from_single(span),
+            TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span),
        }
    }

@@ -169,6 +125,26 @@ impl TokenTree {
    pub fn joint(self) -> TokenStream {
        TokenStream { kind: TokenStreamKind::JointTree(self) }
    }
+
+    /// Returns the opening delimiter as a token tree.
+    pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
+        let open_span = if span.is_dummy() {
+            span
+        } else {
+            span.with_hi(span.lo() + BytePos(delim.len() as u32))
+        };
+        TokenTree::Token(open_span, token::OpenDelim(delim))
+    }
+
+    /// Returns the closing delimiter as a token tree.
+    pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
+        let close_span = if span.is_dummy() {
+            span
+        } else {
+            span.with_lo(span.hi() - BytePos(delim.len() as u32))
+        };
+        TokenTree::Token(close_span, token::CloseDelim(delim))
+    }
}

/// # Token Streams
@@ -182,6 +158,10 @@ pub struct TokenStream {
    kind: TokenStreamKind,
}

+// `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
+#[cfg(target_arch = "x86_64")]
+static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 40);
+
impl TokenStream {
    /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
    /// separating the two arguments with a comma for diagnostic suggestions.
@@ -198,7 +178,7 @@ impl TokenStream {
                    continue;
                }
                (TokenStreamKind::Tree(TokenTree::Token(sp, _)), _) => *sp,
-                (TokenStreamKind::Tree(TokenTree::Delimited(sp, _)), _) => sp.entire(),
+                (TokenStreamKind::Tree(TokenTree::Delimited(sp, ..)), _) => sp.entire(),
                _ => continue,
            };
            let sp = sp.shrink_to_hi();
@@ -678,6 +658,12 @@ impl Cursor {
#[derive(Debug, Clone)]
pub struct ThinTokenStream(Option<RcVec<TokenStream>>);

+impl ThinTokenStream {
+    pub fn stream(&self) -> TokenStream {
+        self.clone().into()
+    }
+}
+
impl From<TokenStream> for ThinTokenStream {
    fn from(stream: TokenStream) -> ThinTokenStream {
        ThinTokenStream(match stream.kind {
@@ -841,7 +841,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
    match tt {
        TokenTree::Token(_, tok) => visitor.visit_token(tok),
-        TokenTree::Delimited(_, delimed) => visitor.visit_tts(delimed.stream()),
+        TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()),
    }
}

@@ -64,11 +64,11 @@ impl FromInternal<(TokenStream, &'_ ParseSess, &'_ mut Vec<Self>)>

        let (tree, joint) = stream.as_tree();
        let (span, token) = match tree {
-            tokenstream::TokenTree::Delimited(span, delimed) => {
-                let delimiter = Delimiter::from_internal(delimed.delim);
+            tokenstream::TokenTree::Delimited(span, delim, tts) => {
+                let delimiter = Delimiter::from_internal(delim);
                return TokenTree::Group(Group {
                    delimiter,
-                    stream: delimed.tts.into(),
+                    stream: tts.into(),
                    span,
                });
            }
@@ -232,10 +232,8 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
            }) => {
                return tokenstream::TokenTree::Delimited(
                    span,
-                    tokenstream::Delimited {
-                        delim: delimiter.to_internal(),
-                        tts: stream.into(),
-                    },
+                    delimiter.to_internal(),
+                    stream.into(),
                )
                .into();
            }