Reduce the size of the TokenTree

Brendan Zabarauskas 2014-10-23 04:58:48 +11:00
parent dfb4163f83
commit 34dacb80ce
7 changed files with 34 additions and 22 deletions

@@ -629,8 +629,7 @@ pub enum TokenTree {
     /// A single token
     TtToken(Span, ::parse::token::Token),
     /// A delimited sequence of token trees
-    // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TtDelimited(Span, Delimiter, Rc<Vec<TokenTree>>, Delimiter),
+    TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
 
     // These only make sense for right-hand-sides of MBE macros:
@@ -649,7 +648,7 @@ impl TokenTree {
     pub fn get_span(&self) -> Span {
         match *self {
             TtToken(span, _) => span,
-            TtDelimited(span, _, _, _) => span,
+            TtDelimited(span, _) => span,
             TtSequence(span, _, _, _) => span,
             TtNonterminal(span, _) => span,
         }

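The ast.rs change above is where the size win comes from: a Rust enum is as wide as its largest variant, and the old TtDelimited carried two Delimiters plus an Rc inline, padding out every TokenTree. A minimal sketch of the effect, using stand-in Span and Delimiter types rather than the real libsyntax ones (the byte counts are illustrative only):

#![allow(dead_code)]
use std::mem::size_of;
use std::rc::Rc;

struct Span { lo: u32, hi: u32 }
struct Delimiter { span: Span, token: u64 } // stand-in for the real token type

// Old shape: both delimiters and the Rc sit inline in the variant.
enum Wide {
    Token(Span, u64),
    Delimited(Span, Delimiter, Rc<Vec<Wide>>, Delimiter),
}

// New shape: a single Rc to an (open, trees, close) triple.
enum Narrow {
    Token(Span, u64),
    Delimited(Span, Rc<(Delimiter, Vec<Narrow>, Delimiter)>),
}

fn main() {
    // Every value of the enum is as large as its largest variant, so
    // shrinking Delimited shrinks every token tree, delimited or not.
    println!("wide:   {} bytes", size_of::<Wide>());
    println!("narrow: {} bytes", size_of::<Narrow>());
}

The trade-off is one extra pointer chase whenever a delimited tree is actually taken apart, which is what the `**delimed` destructuring in the remaining files pays.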

@@ -651,7 +651,8 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
                                 vec!(e_tok));
             vec!(cx.stmt_expr(e_push))
         },
-        ast::TtDelimited(sp, ref open, ref tts, ref close) => {
+        ast::TtDelimited(sp, ref delimed) => {
+            let (ref open, ref tts, ref close) = **delimed;
             mk_tt(cx, sp, &open.to_tt()).into_iter()
                 .chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
                 .chain(mk_tt(cx, sp, &close.to_tt()).into_iter())

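Every consumer touched by this commit (quoting here, then macro expansion, transcription, and pretty-printing below) switches to the same idiom: deref the Rc once and borrow the three fields in place. A stripped-down sketch of that pattern with made-up element types, not the libsyntax ones:

use std::rc::Rc;

// Hypothetical stand-ins: delimiters are bytes, the body is a Vec<u8>.
fn describe(delimed: &Rc<(u8, Vec<u8>, u8)>) -> String {
    // Same shape as `let (ref open, ref tts, ref close) = **delimed;` in the
    // diff: all three fields are borrowed through the Rc, nothing is cloned.
    let (ref open, ref tts, ref close) = **delimed;
    format!("{} ... {} ({} trees)", *open as char, *close as char, tts.len())
}

fn main() {
    let delimed = Rc::new((b'(', vec![1, 2, 3], b')'));
    assert_eq!(describe(&delimed), "( ... ) (3 trees)");
}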

@@ -172,7 +172,10 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             MatchedNonterminal(NtTT(ref tt)) => {
                 match **tt {
                     // ignore delimiters
-                    TtDelimited(_, _, ref tts, _) => (**tts).clone(),
+                    TtDelimited(_, ref delimed) => {
+                        let (_, ref tts, _) = **delimed;
+                        tts.clone()
+                    },
                     _ => cx.span_fatal(sp, "macro rhs must be delimited"),
                 }
             },

@@ -128,9 +128,13 @@ impl Add<LockstepIterSize, LockstepIterSize> for LockstepIterSize {
 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
     match *t {
-        // The opening and closing delimiters are both tokens, so they are
-        // treated as `LisUnconstrained`.
-        TtDelimited(_, _, ref tts, _) | TtSequence(_, ref tts, _, _) => {
+        TtDelimited(_, ref delimed) => {
+            let (_, ref tts, _) = **delimed;
+            tts.iter().fold(LisUnconstrained, |size, tt| {
+                size + lockstep_iter_size(tt, r)
+            })
+        },
+        TtSequence(_, ref tts, _, _) => {
             tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
@@ -202,7 +206,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
         (*frame.forest)[frame.idx].clone()
     };
     match t {
-        TtDelimited(_, open, tts, close) => {
+        TtDelimited(_, ref delimed) => {
+            let (ref open, ref tts, ref close) = **delimed;
             let mut forest = Vec::with_capacity(1 + tts.len() + 1);
             forest.push(open.to_tt());
             forest.extend(tts.iter().map(|x| (*x).clone()));

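In the transcriber, the delimited and sequence arms no longer share one pattern, but both still fold their children's sizes through LockstepIterSize's Add impl. A loose analogue of that fold, written against a simplified stand-in for LockstepIterSize rather than the real type:

use std::ops::Add;

#[derive(Clone, Copy, PartialEq, Debug)]
enum LockstepSize {
    Unconstrained,
    Constraint(usize),
    Contradiction,
}

impl Add for LockstepSize {
    type Output = LockstepSize;
    fn add(self, other: LockstepSize) -> LockstepSize {
        use LockstepSize::*;
        match (self, other) {
            // An unconstrained side defers to the other side.
            (Unconstrained, x) | (x, Unconstrained) => x,
            // Two agreeing constraints stay a constraint...
            (Constraint(a), Constraint(b)) if a == b => Constraint(a),
            // ...anything else (disagreement, or an existing contradiction)
            // poisons the result.
            _ => Contradiction,
        }
    }
}

fn main() {
    let children = [
        LockstepSize::Unconstrained,
        LockstepSize::Constraint(3),
        LockstepSize::Constraint(3),
    ];
    // Same shape as `tts.iter().fold(LisUnconstrained, |size, tt| size + ...)`.
    let combined = children
        .iter()
        .fold(LockstepSize::Unconstrained, |size, &child| size + child);
    assert_eq!(combined, LockstepSize::Constraint(3));
}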

@@ -571,17 +571,20 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
     match *tt {
         TtToken(span, ref tok) =>
             TtToken(span, fld.fold_token(tok.clone())),
-        TtDelimited(span, ref open, ref tts, ref close) =>
-            TtDelimited(span,
-                        Delimiter {
-                            span: open.span,
-                            token: fld.fold_token(open.token.clone())
-                        },
-                        Rc::new(fld.fold_tts(tts.as_slice())),
-                        Delimiter {
-                            span: close.span,
-                            token: fld.fold_token(close.token.clone())
-                        }),
+        TtDelimited(span, ref delimed) => {
+            let (ref open, ref tts, ref close) = **delimed;
+            TtDelimited(span, Rc::new((
+                Delimiter {
+                    span: open.span,
+                    token: fld.fold_token(open.token.clone())
+                },
+                fld.fold_tts(tts.as_slice()),
+                Delimiter {
+                    span: close.span,
+                    token: fld.fold_token(close.token.clone())
+                },
+            )))
+        },
         TtSequence(span, ref pattern, ref sep, is_optional) =>
             TtSequence(span,
                        Rc::new(fld.fold_tts(pattern.as_slice())),

@@ -2615,7 +2615,7 @@ impl<'a> Parser<'a> {
                 // Expand to cover the entire delimited token tree
                 let span = Span { hi: self.span.hi, ..pre_span };
-                TtDelimited(span, open, Rc::new(tts), close)
+                TtDelimited(span, Rc::new((open, tts, close)))
             }
             _ => parse_non_delim_tt_tok(self)
         }

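On the producing side, both the folder above and the parser here simply assemble the triple and hand it to Rc::new. A small sketch of that construction, using placeholder Span and Delimiter types rather than the real parser state:

#![allow(dead_code)]
use std::rc::Rc;

// Placeholder stand-ins for Span, Delimiter and the token payload.
type Span = (u32, u32);
type Delimiter = char;

enum TokenTree {
    Token(Span, char),
    Delimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
}

// Mirrors `TtDelimited(span, Rc::new((open, tts, close)))` in the parser:
// build the triple once, then share it behind a single Rc.
fn delimited(span: Span, open: Delimiter, tts: Vec<TokenTree>, close: Delimiter) -> TokenTree {
    TokenTree::Delimited(span, Rc::new((open, tts, close)))
}

fn main() {
    let body = vec![TokenTree::Token((1, 2), 'x')];
    let tt = delimited((0, 3), '(', body, ')');
    // Reading it back is one deref through the Rc: field 1 of the triple is the body.
    match tt {
        TokenTree::Delimited(_, ref delimed) => assert_eq!(delimed.1.len(), 1),
        TokenTree::Token(..) => unreachable!(),
    }
}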

@@ -1020,7 +1020,8 @@ impl<'a> State<'a> {
     /// expression arguments as expressions). It can be done! I think.
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
-            ast::TtDelimited(_, ref open, ref tts, ref close) => {
+            ast::TtDelimited(_, ref delimed) => {
+                let (ref open, ref tts, ref close) = **delimed;
                 try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
                 try!(space(&mut self.s));
                 try!(self.print_tts(tts.as_slice()));