
Clean up ext::tt::transcribe::TtFrame, rename to Frame.

Jeffrey Seyfried 2017-01-27 11:00:10 +00:00
parent d09e512158
commit abdc68973e
3 changed files with 89 additions and 62 deletions

src/libsyntax/ext/tt/transcribe.rs

@@ -14,27 +14,71 @@ use errors::Handler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{self, TokenTree};
+use tokenstream::{self, TokenTree, Delimited, SequenceRepetition};
 use util::small_vector::SmallVector;

 use std::rc::Rc;
 use std::ops::Add;
 use std::collections::HashMap;

-///an unzipping of `TokenTree`s
-#[derive(Clone)]
-struct TtFrame {
-    forest: TokenTree,
-    idx: usize,
-    dotdotdoted: bool,
-    sep: Option<Token>,
+// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
+enum Frame {
+    Delimited {
+        forest: Rc<Delimited>,
+        idx: usize,
+        span: Span,
+    },
+    MatchNt {
+        name: Ident,
+        kind: Ident,
+        idx: usize,
+        span: Span,
+    },
+    Sequence {
+        forest: Rc<SequenceRepetition>,
+        idx: usize,
+        sep: Option<Token>,
+    },
+}
+
+impl Iterator for Frame {
+    type Item = TokenTree;
+
+    fn next(&mut self) -> Option<TokenTree> {
+        match *self {
+            Frame::Delimited { ref forest, ref mut idx, span } => {
+                *idx += 1;
+                if *idx == forest.delim.len() {
+                    Some(forest.open_tt(span))
+                } else if let Some(tree) = forest.tts.get(*idx - forest.delim.len() - 1) {
+                    Some(tree.clone())
+                } else if *idx == forest.tts.len() + 2 * forest.delim.len() {
+                    Some(forest.close_tt(span))
+                } else {
+                    None
+                }
+            }
+            Frame::Sequence { ref forest, ref mut idx, .. } => {
+                *idx += 1;
+                forest.tts.get(*idx - 1).cloned()
+            }
+            Frame::MatchNt { ref mut idx, name, kind, span } => {
+                *idx += 1;
+                match *idx {
+                    1 => Some(TokenTree::Token(span, token::SubstNt(name))),
+                    2 => Some(TokenTree::Token(span, token::Colon)),
+                    3 => Some(TokenTree::Token(span, token::Ident(kind))),
+                    _ => None,
+                }
+            }
+        }
+    }
 }

-#[derive(Clone)]
 struct TtReader<'a> {
     sp_diag: &'a Handler,
     /// the unzipped tree:
-    stack: SmallVector<TtFrame>,
+    stack: SmallVector<Frame>,
     /* for MBE-style macro transcription */
     interpolations: HashMap<Ident, Rc<NamedMatch>>,
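
A note on the new iteration scheme: the `Frame::Delimited` arm walks a delimited group with a single counter. With a visible delimiter (`delim.len() == 1`), step 1 yields the open token, the next `tts.len()` steps yield the inner trees, and step `tts.len() + 2` yields the close token; with `NoDelim`, both delimiter steps vanish. The following is a minimal, self-contained model of that arithmetic; `Delim`, `Tok`, and `DelimFrame` are toy stand-ins for `DelimToken`, `TokenTree`, and a `Delimited` frame, not the real libsyntax types.

#[derive(Clone, Copy, PartialEq)]
enum Delim { None, Brace }

impl Delim {
    // Mirrors DelimToken::len: a visible delimiter occupies one token
    // on each side; an invisible one (NoDelim) occupies zero.
    fn len(self) -> usize {
        if self == Delim::None { 0 } else { 1 }
    }
}

#[derive(Debug, Clone, PartialEq)]
enum Tok { Open, Close, Inner(u32) }

struct DelimFrame {
    delim: Delim,
    tts: Vec<Tok>,
    idx: usize,
}

impl Iterator for DelimFrame {
    type Item = Tok;

    // The same single-counter walk as Frame::Delimited::next above:
    // open delimiter, then the inner trees, then the close delimiter.
    fn next(&mut self) -> Option<Tok> {
        self.idx += 1;
        if self.idx == self.delim.len() {
            Some(Tok::Open)
        } else if let Some(tree) = self.tts.get(self.idx - self.delim.len() - 1) {
            Some(tree.clone())
        } else if self.idx == self.tts.len() + 2 * self.delim.len() {
            Some(Tok::Close)
        } else {
            None // frame exhausted; the caller pops it off the stack
        }
    }
}

fn main() {
    // A braced group { t0 t1 } yields Open, t0, t1, Close.
    let braced = DelimFrame { delim: Delim::Brace, tts: vec![Tok::Inner(0), Tok::Inner(1)], idx: 0 };
    assert_eq!(braced.collect::<Vec<_>>(),
               vec![Tok::Open, Tok::Inner(0), Tok::Inner(1), Tok::Close]);

    // A NoDelim group (like the root frame in transcribe) yields only the inner trees.
    let bare = DelimFrame { delim: Delim::None, tts: vec![Tok::Inner(0)], idx: 0 };
    assert_eq!(bare.collect::<Vec<_>>(), vec![Tok::Inner(0)]);
}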
@@ -51,15 +95,10 @@ pub fn transcribe(sp_diag: &Handler,
                  -> Vec<TokenTree> {
     let mut r = TtReader {
         sp_diag: sp_diag,
-        stack: SmallVector::one(TtFrame {
-            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
-                tts: src,
-                // doesn't matter. This merely holds the root unzipping.
-                separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
-            })),
+        stack: SmallVector::one(Frame::Delimited {
+            forest: Rc::new(tokenstream::Delimited { delim: token::NoDelim, tts: src }),
             idx: 0,
-            dotdotdoted: false,
-            sep: None,
+            span: DUMMY_SP,
         }),
         interpolations: match interp { /* just a convenience */
             None => HashMap::new(),
@@ -151,34 +190,33 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
 /// EFFECT: advances the reader's token field
 fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
     loop {
-        let frame = match r.stack.last() {
-            Some(frame) => frame.clone(),
+        let tree = match r.stack.last_mut() {
+            Some(frame) => frame.next(),
             None => return None,
         };
-        if frame.idx == frame.forest.len() {
-            if frame.dotdotdoted &&
-               *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1 {
-                *r.repeat_idx.last_mut().unwrap() += 1;
-                r.stack.last_mut().unwrap().idx = 0;
-                if let Some(tk) = r.stack.last().unwrap().sep.clone() {
-                    return Some(TokenTree::Token(prev_span, tk)); // repeat same span, I guess
-                }
-            } else {
-                r.stack.pop();
-                match r.stack.last_mut() {
-                    Some(frame) => frame.idx += 1,
-                    None => return None,
-                }
-                if frame.dotdotdoted {
-                    r.repeat_idx.pop();
-                    r.repeat_len.pop();
+
+        let tree = if let Some(tree) = tree {
+            tree
+        } else {
+            if let Frame::Sequence { ref mut idx, ref sep, .. } = *r.stack.last_mut().unwrap() {
+                if *r.repeat_idx.last().unwrap() < *r.repeat_len.last().unwrap() - 1 {
+                    *r.repeat_idx.last_mut().unwrap() += 1;
+                    *idx = 0;
+                    if let Some(sep) = sep.clone() {
+                        return Some(TokenTree::Token(prev_span, sep)); // repeat same span, I guess
+                    }
+                    continue
                 }
             }
-            continue
-        }
-        match frame.forest.get_tt(frame.idx) {
+
+            if let Frame::Sequence { .. } = r.stack.pop().unwrap() {
+                r.repeat_idx.pop();
+                r.repeat_len.pop();
+            }
+            continue
+        };
+
+        match tree {
             TokenTree::Sequence(sp, seq) => {
                 // FIXME(pcwalton): Bad copy.
                 match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
@@ -202,23 +240,20 @@ fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
                                                       "this must repeat at least once"));
                             }
-                            r.stack.last_mut().unwrap().idx += 1;
                             return tt_next_token(r, prev_span);
                         }
                         r.repeat_len.push(len);
                         r.repeat_idx.push(0);
-                        r.stack.push(TtFrame {
+                        r.stack.push(Frame::Sequence {
                             idx: 0,
-                            dotdotdoted: true,
                             sep: seq.separator.clone(),
-                            forest: TokenTree::Sequence(sp, seq),
+                            forest: seq,
                         });
                     }
                 }
             }
             // FIXME #2887: think about span stuff here
             TokenTree::Token(sp, SubstNt(ident)) => {
-                r.stack.last_mut().unwrap().idx += 1;
                 match lookup_cur_matched(r, ident) {
                     None => {
                         return Some(TokenTree::Token(sp, SubstNt(ident)));
@@ -245,21 +280,13 @@ fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
                     }
                 }
             }
-            // TokenTree::Delimited or any token that can be unzipped
-            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
-                // do not advance the idx yet
-                r.stack.push(TtFrame {
-                    forest: seq,
-                    idx: 0,
-                    dotdotdoted: false,
-                    sep: None
-                });
-                // if this could be 0-length, we'd need to potentially recur here
+            TokenTree::Delimited(span, delimited) => {
+                r.stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
             }
-            tt @ TokenTree::Token(..) => {
-                r.stack.last_mut().unwrap().idx += 1;
-                return Some(tt);
+            TokenTree::Token(span, MatchNt(name, kind)) => {
+                r.stack.push(Frame::MatchNt { name: name, kind: kind, idx: 0, span: span });
             }
+            tt @ TokenTree::Token(..) => return Some(tt),
         }
     }
 }
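
With frames as iterators, the new `tt_next_token` loop reduces to: take the next tree from the top frame; on exhaustion, either rewind a `Sequence` frame for its next repetition (emitting the separator between passes) or pop the frame and resume its parent. The sketch below models that control flow with toy types. Two simplifications: the repetition count `reps` is fixed up front where rustc derives it from the matched fragments via `lockstep_iter_size`, and the sequence frame is pre-pushed where rustc pushes it upon meeting a `TokenTree::Sequence`.

#[derive(Debug, Clone, PartialEq)]
enum Tok { Sep, Tt(&'static str) }

enum Frame {
    Delimited { tts: Vec<Tok>, idx: usize },
    Sequence { tts: Vec<Tok>, idx: usize, sep: Option<Tok>, reps: usize, rep_idx: usize },
}

impl Frame {
    // Both frame kinds just walk their token list with a counter.
    fn next(&mut self) -> Option<Tok> {
        let (tts, idx) = match *self {
            Frame::Delimited { ref tts, ref mut idx } |
            Frame::Sequence { ref tts, ref mut idx, .. } => (tts, idx),
        };
        *idx += 1;
        tts.get(*idx - 1).cloned()
    }
}

fn next_token(stack: &mut Vec<Frame>) -> Option<Tok> {
    loop {
        let tok = stack.last_mut()?.next();
        if let Some(tok) = tok {
            return Some(tok); // (the real code also matches on the tree here)
        }
        // Top frame exhausted: rewind a sequence that has repetitions left,
        // emitting the separator between passes...
        if let Frame::Sequence { ref mut idx, ref sep, reps, ref mut rep_idx, .. } =
                *stack.last_mut().unwrap() {
            if *rep_idx < reps - 1 {
                *rep_idx += 1;
                *idx = 0;
                if let Some(sep) = sep.clone() {
                    return Some(sep);
                }
                continue;
            }
        }
        // ...otherwise pop it, as in the r.stack.pop() path above.
        stack.pop();
    }
}

fn main() {
    // Model of transcribing `$( a ),* end` with two repetitions matched;
    // the sequence frame sits on top of the root frame.
    let root = Frame::Delimited { tts: vec![Tok::Tt("end")], idx: 0 };
    let seq = Frame::Sequence {
        tts: vec![Tok::Tt("a")],
        idx: 0,
        sep: Some(Tok::Sep),
        reps: 2,
        rep_idx: 0,
    };
    let mut stack = vec![root, seq];
    let mut out = Vec::new();
    while let Some(tok) = next_token(&mut stack) {
        out.push(tok);
    }
    assert_eq!(out, vec![Tok::Tt("a"), Tok::Sep, Tok::Tt("a"), Tok::Tt("end")]);
}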

src/libsyntax/parse/token.rs

@@ -50,8 +50,8 @@ pub enum DelimToken {
 }

 impl DelimToken {
-    pub fn len(&self) -> u32 {
-        if *self == NoDelim { 0 } else { 1 }
+    pub fn len(self) -> usize {
+        if self == NoDelim { 0 } else { 1 }
     }
 }
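
The `DelimToken::len` change is a signature cleanup: a by-value `self` receiver is idiomatic for a small `Copy` enum, and a `usize` return feeds the index arithmetic in `Frame::Delimited` without casts, leaving a single `as u32` where byte positions are computed (next file). A small stand-in illustration, using a hypothetical `DelimKind` rather than the real `token::DelimToken`:

#[derive(Clone, Copy, PartialEq)]
enum DelimKind { NoDelim, Brace }

impl DelimKind {
    // By-value receiver, usize result: the shape the new call sites want.
    pub fn len(self) -> usize {
        if self == DelimKind::NoDelim { 0 } else { 1 }
    }
}

fn main() {
    // Token count of a group with n inner trees, as in Frame::Delimited:
    // n + 2 * delim.len(), with no integer casts.
    let n: usize = 3;
    assert_eq!(n + 2 * DelimKind::Brace.len(), 5);   // { t0 t1 t2 }
    assert_eq!(n + 2 * DelimKind::NoDelim.len(), 3); // invisible delimiters

    // Span math still wants u32, hence the `as u32` casts in tokenstream.rs.
    assert_eq!(DelimKind::Brace.len() as u32, 1);
}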

src/libsyntax/tokenstream.rs

@@ -64,7 +64,7 @@ impl Delimited {
     pub fn open_tt(&self, span: Span) -> TokenTree {
         let open_span = match span {
             DUMMY_SP => DUMMY_SP,
-            _ => Span { hi: span.lo + BytePos(self.delim.len()), ..span },
+            _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span },
         };
         TokenTree::Token(open_span, self.open_token())
     }
@@ -73,7 +73,7 @@ impl Delimited {
     pub fn close_tt(&self, span: Span) -> TokenTree {
         let close_span = match span {
             DUMMY_SP => DUMMY_SP,
-            _ => Span { lo: span.hi - BytePos(self.delim.len()), ..span },
+            _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span },
         };
         TokenTree::Token(close_span, self.close_token())
     }
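
Since `len()` now returns `usize`, the two span computations cast back to `u32` for `BytePos`: the open delimiter's span covers the first `delim.len()` bytes of the group's span, the close delimiter's span the last, and a `NoDelim` group collapses both to zero width. A runnable sketch under minimal stand-ins for `Span`, `BytePos` (folded into plain `u32` here), and `DUMMY_SP`:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

const DUMMY_SP: Span = Span { lo: 0, hi: 0 };

// Mirrors Delimited::open_tt: the delimiter occupies the first
// delim_len bytes of the group's span (zero bytes for NoDelim).
fn open_span(span: Span, delim_len: usize) -> Span {
    if span == DUMMY_SP {
        DUMMY_SP
    } else {
        Span { hi: span.lo + delim_len as u32, ..span }
    }
}

// Mirrors Delimited::close_tt: the last delim_len bytes of the group's span.
fn close_span(span: Span, delim_len: usize) -> Span {
    if span == DUMMY_SP {
        DUMMY_SP
    } else {
        Span { lo: span.hi - delim_len as u32, ..span }
    }
}

fn main() {
    let group = Span { lo: 10, hi: 20 }; // e.g. the span of `{ ... }`
    assert_eq!(open_span(group, 1), Span { lo: 10, hi: 11 });  // the `{`
    assert_eq!(close_span(group, 1), Span { lo: 19, hi: 20 }); // the `}`

    // NoDelim: both delimiter spans collapse to zero width.
    assert_eq!(open_span(group, 0), Span { lo: 10, hi: 10 });
    assert_eq!(close_span(group, 0), Span { lo: 20, hi: 20 });
}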