Speed up expansion.

Storing each binder's accumulated matches as `Rc<Vec<NamedMatch>>` instead of `Vec<Rc<NamedMatch>>` lets matcher positions share whole match vectors and clone them cheaply, copying a vector only when it is actually mutated. This reduces duplication, thereby increasing expansion speed.
Mark Simulacrum 2017-06-08 05:51:32 -06:00
parent 76242aebb9
commit 3d9ebf2916
4 changed files with 35 additions and 28 deletions
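The core of the change is a copy-on-write representation: forking a matcher position now only bumps reference counts, and a match vector is deep-copied only when it is pushed to (via `Rc::make_mut` in the new `MatcherPos::push_match`). The following is a minimal, self-contained sketch of that pattern; `Match` and `Pos` are placeholder types standing in for `NamedMatch` and `MatcherPos`, not the real definitions from macro_parser.rs.

use std::rc::Rc;

// Stand-in for NamedMatch; the real enum lives in the macro parser.
#[derive(Debug, Clone)]
enum Match {
    Leaf(u32),
}

// Stand-in for MatcherPos: one shared, clone-on-write vector per binder.
struct Pos {
    matches: Vec<Rc<Vec<Match>>>,
}

impl Pos {
    fn push_match(&mut self, idx: usize, m: Match) {
        // Rc::make_mut clones the inner Vec only if it is currently shared;
        // otherwise it mutates the existing allocation in place.
        Rc::make_mut(&mut self.matches[idx]).push(m);
    }
}

fn main() {
    let mut a = Pos { matches: (0..2).map(|_| Rc::new(Vec::new())).collect() };
    a.push_match(0, Match::Leaf(1));

    // Forking a matcher position is a handful of reference-count bumps,
    // not a deep copy of every accumulated match.
    let mut b = Pos { matches: a.matches.clone() };
    b.push_match(0, Match::Leaf(2)); // copies binder 0's Vec once, on write

    assert_eq!(a.matches[0].len(), 1);
    assert_eq!(b.matches[0].len(), 2);
    println!("a: {:?}, b: {:?}", a.matches[0], b.matches[0]);
}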

@@ -139,13 +139,20 @@ struct MatcherPos {
     sep: Option<Token>,
     idx: usize,
     up: Option<Box<MatcherPos>>,
-    matches: Vec<Vec<Rc<NamedMatch>>>,
+    matches: Vec<Rc<Vec<NamedMatch>>>,
     match_lo: usize,
     match_cur: usize,
     match_hi: usize,
     sp_lo: BytePos,
 }
 
+impl MatcherPos {
+    fn push_match(&mut self, idx: usize, m: NamedMatch) {
+        let matches = Rc::make_mut(&mut self.matches[idx]);
+        matches.push(m);
+    }
+}
+
 pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
 
 pub fn count_names(ms: &[TokenTree]) -> usize {
@@ -199,14 +206,15 @@ fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
 /// only on the nesting depth of `ast::TTSeq`s in the originating
 /// token tree it was derived from.
 
+#[derive(Debug, Clone)]
 pub enum NamedMatch {
-    MatchedSeq(Vec<Rc<NamedMatch>>, syntax_pos::Span),
+    MatchedSeq(Rc<Vec<NamedMatch>>, syntax_pos::Span),
     MatchedNonterminal(Rc<Nonterminal>)
 }
 
-fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[TokenTree], mut res: I)
+fn nameize<I: Iterator<Item=NamedMatch>>(sess: &ParseSess, ms: &[TokenTree], mut res: I)
            -> NamedParseResult {
-    fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I,
+    fn n_rec<I: Iterator<Item=NamedMatch>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I,
              ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
              -> Result<(), (syntax_pos::Span, String)> {
         match *m {
@@ -228,7 +236,8 @@ fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[TokenTree],
             TokenTree::MetaVarDecl(sp, bind_name, _) => {
                 match ret_val.entry(bind_name) {
                     Vacant(spot) => {
-                        spot.insert(res.next().unwrap());
+                        // FIXME(simulacrum): Don't construct Rc here
+                        spot.insert(Rc::new(res.next().unwrap()));
                     }
                     Occupied(..) => {
                         return Err((sp, format!("duplicated bind name: {}", bind_name)))
@@ -280,8 +289,8 @@ fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
     }
 }
 
-fn create_matches(len: usize) -> Vec<Vec<Rc<NamedMatch>>> {
-    (0..len).into_iter().map(|_| Vec::new()).collect()
+fn create_matches(len: usize) -> Vec<Rc<Vec<NamedMatch>>> {
+    (0..len).into_iter().map(|_| Rc::new(Vec::new())).collect()
 }
 
 fn inner_parse_loop(sess: &ParseSess,
@@ -320,15 +329,10 @@ fn inner_parse_loop(sess: &ParseSess,
                     // update matches (the MBE "parse tree") by appending
                     // each tree as a subtree.
 
-                    // I bet this is a perf problem: we're preemptively
-                    // doing a lot of array work that will get thrown away
-                    // most of the time.
-
                     // Only touch the binders we have actually bound
                     for idx in ei.match_lo..ei.match_hi {
                         let sub = ei.matches[idx].clone();
-                        new_pos.matches[idx]
-                            .push(Rc::new(MatchedSeq(sub, Span { lo: ei.sp_lo, ..span })));
+                        new_pos.push_match(idx, MatchedSeq(sub, Span { lo: ei.sp_lo, ..span }));
                     }
 
                     new_pos.match_cur = ei.match_hi;
@@ -362,7 +366,7 @@ fn inner_parse_loop(sess: &ParseSess,
                 new_ei.match_cur += seq.num_captures;
                 new_ei.idx += 1;
                 for idx in ei.match_cur..ei.match_cur + seq.num_captures {
-                    new_ei.matches[idx].push(Rc::new(MatchedSeq(vec![], sp)));
+                    new_ei.push_match(idx, MatchedSeq(Rc::new(vec![]), sp));
                 }
                 cur_eis.push(new_ei);
             }
@@ -446,7 +450,9 @@ pub fn parse(sess: &ParseSess,
         /* error messages here could be improved with links to orig. rules */
         if token_name_eq(&parser.token, &token::Eof) {
             if eof_eis.len() == 1 {
-                let matches = eof_eis[0].matches.iter_mut().map(|mut dv| dv.pop().unwrap());
+                let matches = eof_eis[0].matches.iter_mut().map(|mut dv| {
+                    Rc::make_mut(dv).pop().unwrap()
+                });
                 return nameize(sess, ms, matches);
             } else if eof_eis.len() > 1 {
                 return Error(parser.span, "ambiguity: multiple successful parses".to_string());
@@ -479,8 +485,8 @@ pub fn parse(sess: &ParseSess,
             let mut ei = bb_eis.pop().unwrap();
             if let TokenTree::MetaVarDecl(span, _, ident) = ei.top_elts.get_tt(ei.idx) {
                 let match_cur = ei.match_cur;
-                ei.matches[match_cur].push(Rc::new(MatchedNonterminal(
-                    Rc::new(parse_nt(&mut parser, span, &ident.name.as_str())))));
+                ei.push_match(match_cur,
+                    MatchedNonterminal(Rc::new(parse_nt(&mut parser, span, &ident.name.as_str()))));
                 ei.idx += 1;
                 ei.match_cur += 1;
             } else {

@@ -219,7 +219,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
     let lhses = match *argument_map[&lhs_nm] {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| {
-                if let MatchedNonterminal(ref nt) = **m {
+                if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
                         let tt = quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
                         valid &= check_lhs_nt_follows(sess, features, &tt);
@@ -235,7 +235,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
     let rhses = match *argument_map[&rhs_nm] {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| {
-                if let MatchedNonterminal(ref nt) = **m {
+                if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
                         return quoted::parse(tt.clone().into(), false, sess).pop().unwrap();
                     }

@@ -182,15 +182,16 @@ fn lookup_cur_matched(ident: Ident,
                       repeats: &[(usize, usize)])
                       -> Option<Rc<NamedMatch>> {
     interpolations.get(&ident).map(|matched| {
-        repeats.iter().fold(matched.clone(), |ad, &(idx, _)| {
-            match *ad {
-                MatchedNonterminal(_) => {
-                    // end of the line; duplicate henceforth
-                    ad.clone()
-                }
-                MatchedSeq(ref ads, _) => ads[idx].clone()
-            }
-        })
+        let mut matched = matched.clone();
+        for &(idx, _) in repeats {
+            let m = matched.clone();
+            match *m {
+                MatchedNonterminal(_) => break,
+                MatchedSeq(ref ads, _) => matched = Rc::new(ads[idx].clone()),
+            }
+        }
+
+        matched
     })
 }

@@ -54,7 +54,7 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
     let mac_expr = match (&*matched_nt, &*map[&Ident::from_str("pat")]) {
         (&NtExpr(ref matched_expr), &MatchedSeq(ref pats, seq_sp)) => {
             let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt| {
-                match **pat_nt {
+                match *pat_nt {
                     MatchedNonterminal(ref nt) => match **nt {
                         NtPat(ref pat) => pat.clone(),
                         _ => unreachable!(),