1
Fork 0

Remove Token::MatchNt.

This commit is contained in:
Jeffrey Seyfried 2017-01-30 23:48:14 +00:00
parent d8b34e9a74
commit 0cc7053efa
9 changed files with 79 additions and 78 deletions

View file

@ -1109,10 +1109,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
token::Token::Ident(ident) | token::Token::Ident(ident) |
token::Token::Lifetime(ident) | token::Token::Lifetime(ident) |
token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st), token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
token::Token::MatchNt(ident1, ident2) => {
ident1.name.as_str().hash(self.st);
ident2.name.as_str().hash(self.st);
}
token::Token::Interpolated(ref non_terminal) => { token::Token::Interpolated(ref non_terminal) => {
// FIXME(mw): This could be implemented properly. It's just a // FIXME(mw): This could be implemented properly. It's just a

View file

@ -315,7 +315,7 @@ impl<'a> Classifier<'a> {
token::Lifetime(..) => Class::Lifetime, token::Lifetime(..) => Class::Lifetime,
token::Underscore | token::Eof | token::Interpolated(..) | token::Underscore | token::Eof | token::Interpolated(..) |
token::MatchNt(..) | token::SubstNt(..) | token::Tilde | token::At => Class::None, token::SubstNt(..) | token::Tilde | token::At => Class::None,
}; };
// Anything that didn't return above is the simple case where we the // Anything that didn't return above is the simple case where we the

View file

@ -85,9 +85,7 @@ use errors::FatalError;
use ext::tt::quoted; use ext::tt::quoted;
use parse::{Directory, ParseSess}; use parse::{Directory, ParseSess};
use parse::parser::{PathStyle, Parser}; use parse::parser::{PathStyle, Parser};
use parse::token::{DocComment, MatchNt}; use parse::token::{self, DocComment, Token, Nonterminal};
use parse::token::{Token, Nonterminal};
use parse::token;
use print::pprust; use print::pprust;
use tokenstream::TokenTree; use tokenstream::TokenTree;
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
@ -156,7 +154,7 @@ pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
TokenTree::Delimited(_, ref delim) => { TokenTree::Delimited(_, ref delim) => {
count_names(&delim.tts) count_names(&delim.tts)
} }
TokenTree::Token(_, MatchNt(..)) => { TokenTree::MetaVarDecl(..) => {
1 1
} }
TokenTree::Token(..) => 0, TokenTree::Token(..) => 0,
@ -221,7 +219,7 @@ fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[quoted::TokenTree], mut res:
n_rec(next_m, res.by_ref(), ret_val)?; n_rec(next_m, res.by_ref(), ret_val)?;
} }
} }
TokenTree::Token(sp, MatchNt(bind_name, _)) => { TokenTree::MetaVarDecl(sp, bind_name, _) => {
match ret_val.entry(bind_name) { match ret_val.entry(bind_name) {
Vacant(spot) => { Vacant(spot) => {
spot.insert(res.next().unwrap()); spot.insert(res.next().unwrap());
@ -377,7 +375,7 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
top_elts: Tt(TokenTree::Sequence(sp, seq)), top_elts: Tt(TokenTree::Sequence(sp, seq)),
})); }));
} }
TokenTree::Token(_, MatchNt(..)) => { TokenTree::MetaVarDecl(..) => {
// Built-in nonterminals never start with these tokens, // Built-in nonterminals never start with these tokens,
// so we can eliminate them from consideration. // so we can eliminate them from consideration.
match *token { match *token {
@ -445,7 +443,7 @@ pub fn parse(sess: &ParseSess,
} }
} else if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 { } else if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 {
let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) { let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
TokenTree::Token(_, MatchNt(bind, name)) => { TokenTree::MetaVarDecl(_, bind, name) => {
format!("{} ('{}')", name, bind) format!("{} ('{}')", name, bind)
} }
_ => panic!() _ => panic!()
@ -467,7 +465,7 @@ pub fn parse(sess: &ParseSess,
parser.bump(); parser.bump();
} else /* bb_eis.len() == 1 */ { } else /* bb_eis.len() == 1 */ {
let mut ei = bb_eis.pop().unwrap(); let mut ei = bb_eis.pop().unwrap();
if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) { if let TokenTree::MetaVarDecl(span, _, ident) = ei.top_elts.get_tt(ei.idx) {
let match_cur = ei.match_cur; let match_cur = ei.match_cur;
ei.matches[match_cur].push(Rc::new(MatchedNonterminal( ei.matches[match_cur].push(Rc::new(MatchedNonterminal(
Rc::new(parse_nt(&mut parser, span, &ident.name.as_str()))))); Rc::new(parse_nt(&mut parser, span, &ident.name.as_str())))));

View file

@ -20,7 +20,7 @@ use ext::tt::quoted;
use ext::tt::transcribe::transcribe; use ext::tt::transcribe::transcribe;
use parse::{Directory, ParseSess}; use parse::{Directory, ParseSess};
use parse::parser::Parser; use parse::parser::Parser;
use parse::token::{self, NtTT, Token}; use parse::token::{self, NtTT};
use parse::token::Token::*; use parse::token::Token::*;
use print; use print;
use symbol::Symbol; use symbol::Symbol;
@ -165,14 +165,12 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
// $( $lhs:tt => $rhs:tt );+ // $( $lhs:tt => $rhs:tt );+
// ...quasiquoting this would be nice. // ...quasiquoting this would be nice.
// These spans won't matter, anyways // These spans won't matter, anyways
let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt"));
let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt"));
let argument_gram = vec![ let argument_gram = vec![
quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition { quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition {
tts: vec![ tts: vec![
quoted::TokenTree::Token(DUMMY_SP, match_lhs_tok), quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
quoted::TokenTree::Token(DUMMY_SP, match_rhs_tok), quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
], ],
separator: Some(token::Semi), separator: Some(token::Semi),
op: quoted::KleeneOp::OneOrMore, op: quoted::KleeneOp::OneOrMore,
@ -272,7 +270,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
use self::quoted::TokenTree; use self::quoted::TokenTree;
for tt in tts { for tt in tts {
match *tt { match *tt {
TokenTree::Token(_, _) => (), TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => (),
TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) { TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) {
return false; return false;
}, },
@ -352,13 +350,12 @@ impl FirstSets {
let mut first = TokenSet::empty(); let mut first = TokenSet::empty();
for tt in tts.iter().rev() { for tt in tts.iter().rev() {
match *tt { match *tt {
TokenTree::Token(sp, ref tok) => { TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
first.replace_with((sp, tok.clone())); first.replace_with(tt.clone());
} }
TokenTree::Delimited(span, ref delimited) => { TokenTree::Delimited(span, ref delimited) => {
build_recur(sets, &delimited.tts[..]); build_recur(sets, &delimited.tts[..]);
first.replace_with((delimited.open_tt(span).span(), first.replace_with(delimited.open_tt(span));
Token::OpenDelim(delimited.delim)));
} }
TokenTree::Sequence(sp, ref seq_rep) => { TokenTree::Sequence(sp, ref seq_rep) => {
let subfirst = build_recur(sets, &seq_rep.tts[..]); let subfirst = build_recur(sets, &seq_rep.tts[..]);
@ -383,7 +380,7 @@ impl FirstSets {
if let (Some(ref sep), true) = (seq_rep.separator.clone(), if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) { subfirst.maybe_empty) {
first.add_one_maybe((sp, sep.clone())); first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
} }
// Reverse scan: Sequence comes before `first`. // Reverse scan: Sequence comes before `first`.
@ -413,13 +410,12 @@ impl FirstSets {
for tt in tts.iter() { for tt in tts.iter() {
assert!(first.maybe_empty); assert!(first.maybe_empty);
match *tt { match *tt {
TokenTree::Token(sp, ref tok) => { TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
first.add_one((sp, tok.clone())); first.add_one(tt.clone());
return first; return first;
} }
TokenTree::Delimited(span, ref delimited) => { TokenTree::Delimited(span, ref delimited) => {
first.add_one((delimited.open_tt(span).span(), first.add_one(delimited.open_tt(span));
Token::OpenDelim(delimited.delim)));
return first; return first;
} }
TokenTree::Sequence(sp, ref seq_rep) => { TokenTree::Sequence(sp, ref seq_rep) => {
@ -431,7 +427,7 @@ impl FirstSets {
if let (Some(ref sep), true) = (seq_rep.separator.clone(), if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) { subfirst.maybe_empty) {
first.add_one_maybe((sp, sep.clone())); first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
} }
assert!(first.maybe_empty); assert!(first.maybe_empty);
@ -467,8 +463,8 @@ impl FirstSets {
} }
} }
// A set of Tokens, which may include MatchNt tokens (for // A set of `quoted::TokenTree`s, which may include `TokenTree::MetaVarDecl`s
// macro-by-example syntactic variables). It also carries the // (for macro-by-example syntactic variables). It also carries the
// `maybe_empty` flag; that is true if and only if the matcher can // `maybe_empty` flag; that is true if and only if the matcher can
// match an empty token sequence. // match an empty token sequence.
// //
@ -479,7 +475,7 @@ impl FirstSets {
// (Notably, we must allow for *-op to occur zero times.) // (Notably, we must allow for *-op to occur zero times.)
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct TokenSet { struct TokenSet {
tokens: Vec<(Span, Token)>, tokens: Vec<quoted::TokenTree>,
maybe_empty: bool, maybe_empty: bool,
} }
@ -489,13 +485,13 @@ impl TokenSet {
// Returns the set `{ tok }` for the single-token (and thus // Returns the set `{ tok }` for the single-token (and thus
// non-empty) sequence [tok]. // non-empty) sequence [tok].
fn singleton(tok: (Span, Token)) -> Self { fn singleton(tok: quoted::TokenTree) -> Self {
TokenSet { tokens: vec![tok], maybe_empty: false } TokenSet { tokens: vec![tok], maybe_empty: false }
} }
// Changes self to be the set `{ tok }`. // Changes self to be the set `{ tok }`.
// Since `tok` is always present, marks self as non-empty. // Since `tok` is always present, marks self as non-empty.
fn replace_with(&mut self, tok: (Span, Token)) { fn replace_with(&mut self, tok: quoted::TokenTree) {
self.tokens.clear(); self.tokens.clear();
self.tokens.push(tok); self.tokens.push(tok);
self.maybe_empty = false; self.maybe_empty = false;
@ -510,7 +506,7 @@ impl TokenSet {
} }
// Adds `tok` to the set for `self`, marking sequence as non-empty. // Adds `tok` to the set for `self`, marking sequence as non-empty.
fn add_one(&mut self, tok: (Span, Token)) { fn add_one(&mut self, tok: quoted::TokenTree) {
if !self.tokens.contains(&tok) { if !self.tokens.contains(&tok) {
self.tokens.push(tok); self.tokens.push(tok);
} }
@ -518,7 +514,7 @@ impl TokenSet {
} }
// Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.) // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
fn add_one_maybe(&mut self, tok: (Span, Token)) { fn add_one_maybe(&mut self, tok: quoted::TokenTree) {
if !self.tokens.contains(&tok) { if !self.tokens.contains(&tok) {
self.tokens.push(tok); self.tokens.push(tok);
} }
@ -558,7 +554,6 @@ fn check_matcher_core(sess: &ParseSess,
first_sets: &FirstSets, first_sets: &FirstSets,
matcher: &[quoted::TokenTree], matcher: &[quoted::TokenTree],
follow: &TokenSet) -> TokenSet { follow: &TokenSet) -> TokenSet {
use print::pprust::token_to_string;
use self::quoted::TokenTree; use self::quoted::TokenTree;
let mut last = TokenSet::empty(); let mut last = TokenSet::empty();
@ -584,11 +579,11 @@ fn check_matcher_core(sess: &ParseSess,
// First, update `last` so that it corresponds to the set // First, update `last` so that it corresponds to the set
// of NT tokens that might end the sequence `... token`. // of NT tokens that might end the sequence `... token`.
match *token { match *token {
TokenTree::Token(sp, ref tok) => { TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
let can_be_followed_by_any; let can_be_followed_by_any;
if let Err(bad_frag) = has_legal_fragment_specifier(tok) { if let Err(bad_frag) = has_legal_fragment_specifier(token) {
let msg = format!("invalid fragment specifier `{}`", bad_frag); let msg = format!("invalid fragment specifier `{}`", bad_frag);
sess.span_diagnostic.struct_span_err(sp, &msg) sess.span_diagnostic.struct_span_err(token.span(), &msg)
.help("valid fragment specifiers are `ident`, `block`, \ .help("valid fragment specifiers are `ident`, `block`, \
`stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
and `item`") and `item`")
@ -597,7 +592,7 @@ fn check_matcher_core(sess: &ParseSess,
// from error messages.) // from error messages.)
can_be_followed_by_any = true; can_be_followed_by_any = true;
} else { } else {
can_be_followed_by_any = token_can_be_followed_by_any(tok); can_be_followed_by_any = token_can_be_followed_by_any(token);
} }
if can_be_followed_by_any { if can_be_followed_by_any {
@ -607,13 +602,12 @@ fn check_matcher_core(sess: &ParseSess,
// followed by anything against SUFFIX. // followed by anything against SUFFIX.
continue 'each_token; continue 'each_token;
} else { } else {
last.replace_with((sp, tok.clone())); last.replace_with(token.clone());
suffix_first = build_suffix_first(); suffix_first = build_suffix_first();
} }
} }
TokenTree::Delimited(span, ref d) => { TokenTree::Delimited(span, ref d) => {
let my_suffix = TokenSet::singleton((d.close_tt(span).span(), let my_suffix = TokenSet::singleton(d.close_tt(span));
Token::CloseDelim(d.delim)));
check_matcher_core(sess, first_sets, &d.tts, &my_suffix); check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
// don't track non NT tokens // don't track non NT tokens
last.replace_with_irrelevant(); last.replace_with_irrelevant();
@ -637,7 +631,7 @@ fn check_matcher_core(sess: &ParseSess,
let mut new; let mut new;
let my_suffix = if let Some(ref u) = seq_rep.separator { let my_suffix = if let Some(ref u) = seq_rep.separator {
new = suffix_first.clone(); new = suffix_first.clone();
new.add_one_maybe((sp, u.clone())); new.add_one_maybe(TokenTree::Token(sp, u.clone()));
&new &new
} else { } else {
&suffix_first &suffix_first
@ -663,12 +657,13 @@ fn check_matcher_core(sess: &ParseSess,
// Now `last` holds the complete set of NT tokens that could // Now `last` holds the complete set of NT tokens that could
// end the sequence before SUFFIX. Check that every one works with `suffix`. // end the sequence before SUFFIX. Check that every one works with `suffix`.
'each_last: for &(_sp, ref t) in &last.tokens { 'each_last: for token in &last.tokens {
if let MatchNt(ref name, ref frag_spec) = *t { if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token {
for &(sp, ref next_token) in &suffix_first.tokens { for next_token in &suffix_first.tokens {
match is_in_follow(next_token, &frag_spec.name.as_str()) { match is_in_follow(next_token, &frag_spec.name.as_str()) {
Err((msg, help)) => { Err((msg, help)) => {
sess.span_diagnostic.struct_span_err(sp, &msg).help(help).emit(); sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
.help(help).emit();
// don't bother reporting every source of // don't bother reporting every source of
// conflict for a particular element of `last`. // conflict for a particular element of `last`.
continue 'each_last; continue 'each_last;
@ -684,12 +679,12 @@ fn check_matcher_core(sess: &ParseSess,
}; };
sess.span_diagnostic.span_err( sess.span_diagnostic.span_err(
sp, next_token.span(),
&format!("`${name}:{frag}` {may_be} followed by `{next}`, which \ &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
is not allowed for `{frag}` fragments", is not allowed for `{frag}` fragments",
name=name, name=name,
frag=frag_spec, frag=frag_spec,
next=token_to_string(next_token), next=quoted_tt_to_string(next_token),
may_be=may_be) may_be=may_be)
); );
} }
@ -701,8 +696,8 @@ fn check_matcher_core(sess: &ParseSess,
last last
} }
fn token_can_be_followed_by_any(tok: &Token) -> bool { fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
if let &MatchNt(_, ref frag_spec) = tok { if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
frag_can_be_followed_by_any(&frag_spec.name.as_str()) frag_can_be_followed_by_any(&frag_spec.name.as_str())
} else { } else {
// (Non NT's can always be followed by anything in matchers.) // (Non NT's can always be followed by anything in matchers.)
@ -740,8 +735,10 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool {
/// break macros that were relying on that binary operator as a /// break macros that were relying on that binary operator as a
/// separator. /// separator.
// when changing this do not forget to update doc/book/macros.md! // when changing this do not forget to update doc/book/macros.md!
fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> { fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'static str)> {
if let &CloseDelim(_) = tok { use self::quoted::TokenTree;
if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
// closing a token tree can never be matched by any fragment; // closing a token tree can never be matched by any fragment;
// iow, we always require that `(` and `)` match, etc. // iow, we always require that `(` and `)` match, etc.
Ok(true) Ok(true)
@ -757,27 +754,30 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
// maintain // maintain
Ok(true) Ok(true)
}, },
"stmt" | "expr" => { "stmt" | "expr" => match *tok {
match *tok { TokenTree::Token(_, ref tok) => match *tok {
FatArrow | Comma | Semi => Ok(true), FatArrow | Comma | Semi => Ok(true),
_ => Ok(false) _ => Ok(false)
} },
_ => Ok(false),
}, },
"pat" => { "pat" => match *tok {
match *tok { TokenTree::Token(_, ref tok) => match *tok {
FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
Ident(i) if i.name == "if" || i.name == "in" => Ok(true), Ident(i) if i.name == "if" || i.name == "in" => Ok(true),
_ => Ok(false) _ => Ok(false)
} },
_ => Ok(false),
}, },
"path" | "ty" => { "path" | "ty" => match *tok {
match *tok { TokenTree::Token(_, ref tok) => match *tok {
OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
MatchNt(_, ref frag) if frag.name == "block" => Ok(true),
Ident(i) if i.name == "as" || i.name == "where" => Ok(true), Ident(i) if i.name == "as" || i.name == "where" => Ok(true),
_ => Ok(false) _ => Ok(false)
} },
TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
_ => Ok(false),
}, },
"ident" => { "ident" => {
// being a single token, idents are harmless // being a single token, idents are harmless
@ -796,9 +796,9 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
} }
} }
fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> { fn has_legal_fragment_specifier(tok: &quoted::TokenTree) -> Result<(), String> {
debug!("has_legal_fragment_specifier({:?})", tok); debug!("has_legal_fragment_specifier({:?})", tok);
if let &MatchNt(_, ref frag_spec) = tok { if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
let s = &frag_spec.name.as_str(); let s = &frag_spec.name.as_str();
if !is_legal_fragment_specifier(s) { if !is_legal_fragment_specifier(s) {
return Err(s.to_string()); return Err(s.to_string());
@ -814,3 +814,11 @@ fn is_legal_fragment_specifier(frag: &str) -> bool {
_ => false, _ => false,
} }
} }
fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
match *tt {
quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
_ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
}
}

View file

@ -58,7 +58,7 @@ pub struct SequenceRepetition {
pub separator: Option<token::Token>, pub separator: Option<token::Token>,
/// Whether the sequence can be repeated zero (*), or one or more times (+) /// Whether the sequence can be repeated zero (*), or one or more times (+)
pub op: KleeneOp, pub op: KleeneOp,
/// The number of `MatchNt`s that appear in the sequence (and subsequences) /// The number of `MetaVarDecl`s that appear in the sequence (and subsequences)
pub num_captures: usize, pub num_captures: usize,
} }
@ -78,6 +78,8 @@ pub enum TokenTree {
Delimited(Span, Rc<Delimited>), Delimited(Span, Rc<Delimited>),
/// A kleene-style repetition sequence with a span /// A kleene-style repetition sequence with a span
Sequence(Span, Rc<SequenceRepetition>), Sequence(Span, Rc<SequenceRepetition>),
/// Matches a nonterminal. This is only used in the left hand side of MBE macros.
MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */),
} }
impl TokenTree { impl TokenTree {
@ -88,7 +90,7 @@ impl TokenTree {
_ => delimed.tts.len() + 2, _ => delimed.tts.len() + 2,
}, },
TokenTree::Sequence(_, ref seq) => seq.tts.len(), TokenTree::Sequence(_, ref seq) => seq.tts.len(),
TokenTree::Token(..) => 0, _ => 0,
} }
} }
@ -115,6 +117,7 @@ impl TokenTree {
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
match *self { match *self {
TokenTree::Token(sp, _) | TokenTree::Token(sp, _) |
TokenTree::MetaVarDecl(sp, _, _) |
TokenTree::Delimited(sp, _) | TokenTree::Delimited(sp, _) |
TokenTree::Sequence(sp, _) => sp, TokenTree::Sequence(sp, _) => sp,
} }
@ -133,7 +136,7 @@ pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &Par
Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => { Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => {
let span = Span { lo: start_sp.lo, ..end_sp }; let span = Span { lo: start_sp.lo, ..end_sp };
result.push(TokenTree::Token(span, token::MatchNt(ident, kind))); result.push(TokenTree::MetaVarDecl(span, ident, kind));
continue continue
} }
tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),

View file

@ -12,7 +12,7 @@ use ast::Ident;
use errors::Handler; use errors::Handler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use ext::tt::quoted; use ext::tt::quoted;
use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT}; use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
use tokenstream::{TokenTree, Delimited}; use tokenstream::{TokenTree, Delimited};
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
@ -61,7 +61,7 @@ impl Iterator for Frame {
} }
/// This can do Macro-By-Example transcription. On the other hand, if /// This can do Macro-By-Example transcription. On the other hand, if
/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// `src` contains no `TokenTree::{Sequence, MetaVarDecl}`s, or `SubstNt`s, `interp` can
/// (and should) be None. /// (and should) be None.
pub fn transcribe(sp_diag: &Handler, pub fn transcribe(sp_diag: &Handler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>, interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
@ -177,6 +177,7 @@ pub fn transcribe(sp_diag: &Handler,
result_stack.push(mem::replace(&mut result, Vec::new())); result_stack.push(mem::replace(&mut result, Vec::new()));
} }
quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)), quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
} }
} }
} }
@ -243,7 +244,7 @@ fn lockstep_iter_size(tree: &quoted::TokenTree,
size + lockstep_iter_size(tt, interpolations, repeat_idx) size + lockstep_iter_size(tt, interpolations, repeat_idx)
}) })
}, },
TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) => TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) =>
match lookup_cur_matched(name, interpolations, repeat_idx) { match lookup_cur_matched(name, interpolations, repeat_idx) {
Some(matched) => match *matched { Some(matched) => match *matched {
MatchedNonterminal(_) => LockstepIterSize::Unconstrained, MatchedNonterminal(_) => LockstepIterSize::Unconstrained,

View file

@ -571,7 +571,6 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
token::Interpolated(Rc::new(fld.fold_interpolated(nt))) token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
} }
token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)), token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
_ => t _ => t
} }
} }

View file

@ -152,9 +152,6 @@ pub enum Token {
// Can be expanded into several tokens. // Can be expanded into several tokens.
/// Doc comment /// Doc comment
DocComment(ast::Name), DocComment(ast::Name),
// In left-hand-sides of MBE macros:
/// Parse a nonterminal (name to bind, name of NT)
MatchNt(ast::Ident, ast::Ident),
// In right-hand-sides of MBE macros: // In right-hand-sides of MBE macros:
/// A syntactic variable that will be filled in by macro expansion. /// A syntactic variable that will be filled in by macro expansion.
SubstNt(ast::Ident), SubstNt(ast::Ident),

View file

@ -271,7 +271,6 @@ pub fn token_to_string(tok: &Token) -> String {
/* Other */ /* Other */
token::DocComment(s) => s.to_string(), token::DocComment(s) => s.to_string(),
token::SubstNt(s) => format!("${}", s), token::SubstNt(s) => format!("${}", s),
token::MatchNt(s, t) => format!("${}:{}", s, t),
token::Eof => "<eof>".to_string(), token::Eof => "<eof>".to_string(),
token::Whitespace => " ".to_string(), token::Whitespace => " ".to_string(),
token::Comment => "/* */".to_string(), token::Comment => "/* */".to_string(),