Use TokenTrees in lhs of macros
parent 5c1fd5f8b7
commit 38ce6d9eac
10 changed files with 387 additions and 249 deletions
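The change in outline: macro left-hand sides were previously matched against a dedicated `Matcher` AST (`MatchTok` / `MatchSeq` / `MatchNonterminal`); after this commit they are kept as ordinary `TokenTree`s. Two new token kinds carry the matcher-specific information, and `TtSequence` caches how many names it binds. A condensed, non-compilable sketch of the shapes introduced by the hunks below:

    // New tokens, only meaningful inside MBE macros:
    MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle),  // `$name:kind`, LHS only
    SubstNt(ast::Ident, IdentStyle),                          // `$name`, RHS only

    // TtNonterminal is removed; TtSequence gains the bound-name count:
    TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp, uint),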
@@ -163,7 +163,8 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
             token::Lifetime(..) => "lifetime",
             token::DocComment(..) => "doccomment",
-            token::Underscore | token::Eof | token::Interpolated(..) => "",
+            token::Underscore | token::Eof | token::Interpolated(..) |
+            token::MatchNt(..) | token::SubstNt(..) => "",
         };

         // as mentioned above, use the original source code instead of

@@ -10,7 +10,7 @@

 // The Rust abstract syntax tree.

-use codemap::{Span, Spanned, DUMMY_SP, ExpnId};
+use codemap::{Span, Spanned, DUMMY_SP, ExpnId, respan};
 use abi::Abi;
 use ast_util;
 use owned_slice::OwnedSlice;

@@ -657,23 +657,55 @@ pub enum TokenTree {
     /// A delimited sequence of token trees
     TtDelimited(Span, Rc<Delimited>),

-    // These only make sense for right-hand-sides of MBE macros:
+    // This only makes sense for right-hand-sides of MBE macros:

     /// A Kleene-style repetition sequence with an optional separator.
     // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TtSequence(Span, Rc<Vec<TokenTree>>, Option<token::Token>, KleeneOp),
-    /// A syntactic variable that will be filled in by macro expansion.
-    TtNonterminal(Span, Ident)
+    TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp, uint),
 }

 impl TokenTree {
+    pub fn expand_into_tts(self) -> Rc<Vec<TokenTree>> {
+        match self {
+            TtToken(sp, token::DocComment(name)) => {
+                let doc = MetaNameValue(token::intern_and_get_ident("doc"),
+                                        respan(sp, LitStr(token::get_name(name), CookedStr)));
+                let doc = token::NtMeta(P(respan(sp, doc)));
+                let delimed = Delimited {
+                    delim: token::Bracket,
+                    open_span: sp,
+                    tts: vec![TtToken(sp, token::Interpolated(doc))],
+                    close_span: sp,
+                };
+                Rc::new(vec![TtToken(sp, token::Pound),
+                             TtDelimited(sp, Rc::new(delimed))])
+            }
+            TtDelimited(_, ref delimed) => {
+                let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
+                tts.push(delimed.open_tt());
+                tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
+                tts.push(delimed.close_tt());
+                Rc::new(tts)
+            }
+            TtToken(sp, token::SubstNt(name, namep)) => {
+                Rc::new(vec![TtToken(sp, token::Dollar),
+                             TtToken(sp, token::Ident(name, namep))])
+            }
+            TtToken(sp, token::MatchNt(name, kind, namep, kindp)) => {
+                Rc::new(vec![TtToken(sp, token::SubstNt(name, namep)),
+                             TtToken(sp, token::Colon),
+                             TtToken(sp, token::Ident(kind, kindp))])
+            }
+            _ => panic!("Cannot expand a token")
+        }
+    }
+
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
             TtToken(span, _) => span,
             TtDelimited(span, _) => span,
-            TtSequence(span, _, _, _) => span,
-            TtNonterminal(span, _) => span,
+            TtSequence(span, _, _, _, _) => span,
         }
     }
 }

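For orientation, `expand_into_tts` turns a token that stands for a larger construct back into the surface token trees it was written as, so the rest of the machinery can descend into it uniformly. The mapping, in sketch form (spans and ident styles elided):

    // DocComment("...")   ->  `#` plus a bracketed `[doc = "..."]` group
    // TtDelimited(..)     ->  open token, the inner trees, close token
    // SubstNt(e)          ->  `$` `e`
    // MatchNt(e, expr)    ->  `$e` `:` `expr`
    // any other TtToken   ->  panic!("Cannot expand a token")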
@@ -616,6 +616,20 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                             vec!(mk_name(cx, sp, ident.ident())));
         }

+        token::MatchNt(name, kind, name_style, kind_style) => {
+            return cx.expr_call(sp,
+                                mk_token_path(cx, sp, "MatchNt"),
+                                vec![mk_ident(cx, sp, name),
+                                     mk_ident(cx, sp, kind),
+                                     match name_style {
+                                         ModName => mk_token_path(cx, sp, "ModName"),
+                                         Plain => mk_token_path(cx, sp, "Plain"),
+                                     },
+                                     match kind_style {
+                                         ModName => mk_token_path(cx, sp, "ModName"),
+                                         Plain => mk_token_path(cx, sp, "Plain"),
+                                     }]);
+        }
         token::Interpolated(_) => panic!("quote! with interpolated token"),

         _ => ()

@@ -654,6 +668,25 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {

 fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
     match *tt {
+        ast::TtToken(sp, SubstNt(ident, _)) => {
+            // tt.extend($ident.to_tokens(ext_cx).into_iter())
+
+            let e_to_toks =
+                cx.expr_method_call(sp,
+                                    cx.expr_ident(sp, ident),
+                                    id_ext("to_tokens"),
+                                    vec!(cx.expr_ident(sp, id_ext("ext_cx"))));
+            let e_to_toks =
+                cx.expr_method_call(sp, e_to_toks, id_ext("into_iter"), vec![]);
+
+            let e_push =
+                cx.expr_method_call(sp,
+                                    cx.expr_ident(sp, id_ext("tt")),
+                                    id_ext("extend"),
+                                    vec!(e_to_toks));
+
+            vec!(cx.stmt_expr(e_push))
+        }
         ast::TtToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,

@@ -673,25 +706,6 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
             .collect()
         },
         ast::TtSequence(..) => panic!("TtSequence in quote!"),
-        ast::TtNonterminal(sp, ident) => {
-            // tt.extend($ident.to_tokens(ext_cx).into_iter())
-
-            let e_to_toks =
-                cx.expr_method_call(sp,
-                                    cx.expr_ident(sp, ident),
-                                    id_ext("to_tokens"),
-                                    vec!(cx.expr_ident(sp, id_ext("ext_cx"))));
-            let e_to_toks =
-                cx.expr_method_call(sp, e_to_toks, id_ext("into_iter"), vec![]);
-
-            let e_push =
-                cx.expr_method_call(sp,
-                                    cx.expr_ident(sp, id_ext("tt")),
-                                    id_ext("extend"),
-                                    vec!(e_to_toks));
-
-            vec!(cx.stmt_expr(e_push))
-        },
     }
 }

@@ -78,69 +78,80 @@

 use ast;
-use ast::{Matcher, MatchTok, MatchSeq, MatchNonterminal, Ident};
+use ast::{Matcher, TokenTree, Ident};
+use ast::{TtDelimited, TtSequence, TtToken};
 use codemap::{BytePos, mk_sp};
 use codemap;
 use parse::lexer::*; //resolve bug?
 use parse::ParseSess;
 use parse::attr::ParserAttr;
 use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
+use parse::token::{Eof, DocComment, MatchNt, SubstNt};
 use parse::token::{Token, Nonterminal};
 use parse::token;
 use print::pprust;
 use ptr::P;

+use std::mem;
 use std::rc::Rc;
 use std::collections::HashMap;
+use std::collections::hash_map::{Vacant, Occupied};

 /* to avoid costly uniqueness checks, we require that `MatchSeq` always has a
    nonempty body. */

+/// an unzipping of `TokenTree`s
+#[deriving(Clone)]
+struct MatcherTtFrame {
+    elts: Rc<Vec<ast::TokenTree>>,
+    idx: uint,
+}
+
 #[deriving(Clone)]
 pub struct MatcherPos {
-    elts: Vec<ast::Matcher> , // maybe should be <'>? Need to understand regions.
+    stack: Vec<MatcherTtFrame>,
+    elts: Rc<Vec<ast::TokenTree>>,
     sep: Option<Token>,
     idx: uint,
     up: Option<Box<MatcherPos>>,
     matches: Vec<Vec<Rc<NamedMatch>>>,
-    match_lo: uint, match_hi: uint,
+    match_lo: uint,
+    match_cur: uint,
+    match_hi: uint,
     sp_lo: BytePos,
 }

-pub fn count_names(ms: &[Matcher]) -> uint {
-    ms.iter().fold(0, |ct, m| {
-        ct + match m.node {
-            MatchTok(_) => 0u,
-            MatchSeq(ref more_ms, _, _, _, _) => {
-                count_names(more_ms.as_slice())
-            }
-            MatchNonterminal(_, _, _) => 1u
-        }})
+pub fn count_names(ms: &[TokenTree]) -> uint {
+    ms.iter().fold(0, |count, elt| {
+        count + match elt {
+            &TtSequence(_, _, _, _, advance_by) => {
+                advance_by
+            }
+            &TtDelimited(_, ref delim) => {
+                count_names(delim.tts.as_slice())
+            }
+            &TtToken(_, MatchNt(..)) => {
+                1
+            }
+            &TtToken(_, _) => 0,
+        }
+    })
 }

-pub fn initial_matcher_pos(ms: Vec<Matcher> , sep: Option<Token>, lo: BytePos)
+pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
                            -> Box<MatcherPos> {
-    let mut match_idx_hi = 0u;
-    for elt in ms.iter() {
-        match elt.node {
-            MatchTok(_) => (),
-            MatchSeq(_,_,_,_,hi) => {
-                match_idx_hi = hi; // it is monotonic...
-            }
-            MatchNonterminal(_,_,pos) => {
-                match_idx_hi = pos+1u; // ...so latest is highest
-            }
-        }
-    }
-    let matches = Vec::from_fn(count_names(ms.as_slice()), |_i| Vec::new());
+    let match_idx_hi = count_names(ms.as_slice());
+    let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new());
     box MatcherPos {
+        stack: vec![],
         elts: ms,
         sep: sep,
         idx: 0u,
         up: None,
         matches: matches,
         match_lo: 0u,
+        match_cur: 0u,
         match_hi: match_idx_hi,
         sp_lo: lo
     }

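A worked example of the new `count_names` (the matcher is invented for illustration): an LHS written `$a:expr, $( $b:ident ),*` parses to three top-level token trees, which contribute

    // TtToken(_, MatchNt(a, expr, ..))   -> 1
    // TtToken(_, Comma)                  -> 0
    // TtSequence(.., advance_by = 1)     -> 1   (count cached at parse time)

so `count_names` returns 2, the total number of bindings the matcher produces, and `initial_matcher_pos` sizes the `matches` vector accordingly.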
@@ -152,11 +163,9 @@ pub fn initial_matcher_pos(ms: Vec<Matcher> , sep: Option<Token>, lo: BytePos)
 /// (expr, item, etc). All the leaves in a single NamedMatch correspond to a
 /// single matcher_nonterminal in the ast::Matcher that produced it.
 ///
-/// It should probably be renamed, it has more or less exact correspondence to
-/// ast::match nodes, and the in-memory structure of a particular NamedMatch
-/// represents the match that occurred when a particular subset of an
-/// ast::match -- those ast::Matcher nodes leading to a single
-/// MatchNonterminal -- was applied to a particular token tree.
+/// The in-memory structure of a particular NamedMatch represents the match
+/// that occurred when a particular subset of a matcher was applied to a
+/// particular token tree.
 ///
 /// The width of each MatchedSeq in the NamedMatch, and the identity of the
 /// MatchedNonterminal's, will depend on the token tree it was applied to: each

@@ -170,34 +179,43 @@ pub enum NamedMatch {
     MatchedNonterminal(Nonterminal)
 }

-pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[Rc<NamedMatch>])
+pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
             -> HashMap<Ident, Rc<NamedMatch>> {
-    fn n_rec(p_s: &ParseSess, m: &Matcher, res: &[Rc<NamedMatch>],
-             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>) {
-        match *m {
-            codemap::Spanned {node: MatchTok(_), .. } => (),
-            codemap::Spanned {node: MatchSeq(ref more_ms, _, _, _, _), .. } => {
-                for next_m in more_ms.iter() {
-                    n_rec(p_s, next_m, res, ret_val)
-                };
-            }
-            codemap::Spanned {
-                node: MatchNonterminal(bind_name, _, idx),
-                span
-            } => {
-                if ret_val.contains_key(&bind_name) {
-                    let string = token::get_ident(bind_name);
-                    p_s.span_diagnostic
-                       .span_fatal(span,
-                                   format!("duplicated bind name: {}",
-                                           string.get()).as_slice())
-                }
-                ret_val.insert(bind_name, res[idx].clone());
-            }
+    fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
+             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
+        match m {
+            &TtSequence(_, ref more_ms, _, _, _) => {
+                for next_m in more_ms.iter() {
+                    n_rec(p_s, next_m, res, ret_val, idx)
+                }
+            }
+            &TtDelimited(_, ref delim) => {
+                for next_m in delim.tts.iter() {
+                    n_rec(p_s, next_m, res, ret_val, idx)
+                }
+            }
+            &TtToken(sp, MatchNt(bind_name, _, _, _)) => {
+                match ret_val.entry(bind_name) {
+                    Vacant(spot) => {
+                        spot.set(res[*idx].clone());
+                        *idx += 1;
+                    }
+                    Occupied(..) => {
+                        let string = token::get_ident(bind_name);
+                        p_s.span_diagnostic
+                           .span_fatal(sp,
+                                       format!("duplicated bind name: {}",
+                                               string.get()).as_slice())
+                    }
+                }
+            }
+            &TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
+            &TtToken(_, _) => (),
         }
     }
     let mut ret_val = HashMap::new();
-    for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val) }
+    let mut idx = 0u;
+    for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
     ret_val
 }

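Continuing that example: after a successful parse of `$a:expr, $( $b:ident ),*`, `res` holds one `NamedMatch` per binder in matcher order, and `nameize` pairs them up with a running index:

    // ret_val[a] = res[0]   -- a MatchedNonterminal
    // ret_val[b] = res[1]   -- a MatchedSeq of the repeated matches

Using the same binder twice in one LHS lands in the `Occupied` arm and aborts with "duplicated bind name".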
@@ -210,7 +228,7 @@ pub enum ParseResult {
 pub fn parse_or_else(sess: &ParseSess,
                      cfg: ast::CrateConfig,
                      rdr: TtReader,
-                     ms: Vec<Matcher> )
+                     ms: Vec<TokenTree> )
                      -> HashMap<Ident, Rc<NamedMatch>> {
     match parse(sess, cfg, rdr, ms.as_slice()) {
         Success(m) => m,

@@ -237,12 +255,12 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
 pub fn parse(sess: &ParseSess,
              cfg: ast::CrateConfig,
              mut rdr: TtReader,
-             ms: &[Matcher])
+             ms: &[TokenTree])
              -> ParseResult {
     let mut cur_eis = Vec::new();
-    cur_eis.push(initial_matcher_pos(ms.iter()
-                                       .map(|x| (*x).clone())
-                                       .collect(),
+    cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
+                                               .map(|x| (*x).clone())
+                                               .collect()),
                                      None,
                                      rdr.peek().sp.lo));

@@ -255,11 +273,22 @@ pub fn parse(sess: &ParseSess,

     /* we append new items to this while we go */
     loop {
-        let ei = match cur_eis.pop() {
+        let mut ei = match cur_eis.pop() {
             None => break, /* for each Earley Item */
             Some(ei) => ei,
         };

+        // When unzipped trees end, remove them
+        while ei.idx >= ei.elts.len() {
+            match ei.stack.pop() {
+                Some(MatcherTtFrame { elts, idx }) => {
+                    ei.elts = elts;
+                    ei.idx = idx + 1;
+                }
+                None => break
+            }
+        }
+
         let idx = ei.idx;
         let len = ei.elts.len();

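The `while` loop above is the second half of "unzipping": when the matcher meets a tree it must descend into, it saves its position in `ei.stack`; when the inner trees run out, it pops back to just past the saved position. Conceptually, using the fields from this file:

    // descending (see the `seq @ TtDelimited(..)` arm further down):
    ei.stack.push(MatcherTtFrame { elts: outer, idx: here });
    ei.elts = inner;
    ei.idx = 0;
    // resurfacing (the loop above):
    let MatcherTtFrame { elts, idx } = ei.stack.pop().unwrap();
    ei.elts = elts;
    ei.idx = idx + 1;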
@@ -293,6 +322,7 @@ pub fn parse(sess: &ParseSess,
                                                        sp.hi))));
                 }

+                new_pos.match_cur = ei.match_hi;
                 new_pos.idx += 1;
                 cur_eis.push(new_pos);
             }

@@ -301,69 +331,88 @@ pub fn parse(sess: &ParseSess,

                 // the *_t vars are workarounds for the lack of unary move
                 match ei.sep {
                     Some(ref t) if idx == len => { // we need a separator
                         // i'm conflicted about whether this should be hygienic....
                         // though in this case, if the separators are never legal
                         // idents, it shouldn't matter.
                         if token_name_eq(&tok, t) { //pass the separator
                             let mut ei_t = ei.clone();
+                            // ei_t.match_cur = ei_t.match_lo;
                             ei_t.idx += 1;
                             next_eis.push(ei_t);
                         }
                     }
                     _ => { // we don't need a separator
                         let mut ei_t = ei;
+                        ei_t.match_cur = ei_t.match_lo;
                         ei_t.idx = 0;
                         cur_eis.push(ei_t);
                     }
                 }
             } else {
                 eof_eis.push(ei);
             }
         } else {
-            match ei.elts[idx].node.clone() {
+            match (*ei.elts)[idx].clone() {
                 /* need to descend into sequence */
-                MatchSeq(ref matchers, ref sep, kleene_op,
-                         match_idx_lo, match_idx_hi) => {
+                TtSequence(_, ref matchers, ref sep, kleene_op, match_num) => {
                     if kleene_op == ast::ZeroOrMore {
                         let mut new_ei = ei.clone();
+                        new_ei.match_cur += match_num;
                         new_ei.idx += 1u;
                         //we specifically matched zero repeats.
-                        for idx in range(match_idx_lo, match_idx_hi) {
+                        for idx in range(ei.match_cur, ei.match_cur + match_num) {
                             new_ei.matches[idx]
                                 .push(Rc::new(MatchedSeq(Vec::new(), sp)));
                         }

                         cur_eis.push(new_ei);
                     }

                     let matches = Vec::from_elem(ei.matches.len(), Vec::new());
                     let ei_t = ei;
                     cur_eis.push(box MatcherPos {
-                        elts: (*matchers).clone(),
+                        stack: vec![],
+                        elts: matchers.clone(),
                         sep: (*sep).clone(),
                         idx: 0u,
-                        up: Some(ei_t),
                         matches: matches,
-                        match_lo: match_idx_lo, match_hi: match_idx_hi,
+                        match_lo: ei_t.match_cur,
+                        match_cur: ei_t.match_cur,
+                        match_hi: ei_t.match_cur + match_num,
+                        up: Some(ei_t),
                         sp_lo: sp.lo
                     });
                 }
-                MatchNonterminal(_,_,_) => {
+                TtToken(_, MatchNt(..)) => {
                     // Built-in nonterminals never start with these tokens,
                     // so we can eliminate them from consideration.
                     match tok {
                         token::CloseDelim(_) => {},
                         _ => bb_eis.push(ei),
                     }
                 }
-                MatchTok(ref t) => {
-                    let mut ei_t = ei.clone();
-                    if token_name_eq(t,&tok) {
-                        ei_t.idx += 1;
-                        next_eis.push(ei_t);
-                    }
-                }
+                TtToken(sp, SubstNt(..)) => {
+                    return Error(sp, "Cannot transcribe in macro LHS".into_string())
+                }
+                seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
+                    let tts = seq.expand_into_tts();
+                    let elts = mem::replace(&mut ei.elts, tts);
+                    let idx = ei.idx;
+                    ei.stack.push(MatcherTtFrame {
+                        elts: elts,
+                        idx: idx,
+                    });
+                    ei.idx = 0;
+                    cur_eis.push(ei);
+                }
+                TtToken(_, ref t) => {
+                    let mut ei_t = ei.clone();
+                    if token_name_eq(t, &tok) {
+                        ei_t.idx += 1;
+                        next_eis.push(ei_t);
+                    }
+                }
             }
         }
     }

@@ -385,8 +434,8 @@ pub fn parse(sess: &ParseSess,
         if (bb_eis.len() > 0u && next_eis.len() > 0u)
             || bb_eis.len() > 1u {
             let nts = bb_eis.iter().map(|ei| {
-                match ei.elts[ei.idx].node {
-                    MatchNonterminal(bind, name, _) => {
+                match (*ei.elts)[ei.idx] {
+                    TtToken(_, MatchNt(bind, name, _, _)) => {
                         (format!("{} ('{}')",
                                  token::get_ident(name),
                                  token::get_ident(bind))).to_string()

|
@ -410,12 +459,14 @@ pub fn parse(sess: &ParseSess,
|
|||
let mut rust_parser = Parser::new(sess, cfg.clone(), box rdr.clone());
|
||||
|
||||
let mut ei = bb_eis.pop().unwrap();
|
||||
match ei.elts[ei.idx].node {
|
||||
MatchNonterminal(_, name, idx) => {
|
||||
match (*ei.elts)[ei.idx] {
|
||||
TtToken(_, MatchNt(_, name, _, _)) => {
|
||||
let name_string = token::get_ident(name);
|
||||
ei.matches[idx].push(Rc::new(MatchedNonterminal(
|
||||
let match_cur = ei.match_cur;
|
||||
ei.matches[match_cur].push(Rc::new(MatchedNonterminal(
|
||||
parse_nt(&mut rust_parser, name_string.get()))));
|
||||
ei.idx += 1u;
|
||||
ei.match_cur += 1;
|
||||
}
|
||||
_ => panic!()
|
||||
}
|
||||
|
|
|
@@ -9,8 +9,9 @@
 // except according to those terms.

-use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TtDelimited};
+use ast::{Ident, TtDelimited};
+use ast::{TtSequence, TtToken};
 use ast;
-use codemap::{Span, Spanned, DUMMY_SP};
+use codemap::{Span, DUMMY_SP};
 use ext::base::{ExtCtxt, MacResult, MacroDef};
 use ext::base::{NormalTT, TTMacroExpander};
 use ext::tt::macro_parser::{Success, Error, Failure};

@@ -20,7 +21,7 @@ use parse::lexer::new_tt_reader;
 use parse::parser::Parser;
 use parse::attr::ParserAttr;
 use parse::token::{special_idents, gensym_ident};
-use parse::token::{NtMatchers, NtTT};
+use parse::token::{MatchNt, NtMatchers, NtTT};
 use parse::token;
 use print;
 use ptr::P;

@@ -158,14 +159,18 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,

     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         match **lhs {
-            MatchedNonterminal(NtMatchers(ref mtcs)) => {
+            MatchedNonterminal(NtTT(ref lhs_tt)) => {
+                let lhs_tt = match **lhs_tt {
+                    TtDelimited(_, ref delim) => delim.tts.as_slice(),
+                    _ => cx.span_fatal(sp, "malformed macro lhs")
+                };
                 // `None` is because we're not interpolating
                 let arg_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                             None,
                                             arg.iter()
                                                .map(|x| (*x).clone())
                                                .collect());
-                match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs.as_slice()) {
+                match parse(cx.parse_sess(), cx.cfg(), arg_rdr, lhs_tt) {
                     Success(named_matches) => {
                         let rhs = match *rhses[i] {
                             // okay, what's your transcriber?

@@ -210,31 +215,33 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
                               name: Ident,
                               arg: Vec<ast::TokenTree> )
                               -> Box<MacResult+'cx> {
-    // these spans won't matter, anyways
-    fn ms(m: Matcher_) -> Matcher {
-        Spanned {
-            node: m.clone(),
-            span: DUMMY_SP
-        }
-    }
-
     let lhs_nm = gensym_ident("lhs");
     let rhs_nm = gensym_ident("rhs");

     // The pattern that macro_rules matches.
     // The grammar for macro_rules! is:
-    // $( $lhs:mtcs => $rhs:tt );+
+    // $( $lhs:tt => $rhs:tt );+
     // ...quasiquoting this would be nice.
+    // These spans won't matter, anyways
+    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
+    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
     let argument_gram = vec!(
-        ms(MatchSeq(vec!(
-            ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)),
-            ms(MatchTok(token::FatArrow)),
-            ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))),
-            Some(token::Semi), ast::OneOrMore, 0u, 2u)),
+        TtSequence(DUMMY_SP,
+                   Rc::new(vec![
+                       TtToken(DUMMY_SP, match_lhs_tok),
+                       TtToken(DUMMY_SP, token::FatArrow),
+                       TtToken(DUMMY_SP, match_rhs_tok)]),
+                   Some(token::Semi),
+                   ast::OneOrMore,
+                   2),
         //to phase into semicolon-termination instead of
         //semicolon-separation
-        ms(MatchSeq(vec!(ms(MatchTok(token::Semi))), None,
-                    ast::ZeroOrMore, 2u, 2u)));
+        TtSequence(DUMMY_SP,
+                   Rc::new(vec![TtToken(DUMMY_SP, token::Semi)]),
+                   None,
+                   ast::ZeroOrMore,
+                   0));

     // Parse the macro_rules! invocation (`none` is for no interpolations):

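The rewritten `argument_gram` spells out the grammar `$( $lhs:tt => $rhs:tt );+ $( ; )*` directly as token trees. Concretely it accepts definitions such as (example invented for illustration):

    macro_rules! pair {
        ($x:expr) => (($x, $x));
        ($x:expr, $y:expr) => (($x, $y))
    }

Each arm binds its whole LHS as one `tt` and its RHS as another, which is why the first `TtSequence` carries a name count of 2 and the trailing-semicolon sequence carries 0.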
@@ -9,10 +9,11 @@
 // except according to those terms.

 use ast;
-use ast::{TokenTree, TtDelimited, TtToken, TtSequence, TtNonterminal, Ident};
+use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident};
 use codemap::{Span, DUMMY_SP};
 use diagnostic::SpanHandler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
+use parse::token::{Eof, DocComment, Interpolated, MatchNt, SubstNt};
 use parse::token::{Token, NtIdent};
 use parse::token;
 use parse::lexer::TokenAndSpan;

@@ -85,17 +86,9 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
     })
 }

-fn lookup_cur_matched(r: &TtReader, name: Ident) -> Rc<NamedMatch> {
+fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
     let matched_opt = r.interpolations.find_copy(&name);
-    match matched_opt {
-        Some(s) => lookup_cur_matched_by_matched(r, s),
-        None => {
-            r.sp_diag
-             .span_fatal(r.cur_span,
-                         format!("unknown macro variable `{}`",
-                                 token::get_ident(name)).as_slice());
-        }
-    }
+    matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
 }

 #[deriving(Clone)]

@@ -133,16 +126,20 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TtSequence(_, ref tts, _, _) => {
+        TtSequence(_, ref tts, _, _, _) => {
             tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
+        TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) =>
+            match lookup_cur_matched(r, name) {
+                Some(matched) => match *matched {
+                    MatchedNonterminal(_) => LisUnconstrained,
+                    MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
+                },
+                _ => LisUnconstrained
+            },
         TtToken(..) => LisUnconstrained,
-        TtNonterminal(_, name) => match *lookup_cur_matched(r, name) {
-            MatchedNonterminal(_) => LisUnconstrained,
-            MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name)
-        },
     }
 }

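`lockstep_iter_size` decides how many iterations a `$( ... )` repetition transcribes to: every variable inside it that matched a repetition must agree on the length. For the folds above to enforce that, the `+` on sizes must behave roughly like this (a sketch inferred from the variant names, not shown in this diff):

    // LisUnconstrained    + x                   == x
    // LisConstraint(3, a) + LisConstraint(3, b) == LisConstraint(3, a)
    // LisConstraint(3, a) + LisConstraint(2, b) == LisContradiction(...)

A sequence containing no repeating variable stays `LisUnconstrained`; both error cases are reported by the caller in the hunk below.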
@@ -205,40 +202,21 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             (*frame.forest)[frame.idx].clone()
         };
         match t {
-            TtDelimited(_, ref delimed) => {
-                let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
-                tts.push(delimed.open_tt());
-                tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
-                tts.push(delimed.close_tt());
-
-                r.stack.push(TtFrame {
-                    forest: Rc::new(tts),
-                    idx: 0,
-                    dotdotdoted: false,
-                    sep: None
-                });
-                // if this could be 0-length, we'd need to potentially recur here
-            }
-            TtToken(sp, tok) => {
-                r.cur_span = sp;
-                r.cur_tok = tok;
-                r.stack.last_mut().unwrap().idx += 1;
-                return ret_val;
-            }
-            TtSequence(sp, tts, sep, kleene_op) => {
+            TtSequence(sp, tts, sep, kleene_op, n) => {
                 // FIXME(pcwalton): Bad copy.
-                match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op), r) {
+                match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op, n),
+                                         r) {
                     LisUnconstrained => {
                         r.sp_diag.span_fatal(
                             sp.clone(), /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
                              variables matched as repeating at this depth");
                     }
                     LisContradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
                         r.sp_diag.span_fatal(sp.clone(), msg.as_slice());
                     }
                     LisConstraint(len, _) => {
                         if len == 0 {
                             if kleene_op == ast::OneOrMore {

@@ -262,31 +240,62 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 }
             }
             // FIXME #2887: think about span stuff here
-            TtNonterminal(sp, ident) => {
-                r.stack.last_mut().unwrap().idx += 1;
-                match *lookup_cur_matched(r, ident) {
-                    /* sidestep the interpolation tricks for ident because
-                       (a) idents can be in lots of places, so it'd be a pain
-                       (b) we actually can, since it's a token. */
-                    MatchedNonterminal(NtIdent(box sn, b)) => {
-                        r.cur_span = sp;
-                        r.cur_tok = token::Ident(sn,b);
-                        return ret_val;
-                    }
-                    MatchedNonterminal(ref other_whole_nt) => {
-                        // FIXME(pcwalton): Bad copy.
-                        r.cur_span = sp;
-                        r.cur_tok = token::Interpolated((*other_whole_nt).clone());
-                        return ret_val;
-                    }
-                    MatchedSeq(..) => {
-                        r.sp_diag.span_fatal(
-                            r.cur_span, /* blame the macro writer */
-                            format!("variable '{}' is still repeating at this depth",
-                                    token::get_ident(ident)).as_slice());
-                    }
-                }
-            }
+            TtToken(sp, SubstNt(ident, namep)) => {
+                match lookup_cur_matched(r, ident) {
+                    None => {
+                        r.stack.push(TtFrame {
+                            forest: TtToken(sp, SubstNt(ident, namep)).expand_into_tts(),
+                            idx: 0,
+                            dotdotdoted: false,
+                            sep: None
+                        });
+                        // this can't be 0 length, just like TtDelimited
+                    }
+                    Some(cur_matched) => {
+                        r.stack.last_mut().unwrap().idx += 1;
+                        match *cur_matched {
+                            // sidestep the interpolation tricks for ident because
+                            // (a) idents can be in lots of places, so it'd be a pain
+                            // (b) we actually can, since it's a token.
+                            MatchedNonterminal(NtIdent(box sn, b)) => {
+                                r.cur_span = sp;
+                                r.cur_tok = token::Ident(sn, b);
+                                return ret_val;
+                            }
+                            MatchedNonterminal(ref other_whole_nt) => {
+                                // FIXME(pcwalton): Bad copy.
+                                r.cur_span = sp;
+                                r.cur_tok = token::Interpolated((*other_whole_nt).clone());
+                                return ret_val;
+                            }
+                            MatchedSeq(..) => {
+                                r.sp_diag.span_fatal(
+                                    r.cur_span, /* blame the macro writer */
+                                    format!("variable '{}' is still repeating at this depth",
+                                            token::get_ident(ident)).as_slice());
+                            }
+                        }
+                    }
+                }
+            }
+            // TtDelimited or any token that can be unzipped
+            seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..))
+                | seq @ TtToken(_, MatchNt(..)) => {
+                // do not advance the idx yet
+                r.stack.push(TtFrame {
+                    forest: seq.expand_into_tts(),
+                    idx: 0,
+                    dotdotdoted: false,
+                    sep: None
+                });
+                // if this could be 0-length, we'd need to potentially recur here
+            }
+            TtToken(sp, tok) => {
+                r.cur_span = sp;
+                r.cur_tok = tok;
+                r.stack.last_mut().unwrap().idx += 1;
+                return ret_val;
+            }
         }
     }
 }

@@ -581,13 +581,12 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
                 }
             ))
         },
-        TtSequence(span, ref pattern, ref sep, is_optional) =>
+        TtSequence(span, ref pattern, ref sep, is_optional, advance_by) =>
             TtSequence(span,
                        Rc::new(fld.fold_tts(pattern.as_slice())),
                        sep.clone().map(|tok| fld.fold_token(tok)),
-                       is_optional),
-        TtNonterminal(sp,ref ident) =>
-            TtNonterminal(sp,fld.fold_ident(*ident))
+                       is_optional,
+                       advance_by),
     }
 }

@@ -603,6 +602,12 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
         }
         token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
         token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
+        token::SubstNt(ident, namep) => {
+            token::SubstNt(fld.fold_ident(ident), namep)
+        }
+        token::MatchNt(name, kind, namep, kindp) => {
+            token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind), namep, kindp)
+        }
         _ => t
     }
 }

@@ -49,7 +49,7 @@ use ast::{StructVariantKind, BiSub};
 use ast::StrStyle;
 use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
 use ast::{Delimited, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken};
-use ast::{TtNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
+use ast::{TupleVariantKind, Ty, Ty_, TyBot};
 use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
 use ast::{TyTypeof, TyInfer, TypeMethod};
 use ast::{TyNil, TyParam, TyParamBound, TyParen, TyPath, TyPtr, TyQPath};

@@ -65,6 +65,7 @@ use ast_util::{as_prec, ident_to_path, operator_prec};
 use ast_util;
 use codemap::{Span, BytePos, Spanned, spanned, mk_sp};
 use codemap;
+use ext::tt::macro_parser;
 use parse;
 use parse::attr::ParserAttr;
 use parse::classify;

@@ -73,7 +74,7 @@ use parse::common::{seq_sep_trailing_allowed};
 use parse::lexer::Reader;
 use parse::lexer::TokenAndSpan;
 use parse::obsolete::*;
-use parse::token::InternedString;
+use parse::token::{MatchNt, SubstNt, InternedString};
 use parse::token::{keywords, special_idents};
 use parse::token;
 use parse::{new_sub_parser_from_file, ParseSess};

@@ -2508,7 +2509,7 @@ impl<'a> Parser<'a> {
     pub fn parse_token_tree(&mut self) -> TokenTree {
         // FIXME #6994: currently, this is too eager. It
         // parses token trees but also identifies TtSequence's
-        // and TtNonterminal's; it's too early to know yet
+        // and token::SubstNt's; it's too early to know yet
         // whether something will be a nonterminal or a seq
         // yet.
         maybe_whole!(deref self, NtTT);

@@ -2549,9 +2550,21 @@ impl<'a> Parser<'a> {
                 let seq = match seq {
                     Spanned { node, .. } => node,
                 };
-                TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat)
+                let name_num = macro_parser::count_names(seq.as_slice());
+                TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat, name_num)
             } else {
-                TtNonterminal(sp, p.parse_ident())
+                // A nonterminal that matches or not
+                let namep = match p.token { token::Ident(_, p) => p, _ => token::Plain };
+                let name = p.parse_ident();
+                if p.token == token::Colon && p.look_ahead(1, |t| t.is_ident()) {
+                    p.bump();
+                    let kindp = match p.token { token::Ident(_, p) => p, _ => token::Plain };
+                    let nt_kind = p.parse_ident();
+                    let m = TtToken(sp, MatchNt(name, nt_kind, namep, kindp));
+                    m
+                } else {
+                    TtToken(sp, SubstNt(name, namep))
+                }
             }
         }
         _ => {

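`parse_token_tree` now resolves the two dollar forms as soon as they are parsed, instead of emitting a `TtNonterminal` to be interpreted later:

    // $e        ->  TtToken(sp, SubstNt(e, Plain))
    // $e:expr   ->  TtToken(sp, MatchNt(e, expr, Plain, Plain))

The `namep`/`kindp` lookups record whether each ident was module-qualified (`ModName`) or plain, mirroring the `IdentStyle` already carried by ordinary `Ident` tokens.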
@@ -108,7 +108,15 @@ pub enum Token {

     /* For interpolation */
     Interpolated(Nonterminal),
+    // Can be expanded into several tokens.
     /// Doc comment
     DocComment(ast::Name),
+    // In left-hand-sides of MBE macros:
+    /// Parse a nonterminal (name to bind, name of NT, styles of their idents)
+    MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle),
+    // In right-hand-sides of MBE macros:
+    /// A syntactic variable that will be filled in by macro expansion.
+    SubstNt(ast::Ident, IdentStyle),
+
     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for

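The two new token kinds also print back to their surface syntax; per the `token_to_string` hunk below,

    // SubstNt(e, _)           prints as  "$e"
    // MatchNt(e, expr, _, _)  prints as  "$e:expr"

so a macro LHS pretty-prints the way it was written.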
@@ -254,6 +254,8 @@ pub fn token_to_string(tok: &Token) -> String {

         /* Other */
         token::DocComment(s) => s.as_str().into_string(),
+        token::SubstNt(s, _) => format!("${}", s),
+        token::MatchNt(s, t, _, _) => format!("${}:{}", s, t),
         token::Eof => "<eof>".into_string(),
         token::Whitespace => " ".into_string(),
         token::Comment => "/* */".into_string(),

@@ -1120,13 +1122,6 @@ impl<'a> State<'a> {
     /// expression arguments as expressions). It can be done! I think.
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
-            ast::TtDelimited(_, ref delimed) => {
-                try!(word(&mut self.s, token_to_string(&delimed.open_token()).as_slice()));
-                try!(space(&mut self.s));
-                try!(self.print_tts(delimed.tts.as_slice()));
-                try!(space(&mut self.s));
-                word(&mut self.s, token_to_string(&delimed.close_token()).as_slice())
-            },
             ast::TtToken(_, ref tk) => {
                 try!(word(&mut self.s, token_to_string(tk).as_slice()));
                 match *tk {

@@ -1136,7 +1131,14 @@ impl<'a> State<'a> {
                     _ => Ok(())
                 }
             }
-            ast::TtSequence(_, ref tts, ref separator, kleene_op) => {
+            ast::TtDelimited(_, ref delimed) => {
+                try!(word(&mut self.s, token_to_string(&delimed.open_token()).as_slice()));
+                try!(space(&mut self.s));
+                try!(self.print_tts(delimed.tts.as_slice()));
+                try!(space(&mut self.s));
+                word(&mut self.s, token_to_string(&delimed.close_token()).as_slice())
+            },
+            ast::TtSequence(_, ref tts, ref separator, kleene_op, _) => {
                 try!(word(&mut self.s, "$("));
                 for tt_elt in (*tts).iter() {
                     try!(self.print_tt(tt_elt));

@@ -1153,10 +1155,6 @@ impl<'a> State<'a> {
                     ast::OneOrMore => word(&mut self.s, "+"),
                 }
             }
-            ast::TtNonterminal(_, name) => {
-                try!(word(&mut self.s, "$"));
-                self.print_ident(name)
-            }
         }
     }