Use TokenTrees in lhs of macros
parent 5c1fd5f8b7
commit 38ce6d9eac
10 changed files with 387 additions and 249 deletions
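At a high level, this commit retires the dedicated `Matcher` AST that macro left-hand sides were parsed into, and represents LHSes with ordinary `TokenTree`s instead, adding two new tokens (`MatchNt` and `SubstNt`) to carry the `$name:kind` and `$name` forms. A rough before/after sketch of how `$e:expr` is represented (illustrative only; names are from the diff below, exact shapes approximated):

    // Before: a separate Spanned<Matcher_> AST node, carrying its own
    // pre-computed match index:
    //     MatchNonterminal(e, expr, idx)
    // After: a plain token tree whose token encodes the same information:
    //     TtToken(sp, MatchNt(e, expr, Plain, Plain))   // `$e:expr` in a LHS
    //     TtToken(sp, SubstNt(e, Plain))                // `$e` in a RHS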
@@ -163,7 +163,8 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
             token::Lifetime(..) => "lifetime",
             token::DocComment(..) => "doccomment",
-            token::Underscore | token::Eof | token::Interpolated(..) => "",
+            token::Underscore | token::Eof | token::Interpolated(..) |
+            token::MatchNt(..) | token::SubstNt(..) => "",
         };

         // as mentioned above, use the original source code instead of
@@ -10,7 +10,7 @@

 // The Rust abstract syntax tree.

-use codemap::{Span, Spanned, DUMMY_SP, ExpnId};
+use codemap::{Span, Spanned, DUMMY_SP, ExpnId, respan};
 use abi::Abi;
 use ast_util;
 use owned_slice::OwnedSlice;
@@ -657,23 +657,55 @@ pub enum TokenTree {
     /// A delimited sequence of token trees
     TtDelimited(Span, Rc<Delimited>),

-    // These only make sense for right-hand-sides of MBE macros:
+    // This only makes sense for right-hand-sides of MBE macros:

     /// A Kleene-style repetition sequence with an optional separator.
     // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TtSequence(Span, Rc<Vec<TokenTree>>, Option<token::Token>, KleeneOp),
-    /// A syntactic variable that will be filled in by macro expansion.
-    TtNonterminal(Span, Ident)
+    TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp, uint),
 }

 impl TokenTree {
+    pub fn expand_into_tts(self) -> Rc<Vec<TokenTree>> {
+        match self {
+            TtToken(sp, token::DocComment(name)) => {
+                let doc = MetaNameValue(token::intern_and_get_ident("doc"),
+                                        respan(sp, LitStr(token::get_name(name), CookedStr)));
+                let doc = token::NtMeta(P(respan(sp, doc)));
+                let delimed = Delimited {
+                    delim: token::Bracket,
+                    open_span: sp,
+                    tts: vec![TtToken(sp, token::Interpolated(doc))],
+                    close_span: sp,
+                };
+                Rc::new(vec![TtToken(sp, token::Pound),
+                             TtDelimited(sp, Rc::new(delimed))])
+            }
+            TtDelimited(_, ref delimed) => {
+                let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
+                tts.push(delimed.open_tt());
+                tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
+                tts.push(delimed.close_tt());
+                Rc::new(tts)
+            }
+            TtToken(sp, token::SubstNt(name, namep)) => {
+                Rc::new(vec![TtToken(sp, token::Dollar),
+                             TtToken(sp, token::Ident(name, namep))])
+            }
+            TtToken(sp, token::MatchNt(name, kind, namep, kindp)) => {
+                Rc::new(vec![TtToken(sp, token::SubstNt(name, namep)),
+                             TtToken(sp, token::Colon),
+                             TtToken(sp, token::Ident(kind, kindp))])
+            }
+            _ => panic!("Cannot expand a token")
+        }
+    }
+
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
             TtToken(span, _) => span,
             TtDelimited(span, _) => span,
-            TtSequence(span, _, _, _) => span,
-            TtNonterminal(span, _) => span,
+            TtSequence(span, _, _, _, _) => span,
         }
     }
 }
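The new `expand_into_tts` "unzips" a compound tree into the primitive tokens it abbreviates, so the matcher and transcriber can walk everything uniformly. Illustratively (shapes as in the hunk above, spans elided; a sketch, not compiler output):

    // `$e:expr`  ->  [ `$e`, `:`, `expr` ]
    // TtToken(sp, MatchNt(e, expr, Plain, Plain)).expand_into_tts() gives:
    //     [TtToken(sp, SubstNt(e, Plain)),
    //      TtToken(sp, Colon),
    //      TtToken(sp, Ident(expr, Plain))]
    //
    // `( a b )` -> [ `(`, a, b, `)` ]: TtDelimited re-exposes its delimiters
    // via open_tt()/close_tt(), and a doc comment becomes `#[doc = "..."]`.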
@@ -616,6 +616,20 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                             vec!(mk_name(cx, sp, ident.ident())));
         }

+        token::MatchNt(name, kind, name_style, kind_style) => {
+            return cx.expr_call(sp,
+                                mk_token_path(cx, sp, "MatchNt"),
+                                vec![mk_ident(cx, sp, name),
+                                     mk_ident(cx, sp, kind),
+                                     match name_style {
+                                         ModName => mk_token_path(cx, sp, "ModName"),
+                                         Plain => mk_token_path(cx, sp, "Plain"),
+                                     },
+                                     match kind_style {
+                                         ModName => mk_token_path(cx, sp, "ModName"),
+                                         Plain => mk_token_path(cx, sp, "Plain"),
+                                     }]);
+        }
         token::Interpolated(_) => panic!("quote! with interpolated token"),

         _ => ()
@@ -654,6 +668,25 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {

 fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
     match *tt {
+        ast::TtToken(sp, SubstNt(ident, _)) => {
+            // tt.extend($ident.to_tokens(ext_cx).into_iter())
+
+            let e_to_toks =
+                cx.expr_method_call(sp,
+                                    cx.expr_ident(sp, ident),
+                                    id_ext("to_tokens"),
+                                    vec!(cx.expr_ident(sp, id_ext("ext_cx"))));
+            let e_to_toks =
+                cx.expr_method_call(sp, e_to_toks, id_ext("into_iter"), vec![]);
+
+            let e_push =
+                cx.expr_method_call(sp,
+                                    cx.expr_ident(sp, id_ext("tt")),
+                                    id_ext("extend"),
+                                    vec!(e_to_toks));
+
+            vec!(cx.stmt_expr(e_push))
+        }
         ast::TtToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,
@@ -673,25 +706,6 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
             .collect()
         },
         ast::TtSequence(..) => panic!("TtSequence in quote!"),
-        ast::TtNonterminal(sp, ident) => {
-            // tt.extend($ident.to_tokens(ext_cx).into_iter())
-
-            let e_to_toks =
-                cx.expr_method_call(sp,
-                                    cx.expr_ident(sp, ident),
-                                    id_ext("to_tokens"),
-                                    vec!(cx.expr_ident(sp, id_ext("ext_cx"))));
-            let e_to_toks =
-                cx.expr_method_call(sp, e_to_toks, id_ext("into_iter"), vec![]);
-
-            let e_push =
-                cx.expr_method_call(sp,
-                                    cx.expr_ident(sp, id_ext("tt")),
-                                    id_ext("extend"),
-                                    vec!(e_to_toks));
-
-            vec!(cx.stmt_expr(e_push))
-        },
     }
 }

@@ -78,69 +78,80 @@


 use ast;
-use ast::{Matcher, MatchTok, MatchSeq, MatchNonterminal, Ident};
+use ast::{Matcher, TokenTree, Ident};
+use ast::{TtDelimited, TtSequence, TtToken};
 use codemap::{BytePos, mk_sp};
 use codemap;
 use parse::lexer::*; //resolve bug?
 use parse::ParseSess;
 use parse::attr::ParserAttr;
 use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
+use parse::token::{Eof, DocComment, MatchNt, SubstNt};
 use parse::token::{Token, Nonterminal};
 use parse::token;
 use print::pprust;
 use ptr::P;

+use std::mem;
 use std::rc::Rc;
 use std::collections::HashMap;
+use std::collections::hash_map::{Vacant, Occupied};

 /* to avoid costly uniqueness checks, we require that `MatchSeq` always has a
 nonempty body. */


+/// an unzipping of `TokenTree`s
+#[deriving(Clone)]
+struct MatcherTtFrame {
+    elts: Rc<Vec<ast::TokenTree>>,
+    idx: uint,
+}
+
 #[deriving(Clone)]
 pub struct MatcherPos {
-    elts: Vec<ast::Matcher> , // maybe should be <'>? Need to understand regions.
+    stack: Vec<MatcherTtFrame>,
+    elts: Rc<Vec<ast::TokenTree>>,
     sep: Option<Token>,
     idx: uint,
     up: Option<Box<MatcherPos>>,
     matches: Vec<Vec<Rc<NamedMatch>>>,
-    match_lo: uint, match_hi: uint,
+    match_lo: uint,
+    match_cur: uint,
+    match_hi: uint,
     sp_lo: BytePos,
 }

-pub fn count_names(ms: &[Matcher]) -> uint {
-    ms.iter().fold(0, |ct, m| {
-        ct + match m.node {
-            MatchTok(_) => 0u,
-            MatchSeq(ref more_ms, _, _, _, _) => {
-                count_names(more_ms.as_slice())
-            }
-            MatchNonterminal(_, _, _) => 1u
-        }})
+pub fn count_names(ms: &[TokenTree]) -> uint {
+    ms.iter().fold(0, |count, elt| {
+        count + match elt {
+            &TtSequence(_, _, _, _, advance_by) => {
+                advance_by
+            }
+            &TtDelimited(_, ref delim) => {
+                count_names(delim.tts.as_slice())
+            }
+            &TtToken(_, MatchNt(..)) => {
+                1
+            }
+            &TtToken(_, _) => 0,
+        }
+    })
 }

-pub fn initial_matcher_pos(ms: Vec<Matcher> , sep: Option<Token>, lo: BytePos)
+pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
                            -> Box<MatcherPos> {
-    let mut match_idx_hi = 0u;
-    for elt in ms.iter() {
-        match elt.node {
-            MatchTok(_) => (),
-            MatchSeq(_,_,_,_,hi) => {
-                match_idx_hi = hi; // it is monotonic...
-            }
-            MatchNonterminal(_,_,pos) => {
-                match_idx_hi = pos+1u; // ...so latest is highest
-            }
-        }
-    }
-    let matches = Vec::from_fn(count_names(ms.as_slice()), |_i| Vec::new());
+    let match_idx_hi = count_names(ms.as_slice());
+    let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new());
     box MatcherPos {
+        stack: vec![],
         elts: ms,
         sep: sep,
         idx: 0u,
         up: None,
         matches: matches,
         match_lo: 0u,
+        match_cur: 0u,
         match_hi: match_idx_hi,
         sp_lo: lo
     }
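The `TtSequence` variant now caches the number of names it binds (the new trailing `uint`, called `advance_by` here), so `count_names` answers in O(1) for sequences instead of re-walking them. A minimal modern-Rust model of that bookkeeping (hypothetical types, not the compiler's):

    // Model of count_names: how many matcher bindings a token-tree slice holds.
    enum Tt {
        Plain,                    // ordinary token: binds nothing
        MatchNt,                  // `$name:kind`: binds one name
        Delimited(Vec<Tt>),       // `( ... )`: binds whatever is inside
        Sequence(Vec<Tt>, usize), // `$( ... )*` with the cached count
    }

    fn count_names(tts: &[Tt]) -> usize {
        tts.iter().map(|tt| match tt {
            Tt::Plain => 0,
            Tt::MatchNt => 1,
            Tt::Delimited(inner) => count_names(inner),
            Tt::Sequence(_, cached) => *cached, // the `advance_by` shortcut
        }).sum()
    }

    fn main() {
        // `$($a:ident, $b:expr);*` binds two names per repetition.
        let seq = Tt::Sequence(vec![Tt::MatchNt, Tt::Plain, Tt::MatchNt], 2);
        assert_eq!(count_names(&[seq]), 2);
    }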
@@ -152,11 +163,9 @@ pub fn initial_matcher_pos(ms: Vec<Matcher> , sep: Option<Token>, lo: BytePos)
 /// (expr, item, etc). All the leaves in a single NamedMatch correspond to a
 /// single matcher_nonterminal in the ast::Matcher that produced it.
 ///
-/// It should probably be renamed, it has more or less exact correspondence to
-/// ast::match nodes, and the in-memory structure of a particular NamedMatch
-/// represents the match that occurred when a particular subset of an
-/// ast::match -- those ast::Matcher nodes leading to a single
-/// MatchNonterminal -- was applied to a particular token tree.
+/// The in-memory structure of a particular NamedMatch represents the match
+/// that occurred when a particular subset of a matcher was applied to a
+/// particular token tree.
 ///
 /// The width of each MatchedSeq in the NamedMatch, and the identity of the
 /// MatchedNonterminal's, will depend on the token tree it was applied to: each
@@ -170,34 +179,43 @@ pub enum NamedMatch {
     MatchedNonterminal(Nonterminal)
 }

-pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[Rc<NamedMatch>])
+pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                -> HashMap<Ident, Rc<NamedMatch>> {
-    fn n_rec(p_s: &ParseSess, m: &Matcher, res: &[Rc<NamedMatch>],
-             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>) {
-        match *m {
-            codemap::Spanned {node: MatchTok(_), .. } => (),
-            codemap::Spanned {node: MatchSeq(ref more_ms, _, _, _, _), .. } => {
-                for next_m in more_ms.iter() {
-                    n_rec(p_s, next_m, res, ret_val)
-                };
-            }
-            codemap::Spanned {
-                node: MatchNonterminal(bind_name, _, idx),
-                span
-            } => {
-                if ret_val.contains_key(&bind_name) {
-                    let string = token::get_ident(bind_name);
-                    p_s.span_diagnostic
-                       .span_fatal(span,
-                                   format!("duplicated bind name: {}",
-                                           string.get()).as_slice())
-                }
-                ret_val.insert(bind_name, res[idx].clone());
-            }
+    fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
+             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
+        match m {
+            &TtSequence(_, ref more_ms, _, _, _) => {
+                for next_m in more_ms.iter() {
+                    n_rec(p_s, next_m, res, ret_val, idx)
+                }
+            }
+            &TtDelimited(_, ref delim) => {
+                for next_m in delim.tts.iter() {
+                    n_rec(p_s, next_m, res, ret_val, idx)
+                }
+            }
+            &TtToken(sp, MatchNt(bind_name, _, _, _)) => {
+                match ret_val.entry(bind_name) {
+                    Vacant(spot) => {
+                        spot.set(res[*idx].clone());
+                        *idx += 1;
+                    }
+                    Occupied(..) => {
+                        let string = token::get_ident(bind_name);
+                        p_s.span_diagnostic
+                           .span_fatal(sp,
+                                       format!("duplicated bind name: {}",
+                                               string.get()).as_slice())
+                    }
+                }
+            }
+            &TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
+            &TtToken(_, _) => (),
         }
     }
     let mut ret_val = HashMap::new();
-    for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val) }
+    let mut idx = 0u;
+    for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
     ret_val
 }
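With the per-node index gone from the AST, `nameize` now threads one mutable counter through a left-to-right walk; each `MatchNt` claims the next result slot. A small runnable model of that numbering (hypothetical, not compiler code):

    use std::collections::HashMap;

    // Assign result slots to bind names in traversal order, rejecting
    // duplicates, like n_rec's `idx: &mut uint` parameter in the hunk above.
    fn assign_slots(names: &[&str]) -> HashMap<String, usize> {
        let mut slots = HashMap::new();
        let mut idx = 0;
        for name in names {
            if slots.insert(name.to_string(), idx).is_some() {
                panic!("duplicated bind name: {}", name);
            }
            idx += 1;
        }
        slots
    }

    fn main() {
        let slots = assign_slots(&["a", "b"]);
        assert_eq!(slots["a"], 0);
        assert_eq!(slots["b"], 1);
    }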
@@ -210,7 +228,7 @@ pub enum ParseResult {
 pub fn parse_or_else(sess: &ParseSess,
                      cfg: ast::CrateConfig,
                      rdr: TtReader,
-                     ms: Vec<Matcher> )
+                     ms: Vec<TokenTree> )
                      -> HashMap<Ident, Rc<NamedMatch>> {
     match parse(sess, cfg, rdr, ms.as_slice()) {
         Success(m) => m,
@@ -237,12 +255,12 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
 pub fn parse(sess: &ParseSess,
              cfg: ast::CrateConfig,
              mut rdr: TtReader,
-             ms: &[Matcher])
+             ms: &[TokenTree])
              -> ParseResult {
     let mut cur_eis = Vec::new();
-    cur_eis.push(initial_matcher_pos(ms.iter()
+    cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
                                        .map(|x| (*x).clone())
-                                       .collect(),
+                                       .collect()),
                                      None,
                                      rdr.peek().sp.lo));
@@ -255,11 +273,22 @@ pub fn parse(sess: &ParseSess,

     /* we append new items to this while we go */
     loop {
-        let ei = match cur_eis.pop() {
+        let mut ei = match cur_eis.pop() {
            None => break, /* for each Earley Item */
            Some(ei) => ei,
        };

+        // When unzipped trees end, remove them
+        while ei.idx >= ei.elts.len() {
+            match ei.stack.pop() {
+                Some(MatcherTtFrame { elts, idx }) => {
+                    ei.elts = elts;
+                    ei.idx = idx + 1;
+                }
+                None => break
+            }
+        }
+
         let idx = ei.idx;
         let len = ei.elts.len();

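Because the matcher now descends into `TtDelimited` (and doc-comment/`MatchNt`) trees by pushing a `MatcherTtFrame` rather than by recursing, it must pop finished frames before examining the next element. A standalone sketch of the same pop-and-resume loop (hypothetical model, not compiler code):

    // When the current slice is exhausted, resume the parent slice one
    // past the position that was unzipped.
    struct Frame { elts: Vec<char>, idx: usize }

    fn next_token(stack: &mut Vec<Frame>, cur: &mut Frame) -> Option<char> {
        while cur.idx >= cur.elts.len() {
            match stack.pop() {
                Some(Frame { elts, idx }) => {
                    cur.elts = elts;
                    cur.idx = idx + 1; // skip the element we descended into
                }
                None => return None,
            }
        }
        let tok = cur.elts[cur.idx];
        cur.idx += 1;
        Some(tok)
    }

    fn main() {
        let mut stack = vec![Frame { elts: vec!['a', 'b'], idx: 0 }];
        let mut cur = Frame { elts: vec!['x'], idx: 0 };
        assert_eq!(next_token(&mut stack, &mut cur), Some('x'));
        assert_eq!(next_token(&mut stack, &mut cur), Some('b')); // resumed past 'a'
    }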
@@ -293,6 +322,7 @@ pub fn parse(sess: &ParseSess,
                                                        sp.hi))));
                 }

+                new_pos.match_cur = ei.match_hi;
                 new_pos.idx += 1;
                 cur_eis.push(new_pos);
             }
@@ -301,69 +331,88 @@ pub fn parse(sess: &ParseSess,

             // the *_t vars are workarounds for the lack of unary move
             match ei.sep {
                 Some(ref t) if idx == len => { // we need a separator
                     // i'm conflicted about whether this should be hygienic....
                     // though in this case, if the separators are never legal
                     // idents, it shouldn't matter.
                     if token_name_eq(&tok, t) { //pass the separator
                         let mut ei_t = ei.clone();
+                        // ei_t.match_cur = ei_t.match_lo;
                         ei_t.idx += 1;
                         next_eis.push(ei_t);
                     }
                 }
                 _ => { // we don't need a separator
                     let mut ei_t = ei;
+                    ei_t.match_cur = ei_t.match_lo;
                     ei_t.idx = 0;
                     cur_eis.push(ei_t);
                 }
             }
         } else {
             eof_eis.push(ei);
         }
     } else {
-        match ei.elts[idx].node.clone() {
+        match (*ei.elts)[idx].clone() {
             /* need to descend into sequence */
-            MatchSeq(ref matchers, ref sep, kleene_op,
-                     match_idx_lo, match_idx_hi) => {
+            TtSequence(_, ref matchers, ref sep, kleene_op, match_num) => {
                 if kleene_op == ast::ZeroOrMore {
                     let mut new_ei = ei.clone();
+                    new_ei.match_cur += match_num;
                     new_ei.idx += 1u;
                     //we specifically matched zero repeats.
-                    for idx in range(match_idx_lo, match_idx_hi) {
+                    for idx in range(ei.match_cur, ei.match_cur + match_num) {
                         new_ei.matches[idx]
                               .push(Rc::new(MatchedSeq(Vec::new(), sp)));
                     }

                     cur_eis.push(new_ei);
                 }

                 let matches = Vec::from_elem(ei.matches.len(), Vec::new());
                 let ei_t = ei;
                 cur_eis.push(box MatcherPos {
-                    elts: (*matchers).clone(),
+                    stack: vec![],
+                    elts: matchers.clone(),
                     sep: (*sep).clone(),
                     idx: 0u,
-                    up: Some(ei_t),
                     matches: matches,
-                    match_lo: match_idx_lo, match_hi: match_idx_hi,
+                    match_lo: ei_t.match_cur,
+                    match_cur: ei_t.match_cur,
+                    match_hi: ei_t.match_cur + match_num,
+                    up: Some(ei_t),
                     sp_lo: sp.lo
                 });
             }
-            MatchNonterminal(_,_,_) => {
+            TtToken(_, MatchNt(..)) => {
                 // Built-in nonterminals never start with these tokens,
                 // so we can eliminate them from consideration.
                 match tok {
                     token::CloseDelim(_) => {},
                     _ => bb_eis.push(ei),
                 }
             }
-            MatchTok(ref t) => {
-                let mut ei_t = ei.clone();
-                if token_name_eq(t,&tok) {
-                    ei_t.idx += 1;
-                    next_eis.push(ei_t);
+            TtToken(sp, SubstNt(..)) => {
+                return Error(sp, "Cannot transcribe in macro LHS".into_string())
+            }
+            seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
+                let tts = seq.expand_into_tts();
+                let elts = mem::replace(&mut ei.elts, tts);
+                let idx = ei.idx;
+                ei.stack.push(MatcherTtFrame {
+                    elts: elts,
+                    idx: idx,
+                });
+                ei.idx = 0;
+                cur_eis.push(ei);
+            }
+            TtToken(_, ref t) => {
+                let mut ei_t = ei.clone();
+                if token_name_eq(t, &tok) {
+                    ei_t.idx += 1;
+                    next_eis.push(ei_t);
                 }
             }
         }
     }
 }
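Two consequences of the rewritten arms are visible to macro authors: a bare `$x` with no fragment specifier is rejected in a LHS (the `SubstNt` arm above, with the "Cannot transcribe in macro LHS" error), and delimited groups inside a LHS are matched token-by-token via the unzipping stack rather than by a nested recursive call. Roughly, in user-facing terms (illustrative; the accepted example is runnable):

    // Accepted: every variable in a LHS names its fragment kind.
    macro_rules! ok {
        ($e:expr) => { $e };
    }

    // Rejected by the TtToken(sp, SubstNt(..)) arm above:
    // macro_rules! bad {
    //     ($e) => { $e };
    // }

    fn main() { assert_eq!(ok!(1 + 1), 2); }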
@@ -385,8 +434,8 @@ pub fn parse(sess: &ParseSess,
         if (bb_eis.len() > 0u && next_eis.len() > 0u)
             || bb_eis.len() > 1u {
             let nts = bb_eis.iter().map(|ei| {
-                match ei.elts[ei.idx].node {
-                    MatchNonterminal(bind, name, _) => {
+                match (*ei.elts)[ei.idx] {
+                    TtToken(_, MatchNt(bind, name, _, _)) => {
                         (format!("{} ('{}')",
                                 token::get_ident(name),
                                 token::get_ident(bind))).to_string()
@@ -410,12 +459,14 @@ pub fn parse(sess: &ParseSess,
             let mut rust_parser = Parser::new(sess, cfg.clone(), box rdr.clone());

             let mut ei = bb_eis.pop().unwrap();
-            match ei.elts[ei.idx].node {
-                MatchNonterminal(_, name, idx) => {
+            match (*ei.elts)[ei.idx] {
+                TtToken(_, MatchNt(_, name, _, _)) => {
                     let name_string = token::get_ident(name);
-                    ei.matches[idx].push(Rc::new(MatchedNonterminal(
+                    let match_cur = ei.match_cur;
+                    ei.matches[match_cur].push(Rc::new(MatchedNonterminal(
                         parse_nt(&mut rust_parser, name_string.get()))));
                     ei.idx += 1u;
+                    ei.match_cur += 1;
                 }
                 _ => panic!()
             }
@@ -9,8 +9,9 @@
 // except according to those terms.

 use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TtDelimited};
+use ast::{TtSequence, TtToken};
 use ast;
-use codemap::{Span, Spanned, DUMMY_SP};
+use codemap::{Span, DUMMY_SP};
 use ext::base::{ExtCtxt, MacResult, MacroDef};
 use ext::base::{NormalTT, TTMacroExpander};
 use ext::tt::macro_parser::{Success, Error, Failure};
@@ -20,7 +21,7 @@ use parse::lexer::new_tt_reader;
 use parse::parser::Parser;
 use parse::attr::ParserAttr;
 use parse::token::{special_idents, gensym_ident};
-use parse::token::{NtMatchers, NtTT};
+use parse::token::{MatchNt, NtMatchers, NtTT};
 use parse::token;
 use print;
 use ptr::P;
@@ -158,14 +159,18 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,

     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         match **lhs {
-            MatchedNonterminal(NtMatchers(ref mtcs)) => {
+            MatchedNonterminal(NtTT(ref lhs_tt)) => {
+                let lhs_tt = match **lhs_tt {
+                    TtDelimited(_, ref delim) => delim.tts.as_slice(),
+                    _ => cx.span_fatal(sp, "malformed macro lhs")
+                };
                 // `None` is because we're not interpolating
                 let arg_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                             None,
                                             arg.iter()
                                                .map(|x| (*x).clone())
                                                .collect());
-                match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs.as_slice()) {
+                match parse(cx.parse_sess(), cx.cfg(), arg_rdr, lhs_tt) {
                     Success(named_matches) => {
                         let rhs = match *rhses[i] {
                             // okay, what's your transcriber?
@@ -210,31 +215,33 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
                               name: Ident,
                               arg: Vec<ast::TokenTree> )
                               -> Box<MacResult+'cx> {
-    // these spans won't matter, anyways
-    fn ms(m: Matcher_) -> Matcher {
-        Spanned {
-            node: m.clone(),
-            span: DUMMY_SP
-        }
-    }
-
     let lhs_nm = gensym_ident("lhs");
     let rhs_nm = gensym_ident("rhs");

     // The pattern that macro_rules matches.
     // The grammar for macro_rules! is:
-    // $( $lhs:mtcs => $rhs:tt );+
+    // $( $lhs:tt => $rhs:tt );+
     // ...quasiquoting this would be nice.
+    // These spans won't matter, anyways
+    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
+    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
     let argument_gram = vec!(
-        ms(MatchSeq(vec!(
-            ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)),
-            ms(MatchTok(token::FatArrow)),
-            ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))),
-            Some(token::Semi), ast::OneOrMore, 0u, 2u)),
+        TtSequence(DUMMY_SP,
+                   Rc::new(vec![
+                       TtToken(DUMMY_SP, match_lhs_tok),
+                       TtToken(DUMMY_SP, token::FatArrow),
+                       TtToken(DUMMY_SP, match_rhs_tok)]),
+                   Some(token::Semi),
+                   ast::OneOrMore,
+                   2),
         //to phase into semicolon-termination instead of
        //semicolon-separation
-        ms(MatchSeq(vec!(ms(MatchTok(token::Semi))), None,
-                    ast::ZeroOrMore, 2u, 2u)));
+        TtSequence(DUMMY_SP,
+                   Rc::new(vec![TtToken(DUMMY_SP, token::Semi)]),
+                   None,
+                   ast::ZeroOrMore,
+                   0));


     // Parse the macro_rules! invocation (`none` is for no interpolations):
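The `argument_gram` built here is simply the grammar of `macro_rules!` itself, now written as token trees: one or more `$lhs:tt => $rhs:tt` arms separated (or terminated) by semicolons. In user-facing syntax, that grammar accepts definitions like this runnable example:

    // Each arm is one `tt => tt` pair matched by the grammar above.
    macro_rules! pair {
        ($a:expr, $b:expr) => (($a, $b));
        () => ((0, 0));
    }

    fn main() {
        assert_eq!(pair!(1, 2), (1, 2));
        assert_eq!(pair!(), (0, 0));
    }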
@@ -9,10 +9,11 @@
 // except according to those terms.

 use ast;
-use ast::{TokenTree, TtDelimited, TtToken, TtSequence, TtNonterminal, Ident};
+use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident};
 use codemap::{Span, DUMMY_SP};
 use diagnostic::SpanHandler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
+use parse::token::{Eof, DocComment, Interpolated, MatchNt, SubstNt};
 use parse::token::{Token, NtIdent};
 use parse::token;
 use parse::lexer::TokenAndSpan;
@@ -85,17 +86,9 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<Name
     })
 }

-fn lookup_cur_matched(r: &TtReader, name: Ident) -> Rc<NamedMatch> {
+fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
     let matched_opt = r.interpolations.find_copy(&name);
-    match matched_opt {
-        Some(s) => lookup_cur_matched_by_matched(r, s),
-        None => {
-            r.sp_diag
-             .span_fatal(r.cur_span,
-                         format!("unknown macro variable `{}`",
-                                 token::get_ident(name)).as_slice());
-        }
-    }
+    matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
 }

 #[deriving(Clone)]
@@ -133,16 +126,20 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TtSequence(_, ref tts, _, _) => {
+        TtSequence(_, ref tts, _, _, _) => {
             tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
+        TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) =>
+            match lookup_cur_matched(r, name) {
+                Some(matched) => match *matched {
+                    MatchedNonterminal(_) => LisUnconstrained,
+                    MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
+                },
+                _ => LisUnconstrained
+            },
         TtToken(..) => LisUnconstrained,
-        TtNonterminal(_, name) => match *lookup_cur_matched(r, name) {
-            MatchedNonterminal(_) => LisUnconstrained,
-            MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name)
-        },
     }
 }
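`lockstep_iter_size` now derives repetition sizes from `SubstNt`/`MatchNt` tokens directly: every variable mentioned under a `$(...)` must agree on one repetition count. The constraint it enforces, in user-facing terms (runnable):

    // Both $a and $b matched two elements, so the RHS repetition is
    // constrained to length 2; mismatched lengths would be a compile error.
    macro_rules! zip_add {
        ($($a:expr),* ; $($b:expr),*) => { [ $( $a + $b ),* ] };
    }

    fn main() {
        assert_eq!(zip_add!(1, 2; 10, 20), [11, 22]);
    }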
@@ -205,40 +202,21 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
         (*frame.forest)[frame.idx].clone()
     };
     match t {
-        TtDelimited(_, ref delimed) => {
-            let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
-            tts.push(delimed.open_tt());
-            tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
-            tts.push(delimed.close_tt());
-
-            r.stack.push(TtFrame {
-                forest: Rc::new(tts),
-                idx: 0,
-                dotdotdoted: false,
-                sep: None
-            });
-            // if this could be 0-length, we'd need to potentially recur here
-        }
-        TtToken(sp, tok) => {
-            r.cur_span = sp;
-            r.cur_tok = tok;
-            r.stack.last_mut().unwrap().idx += 1;
-            return ret_val;
-        }
-        TtSequence(sp, tts, sep, kleene_op) => {
+        TtSequence(sp, tts, sep, kleene_op, n) => {
             // FIXME(pcwalton): Bad copy.
-            match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op), r) {
+            match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op, n),
+                                     r) {
                 LisUnconstrained => {
                     r.sp_diag.span_fatal(
                         sp.clone(), /* blame macro writer */
                         "attempted to repeat an expression \
                          containing no syntax \
                          variables matched as repeating at this depth");
                 }
                 LisContradiction(ref msg) => {
                     // FIXME #2887 blame macro invoker instead
                     r.sp_diag.span_fatal(sp.clone(), msg.as_slice());
                 }
                 LisConstraint(len, _) => {
                     if len == 0 {
                         if kleene_op == ast::OneOrMore {
@@ -262,31 +240,62 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             }
         }
         // FIXME #2887: think about span stuff here
-        TtNonterminal(sp, ident) => {
-            r.stack.last_mut().unwrap().idx += 1;
-            match *lookup_cur_matched(r, ident) {
-                /* sidestep the interpolation tricks for ident because
-                   (a) idents can be in lots of places, so it'd be a pain
-                   (b) we actually can, since it's a token. */
-                MatchedNonterminal(NtIdent(box sn, b)) => {
-                    r.cur_span = sp;
-                    r.cur_tok = token::Ident(sn,b);
-                    return ret_val;
-                }
-                MatchedNonterminal(ref other_whole_nt) => {
-                    // FIXME(pcwalton): Bad copy.
-                    r.cur_span = sp;
-                    r.cur_tok = token::Interpolated((*other_whole_nt).clone());
-                    return ret_val;
-                }
-                MatchedSeq(..) => {
-                    r.sp_diag.span_fatal(
-                        r.cur_span, /* blame the macro writer */
-                        format!("variable '{}' is still repeating at this depth",
-                                token::get_ident(ident)).as_slice());
-                }
-            }
-        }
+        TtToken(sp, SubstNt(ident, namep)) => {
+            match lookup_cur_matched(r, ident) {
+                None => {
+                    r.stack.push(TtFrame {
+                        forest: TtToken(sp, SubstNt(ident, namep)).expand_into_tts(),
+                        idx: 0,
+                        dotdotdoted: false,
+                        sep: None
+                    });
+                    // this can't be 0 length, just like TtDelimited
+                }
+                Some(cur_matched) => {
+                    r.stack.last_mut().unwrap().idx += 1;
+                    match *cur_matched {
+                        // sidestep the interpolation tricks for ident because
+                        // (a) idents can be in lots of places, so it'd be a pain
+                        // (b) we actually can, since it's a token.
+                        MatchedNonterminal(NtIdent(box sn, b)) => {
+                            r.cur_span = sp;
+                            r.cur_tok = token::Ident(sn, b);
+                            return ret_val;
+                        }
+                        MatchedNonterminal(ref other_whole_nt) => {
+                            // FIXME(pcwalton): Bad copy.
+                            r.cur_span = sp;
+                            r.cur_tok = token::Interpolated((*other_whole_nt).clone());
+                            return ret_val;
+                        }
+                        MatchedSeq(..) => {
+                            r.sp_diag.span_fatal(
+                                r.cur_span, /* blame the macro writer */
+                                format!("variable '{}' is still repeating at this depth",
+                                        token::get_ident(ident)).as_slice());
+                        }
+                    }
+                }
+            }
+        }
+        // TtDelimited or any token that can be unzipped
+        seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..))
+        | seq @ TtToken(_, MatchNt(..)) => {
+            // do not advance the idx yet
+            r.stack.push(TtFrame {
+                forest: seq.expand_into_tts(),
+                idx: 0,
+                dotdotdoted: false,
+                sep: None
+            });
+            // if this could be 0-length, we'd need to potentially recur here
+        }
+        TtToken(sp, tok) => {
+            r.cur_span = sp;
+            r.cur_tok = tok;
+            r.stack.last_mut().unwrap().idx += 1;
+            return ret_val;
+        }
     }
 }
@@ -581,13 +581,12 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
                 }
             ))
         },
-        TtSequence(span, ref pattern, ref sep, is_optional) =>
+        TtSequence(span, ref pattern, ref sep, is_optional, advance_by) =>
             TtSequence(span,
                        Rc::new(fld.fold_tts(pattern.as_slice())),
                        sep.clone().map(|tok| fld.fold_token(tok)),
-                       is_optional),
-        TtNonterminal(sp,ref ident) =>
-            TtNonterminal(sp,fld.fold_ident(*ident))
+                       is_optional,
+                       advance_by),
     }
 }
@@ -603,6 +602,12 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
         }
         token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
         token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
+        token::SubstNt(ident, namep) => {
+            token::SubstNt(fld.fold_ident(ident), namep)
+        }
+        token::MatchNt(name, kind, namep, kindp) => {
+            token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind), namep, kindp)
+        }
         _ => t
     }
 }
@@ -49,7 +49,7 @@ use ast::{StructVariantKind, BiSub};
 use ast::StrStyle;
 use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
 use ast::{Delimited, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken};
-use ast::{TtNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
+use ast::{TupleVariantKind, Ty, Ty_, TyBot};
 use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
 use ast::{TyTypeof, TyInfer, TypeMethod};
 use ast::{TyNil, TyParam, TyParamBound, TyParen, TyPath, TyPtr, TyQPath};
@@ -65,6 +65,7 @@ use ast_util::{as_prec, ident_to_path, operator_prec};
 use ast_util;
 use codemap::{Span, BytePos, Spanned, spanned, mk_sp};
 use codemap;
+use ext::tt::macro_parser;
 use parse;
 use parse::attr::ParserAttr;
 use parse::classify;
@@ -73,7 +74,7 @@ use parse::common::{seq_sep_trailing_allowed};
 use parse::lexer::Reader;
 use parse::lexer::TokenAndSpan;
 use parse::obsolete::*;
-use parse::token::InternedString;
+use parse::token::{MatchNt, SubstNt, InternedString};
 use parse::token::{keywords, special_idents};
 use parse::token;
 use parse::{new_sub_parser_from_file, ParseSess};
@@ -2508,7 +2509,7 @@ impl<'a> Parser<'a> {
     pub fn parse_token_tree(&mut self) -> TokenTree {
         // FIXME #6994: currently, this is too eager. It
         // parses token trees but also identifies TtSequence's
-        // and TtNonterminal's; it's too early to know yet
+        // and token::SubstNt's; it's too early to know yet
         // whether something will be a nonterminal or a seq
         // yet.
         maybe_whole!(deref self, NtTT);
@@ -2549,9 +2550,21 @@ impl<'a> Parser<'a> {
                 let seq = match seq {
                     Spanned { node, .. } => node,
                 };
-                TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat)
+                let name_num = macro_parser::count_names(seq.as_slice());
+                TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat, name_num)
             } else {
-                TtNonterminal(sp, p.parse_ident())
+                // A nonterminal that matches or not
+                let namep = match p.token { token::Ident(_, p) => p, _ => token::Plain };
+                let name = p.parse_ident();
+                if p.token == token::Colon && p.look_ahead(1, |t| t.is_ident()) {
+                    p.bump();
+                    let kindp = match p.token { token::Ident(_, p) => p, _ => token::Plain };
+                    let nt_kind = p.parse_ident();
+                    let m = TtToken(sp, MatchNt(name, nt_kind, namep, kindp));
+                    m
+                } else {
+                    TtToken(sp, SubstNt(name, namep))
+                }
             }
         }
         _ => {
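After a `$`, the parser now peeks for `ident : ident` to decide between a `MatchNt` (`$name:kind`) and a plain `SubstNt` (`$name`). Schematically (token shapes from the hunk above; spans and ident styles elided — a sketch, not compiler output):

    // `$( ... ) sep op`  ->  TtSequence(sp, tts, sep, op, count_names(tts))
    // `$name:kind`       ->  TtToken(sp, MatchNt(name, kind, ..))
    // `$name`            ->  TtToken(sp, SubstNt(name, ..))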
@@ -108,7 +108,15 @@ pub enum Token {

     /* For interpolation */
     Interpolated(Nonterminal),
+    // Can be expanded into several tokens.
+    /// Doc comment
     DocComment(ast::Name),
+    // In left-hand-sides of MBE macros:
+    /// Parse a nonterminal (name to bind, name of NT, styles of their idents)
+    MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle),
+    // In right-hand-sides of MBE macros:
+    /// A syntactic variable that will be filled in by macro expansion.
+    SubstNt(ast::Ident, IdentStyle),

     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for
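The two new tokens round-trip through the pretty-printer as the surface syntax they came from, which the next hunk implements with two format strings. A trivial runnable sanity check of that formatting:

    // token_to_string renders SubstNt as `$name` and MatchNt as `$name:kind`.
    fn main() {
        let (name, kind) = ("e", "expr");
        assert_eq!(format!("${}", name), "$e");
        assert_eq!(format!("${}:{}", name, kind), "$e:expr");
    }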
@@ -254,6 +254,8 @@ pub fn token_to_string(tok: &Token) -> String {

         /* Other */
         token::DocComment(s) => s.as_str().into_string(),
+        token::SubstNt(s, _) => format!("${}", s),
+        token::MatchNt(s, t, _, _) => format!("${}:{}", s, t),
         token::Eof => "<eof>".into_string(),
         token::Whitespace => " ".into_string(),
         token::Comment => "/* */".into_string(),
@@ -1120,13 +1122,6 @@ impl<'a> State<'a> {
     /// expression arguments as expressions). It can be done! I think.
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
-            ast::TtDelimited(_, ref delimed) => {
-                try!(word(&mut self.s, token_to_string(&delimed.open_token()).as_slice()));
-                try!(space(&mut self.s));
-                try!(self.print_tts(delimed.tts.as_slice()));
-                try!(space(&mut self.s));
-                word(&mut self.s, token_to_string(&delimed.close_token()).as_slice())
-            },
             ast::TtToken(_, ref tk) => {
                 try!(word(&mut self.s, token_to_string(tk).as_slice()));
                 match *tk {
@@ -1136,7 +1131,14 @@ impl<'a> State<'a> {
                     _ => Ok(())
                 }
             }
-            ast::TtSequence(_, ref tts, ref separator, kleene_op) => {
+            ast::TtDelimited(_, ref delimed) => {
+                try!(word(&mut self.s, token_to_string(&delimed.open_token()).as_slice()));
+                try!(space(&mut self.s));
+                try!(self.print_tts(delimed.tts.as_slice()));
+                try!(space(&mut self.s));
+                word(&mut self.s, token_to_string(&delimed.close_token()).as_slice())
+            },
+            ast::TtSequence(_, ref tts, ref separator, kleene_op, _) => {
                 try!(word(&mut self.s, "$("));
                 for tt_elt in (*tts).iter() {
                     try!(self.print_tt(tt_elt));
@@ -1153,10 +1155,6 @@ impl<'a> State<'a> {
                     ast::OneOrMore => word(&mut self.s, "+"),
                 }
             }
-            ast::TtNonterminal(_, name) => {
-                try!(word(&mut self.s, "$"));
-                self.print_ident(name)
-            }
         }
     }