Auto merge of #29582 - oli-obk:token_tree, r=sfackler
commit 475f91f46e
15 changed files with 219 additions and 209 deletions
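The change is mechanical but wide: the variants formerly glob-exported from `ast` as `TtToken`, `TtDelimited`, and `TtSequence` become `Token`, `Delimited`, and `Sequence`, and every use site names them through the `TokenTree` enum. A minimal standalone sketch of the pattern, using simplified stand-in payloads (the real variants carry `Span`s, interned tokens, and `Rc`'d structs):

    use std::rc::Rc;

    // Stand-ins for the libsyntax types; payloads simplified for illustration.
    #[derive(Debug)]
    enum TokenTree {
        Token(u32, char),                   // was the glob-exported TtToken
        Delimited(u32, Rc<Vec<TokenTree>>), // was TtDelimited
        Sequence(u32, Rc<Vec<TokenTree>>),  // was TtSequence
    }

    fn main() {
        // Call sites now spell out the enum: TokenTree::Token(...).
        let tt = TokenTree::Token(0, ',');
        match tt {
            TokenTree::Token(..) => println!("single token"),
            TokenTree::Delimited(..) => println!("delimited group"),
            TokenTree::Sequence(..) => println!("repetition"),
        }
    }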
@@ -46,7 +46,7 @@ extern crate rustc;
 
 use syntax::codemap::Span;
 use syntax::parse::token;
-use syntax::ast::{TokenTree, TtToken};
+use syntax::ast::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax::ext::build::AstBuilder; // trait for expr_usize
 use rustc::plugin::Registry;
@@ -61,7 +61,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
                          ("I", 1)];
 
     let text = match args {
-        [TtToken(_, token::Ident(s, _))] => s.to_string(),
+        [TokenTree::Token(_, token::Ident(s, _))] => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
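For reference, the plugin example above accepts exactly one identifier token via a slice pattern over the macro's arguments. A runnable toy mirror of that check, with a hypothetical stand-in token type:

    // Stand-in token type; the real expand_rn matches ast::TokenTree values.
    #[derive(Debug, PartialEq)]
    enum TokenTree {
        Token(&'static str),
    }

    fn single_ident(args: &[TokenTree]) -> Result<&'static str, &'static str> {
        match args {
            // Exactly one token tree, and it must be an "identifier".
            [TokenTree::Token(s)] => Ok(*s),
            _ => Err("argument should be a single identifier"),
        }
    }

    fn main() {
        assert_eq!(single_ident(&[TokenTree::Token("MMXV")]), Ok("MMXV"));
        assert!(single_ident(&[]).is_err());
    }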
@@ -36,7 +36,6 @@ pub use self::Sign::*;
 pub use self::Stmt_::*;
 pub use self::StrStyle::*;
 pub use self::StructFieldKind::*;
-pub use self::TokenTree::*;
 pub use self::TraitItem_::*;
 pub use self::Ty_::*;
 pub use self::TyParamBound::*;
@@ -954,12 +953,12 @@ impl Delimited {
 
     /// Returns the opening delimiter as a token tree.
     pub fn open_tt(&self) -> TokenTree {
-        TtToken(self.open_span, self.open_token())
+        TokenTree::Token(self.open_span, self.open_token())
     }
 
     /// Returns the closing delimiter as a token tree.
     pub fn close_tt(&self) -> TokenTree {
-        TtToken(self.close_span, self.close_token())
+        TokenTree::Token(self.close_span, self.close_token())
     }
 }
 
@@ -999,61 +998,61 @@ pub enum KleeneOp {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum TokenTree {
     /// A single token
-    TtToken(Span, token::Token),
+    Token(Span, token::Token),
     /// A delimited sequence of token trees
-    TtDelimited(Span, Rc<Delimited>),
+    Delimited(Span, Rc<Delimited>),
 
     // This only makes sense in MBE macros.
 
     /// A kleene-style repetition sequence with a span
     // FIXME(eddyb) #12938 Use DST.
-    TtSequence(Span, Rc<SequenceRepetition>),
+    Sequence(Span, Rc<SequenceRepetition>),
 }
 
 impl TokenTree {
     pub fn len(&self) -> usize {
         match *self {
-            TtToken(_, token::DocComment(name)) => {
+            TokenTree::Token(_, token::DocComment(name)) => {
                 match doc_comment_style(&name.as_str()) {
                     AttrStyle::Outer => 2,
                     AttrStyle::Inner => 3
                 }
             }
-            TtToken(_, token::SpecialVarNt(..)) => 2,
-            TtToken(_, token::MatchNt(..)) => 3,
-            TtDelimited(_, ref delimed) => {
+            TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
+            TokenTree::Token(_, token::MatchNt(..)) => 3,
+            TokenTree::Delimited(_, ref delimed) => {
                 delimed.tts.len() + 2
             }
-            TtSequence(_, ref seq) => {
+            TokenTree::Sequence(_, ref seq) => {
                 seq.tts.len()
             }
-            TtToken(..) => 0
+            TokenTree::Token(..) => 0
         }
     }
 
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
-            (&TtToken(sp, token::DocComment(_)), 0) => {
-                TtToken(sp, token::Pound)
+            (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
+                TokenTree::Token(sp, token::Pound)
             }
-            (&TtToken(sp, token::DocComment(name)), 1)
+            (&TokenTree::Token(sp, token::DocComment(name)), 1)
             if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
-                TtToken(sp, token::Not)
+                TokenTree::Token(sp, token::Not)
             }
-            (&TtToken(sp, token::DocComment(name)), _) => {
+            (&TokenTree::Token(sp, token::DocComment(name)), _) => {
                 let stripped = strip_doc_comment_decoration(&name.as_str());
-                TtDelimited(sp, Rc::new(Delimited {
+                TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
                     open_span: sp,
-                    tts: vec![TtToken(sp, token::Ident(token::str_to_ident("doc"),
+                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"),
                                   token::Plain)),
-                              TtToken(sp, token::Eq),
-                              TtToken(sp, token::Literal(
+                              TokenTree::Token(sp, token::Eq),
+                              TokenTree::Token(sp, token::Literal(
                                   token::StrRaw(token::intern(&stripped), 0), None))],
                     close_span: sp,
                 }))
             }
-            (&TtDelimited(_, ref delimed), _) => {
+            (&TokenTree::Delimited(_, ref delimed), _) => {
                 if index == 0 {
                     return delimed.open_tt();
                 }
@@ -1062,19 +1061,19 @@ impl TokenTree {
                 }
                 delimed.tts[index - 1].clone()
             }
-            (&TtToken(sp, token::SpecialVarNt(var)), _) => {
-                let v = [TtToken(sp, token::Dollar),
-                         TtToken(sp, token::Ident(token::str_to_ident(var.as_str()),
+            (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
+                let v = [TokenTree::Token(sp, token::Dollar),
+                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()),
                              token::Plain))];
                 v[index].clone()
             }
-            (&TtToken(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
-                let v = [TtToken(sp, token::SubstNt(name, name_st)),
-                         TtToken(sp, token::Colon),
-                         TtToken(sp, token::Ident(kind, kind_st))];
+            (&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
+                let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)),
+                         TokenTree::Token(sp, token::Colon),
+                         TokenTree::Token(sp, token::Ident(kind, kind_st))];
                 v[index].clone()
            }
-            (&TtSequence(_, ref seq), _) => {
+            (&TokenTree::Sequence(_, ref seq), _) => {
                 seq.tts[index].clone()
             }
             _ => panic!("Cannot expand a token tree")
@@ -1084,9 +1083,9 @@ impl TokenTree {
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
-            TtToken(span, _) => span,
-            TtDelimited(span, _) => span,
-            TtSequence(span, _) => span,
+            TokenTree::Token(span, _) => span,
+            TokenTree::Delimited(span, _) => span,
+            TokenTree::Sequence(span, _) => span,
         }
     }
 
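The `len` method above counts a doc comment as the token trees its attribute desugaring would produce. A toy mirror of that arithmetic (stand-in `AttrStyle`, not the libsyntax one):

    // `/// x` desugars to `#` plus a bracketed body (2 trees);
    // `//! x` desugars to `#`, `!`, plus the body (3 trees).
    enum AttrStyle {
        Outer,
        Inner,
    }

    fn doc_comment_len(style: AttrStyle) -> usize {
        match style {
            AttrStyle::Outer => 2,
            AttrStyle::Inner => 3,
        }
    }

    fn main() {
        assert_eq!(doc_comment_len(AttrStyle::Outer), 2);
        assert_eq!(doc_comment_len(AttrStyle::Inner), 3);
    }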
@@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
                                    token_tree: &[TokenTree])
                                    -> Box<MacResult+'cx> {
     let code = match (token_tree.len(), token_tree.get(0)) {
-        (1, Some(&ast::TtToken(_, token::Ident(code, _)))) => code,
+        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
         _ => unreachable!()
     };
 
@@ -92,12 +92,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
           token_tree.get(1),
           token_tree.get(2)
     ) {
-        (1, Some(&ast::TtToken(_, token::Ident(ref code, _))), None, None) => {
+        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
             (code, None)
         },
-        (3, Some(&ast::TtToken(_, token::Ident(ref code, _))),
-            Some(&ast::TtToken(_, token::Comma)),
-            Some(&ast::TtToken(_, token::Literal(token::StrRaw(description, _), None)))) => {
+        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
+            Some(&TokenTree::Token(_, token::Comma)),
+            Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
             (code, Some(description))
         }
         _ => unreachable!()
@@ -160,9 +160,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
     let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
         (
             // Crate name.
-            &ast::TtToken(_, token::Ident(ref crate_name, _)),
+            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
             // DIAGNOSTICS ident.
-            &ast::TtToken(_, token::Ident(ref name, _))
+            &TokenTree::Token(_, token::Ident(ref name, _))
         ) => (*&crate_name, name),
         _ => unreachable!()
     };
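The diagnostic macros above dispatch on the count and shape of their argument token trees: a lone error code, or `code, "description"`. A toy mirror of that tuple dispatch, with string stand-ins for tokens:

    // Stand-in dispatch on (length, first, second, third), as above.
    fn register(args: &[&str]) -> Option<(String, Option<String>)> {
        match (args.len(), args.get(0), args.get(1), args.get(2)) {
            (1, Some(code), None, None) => Some((code.to_string(), None)),
            (3, Some(code), Some(&","), Some(desc)) =>
                Some((code.to_string(), Some(desc.to_string()))),
            _ => None, // unreachable!() in the real macro
        }
    }

    fn main() {
        assert_eq!(register(&["E0123"]), Some(("E0123".to_string(), None)));
        assert!(register(&["E0123", ",", "description"]).is_some());
    }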
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast;
+use ast::{self, TokenTree};
 use codemap::Span;
 use ext::base::*;
 use ext::base;
@@ -17,7 +17,7 @@ use parse::token;
 use parse::token::str_to_ident;
 use ptr::P;
 
-pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
                               -> Box<base::MacResult+'cx> {
     if !cx.ecfg.enable_concat_idents() {
         feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
@@ -32,7 +32,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                ast::TtToken(_, token::Comma) => {},
+                TokenTree::Token(_, token::Comma) => {},
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::expr(sp);
@@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
             }
         } else {
             match *e {
-                ast::TtToken(_, token::Ident(ident, _)) => {
+                TokenTree::Token(_, token::Ident(ident, _)) => {
                     res_str.push_str(&ident.name.as_str())
                 },
                 _ => {
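`concat_idents!` above walks its arguments expecting identifiers at even positions and commas at odd ones (`i & 1 == 1`). A runnable toy version of the same walk over string stand-ins:

    fn concat_idents(tokens: &[&str]) -> Result<String, &'static str> {
        let mut res = String::new();
        for (i, tok) in tokens.iter().enumerate() {
            if i & 1 == 1 {
                // Odd positions must be the comma separators.
                if *tok != "," {
                    return Err("concat_idents! expecting comma.");
                }
            } else {
                // Even positions are identifier fragments.
                res.push_str(tok);
            }
        }
        Ok(res)
    }

    fn main() {
        assert_eq!(concat_idents(&["foo", ",", "bar"]).unwrap(), "foobar");
        assert!(concat_idents(&["foo", "bar"]).is_err());
    }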
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast;
+use ast::{self, TokenTree};
 use codemap::Span;
 use ext::base::ExtCtxt;
 use ext::base;
@@ -71,67 +71,69 @@ pub mod rt {
 
     impl ToTokens for ast::Ident {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(DUMMY_SP, token::Ident(*self, token::Plain))]
+            vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))]
         }
     }
 
     impl ToTokens for ast::Path {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))]
+            vec![TokenTree::Token(DUMMY_SP,
+                                  token::Interpolated(token::NtPath(Box::new(self.clone()))))]
         }
     }
 
     impl ToTokens for ast::Ty {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(self.span, token::Interpolated(token::NtTy(P(self.clone()))))]
+            vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))]
         }
     }
 
     impl ToTokens for ast::Block {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))]
+            vec![TokenTree::Token(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))]
         }
     }
 
     impl ToTokens for ast::Generics {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))]
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))]
         }
     }
 
     impl ToTokens for ast::WhereClause {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtWhereClause(self.clone())))]
+            vec![TokenTree::Token(DUMMY_SP,
+                                  token::Interpolated(token::NtWhereClause(self.clone())))]
         }
     }
 
     impl ToTokens for P<ast::Item> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(self.span, token::Interpolated(token::NtItem(self.clone())))]
+            vec![TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))]
         }
     }
 
     impl ToTokens for P<ast::ImplItem> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(self.span, token::Interpolated(token::NtImplItem(self.clone())))]
+            vec![TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))]
         }
     }
 
     impl ToTokens for P<ast::TraitItem> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(self.span, token::Interpolated(token::NtTraitItem(self.clone())))]
+            vec![TokenTree::Token(self.span, token::Interpolated(token::NtTraitItem(self.clone())))]
         }
     }
 
     impl ToTokens for P<ast::Stmt> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
             let mut tts = vec![
-                ast::TtToken(self.span, token::Interpolated(token::NtStmt(self.clone())))
+                TokenTree::Token(self.span, token::Interpolated(token::NtStmt(self.clone())))
             ];
 
             // Some statements require a trailing semicolon.
             if classify::stmt_ends_with_semi(&self.node) {
-                tts.push(ast::TtToken(self.span, token::Semi));
+                tts.push(TokenTree::Token(self.span, token::Semi));
             }
 
             tts
@@ -140,19 +142,19 @@ pub mod rt {
 
     impl ToTokens for P<ast::Expr> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(self.span, token::Interpolated(token::NtExpr(self.clone())))]
+            vec![TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))]
         }
     }
 
     impl ToTokens for P<ast::Pat> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(self.span, token::Interpolated(token::NtPat(self.clone())))]
+            vec![TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))]
        }
     }
 
     impl ToTokens for ast::Arm {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))]
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))]
         }
     }
 
@@ -173,12 +175,12 @@ pub mod rt {
         };
     }
 
-    impl_to_tokens_slice! { ast::Ty, [ast::TtToken(DUMMY_SP, token::Comma)] }
+    impl_to_tokens_slice! { ast::Ty, [TokenTree::Token(DUMMY_SP, token::Comma)] }
     impl_to_tokens_slice! { P<ast::Item>, [] }
 
     impl ToTokens for P<ast::MetaItem> {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))]
+            vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))]
         }
     }
 
@@ -186,11 +188,11 @@ pub mod rt {
         fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
             let mut r = vec![];
             // FIXME: The spans could be better
-            r.push(ast::TtToken(self.span, token::Pound));
+            r.push(TokenTree::Token(self.span, token::Pound));
             if self.node.style == ast::AttrStyle::Inner {
-                r.push(ast::TtToken(self.span, token::Not));
+                r.push(TokenTree::Token(self.span, token::Not));
             }
-            r.push(ast::TtDelimited(self.span, Rc::new(ast::Delimited {
+            r.push(TokenTree::Delimited(self.span, Rc::new(ast::Delimited {
                 delim: token::Bracket,
                 open_span: self.span,
                 tts: self.node.value.to_tokens(cx),
@@ -210,7 +212,7 @@ pub mod rt {
 
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![ast::TtDelimited(DUMMY_SP, Rc::new(ast::Delimited {
+            vec![TokenTree::Delimited(DUMMY_SP, Rc::new(ast::Delimited {
                 delim: token::Paren,
                 open_span: DUMMY_SP,
                 tts: vec![],
@@ -278,7 +280,7 @@ pub mod rt {
         fn parse_item(&self, s: String) -> P<ast::Item>;
         fn parse_expr(&self, s: String) -> P<ast::Expr>;
         fn parse_stmt(&self, s: String) -> P<ast::Stmt>;
-        fn parse_tts(&self, s: String) -> Vec<ast::TokenTree>;
+        fn parse_tts(&self, s: String) -> Vec<TokenTree>;
     }
 
     impl<'a> ExtParseUtils for ExtCtxt<'a> {
@@ -305,7 +307,7 @@ pub mod rt {
                             self.parse_sess())
         }
 
-        fn parse_tts(&self, s: String) -> Vec<ast::TokenTree> {
+        fn parse_tts(&self, s: String) -> Vec<TokenTree> {
            parse::parse_tts_from_source_str("<quote expansion>".to_string(),
                                             s,
                                             self.cfg(),
@@ -316,7 +318,7 @@ pub mod rt {
 
 pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt,
                                 sp: Span,
-                                tts: &[ast::TokenTree])
+                                tts: &[TokenTree])
                                 -> Box<base::MacResult+'cx> {
     let (cx_expr, expr) = expand_tts(cx, sp, tts);
     let expanded = expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]);
@@ -325,7 +327,7 @@ pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt,
 
 pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt,
                               sp: Span,
-                              tts: &[ast::TokenTree])
+                              tts: &[TokenTree])
                               -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec!(), tts);
     base::MacEager::expr(expanded)
@@ -333,7 +335,7 @@ pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt,
 
 pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt,
                               sp: Span,
-                              tts: &[ast::TokenTree])
+                              tts: &[TokenTree])
                               -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec!(), tts);
     base::MacEager::expr(expanded)
@@ -341,7 +343,7 @@ pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt,
 
 pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt,
                              sp: Span,
-                             tts: &[ast::TokenTree])
+                             tts: &[TokenTree])
                              -> Box<base::MacResult+'cx> {
     let expanded = expand_parse_call(cx, sp, "parse_pat_panic", vec!(), tts);
     base::MacEager::expr(expanded)
@@ -349,7 +351,7 @@ pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt,
 
 pub fn expand_quote_arm(cx: &mut ExtCtxt,
                         sp: Span,
-                        tts: &[ast::TokenTree])
+                        tts: &[TokenTree])
                         -> Box<base::MacResult+'static> {
     let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec!(), tts);
     base::MacEager::expr(expanded)
@@ -357,7 +359,7 @@ pub fn expand_quote_arm(cx: &mut ExtCtxt,
 
 pub fn expand_quote_ty(cx: &mut ExtCtxt,
                        sp: Span,
-                       tts: &[ast::TokenTree])
+                       tts: &[TokenTree])
                        -> Box<base::MacResult+'static> {
     let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec!(), tts);
     base::MacEager::expr(expanded)
@@ -365,7 +367,7 @@ pub fn expand_quote_ty(cx: &mut ExtCtxt,
 
 pub fn expand_quote_stmt(cx: &mut ExtCtxt,
                          sp: Span,
-                         tts: &[ast::TokenTree])
+                         tts: &[TokenTree])
                          -> Box<base::MacResult+'static> {
     let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec!(), tts);
     base::MacEager::expr(expanded)
@@ -373,7 +375,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
 
 pub fn expand_quote_attr(cx: &mut ExtCtxt,
                          sp: Span,
-                         tts: &[ast::TokenTree])
+                         tts: &[TokenTree])
                          -> Box<base::MacResult+'static> {
     let expanded = expand_parse_call(cx, sp, "parse_attribute_panic",
                                      vec!(cx.expr_bool(sp, true)), tts);
@@ -383,7 +385,7 @@ pub fn expand_quote_attr(cx: &mut ExtCtxt,
 
 pub fn expand_quote_matcher(cx: &mut ExtCtxt,
                             sp: Span,
-                            tts: &[ast::TokenTree])
+                            tts: &[TokenTree])
                             -> Box<base::MacResult+'static> {
     let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
     let mut vector = mk_stmts_let(cx, sp);
@@ -423,6 +425,11 @@ fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
                  vec!(e_str))
 }
 
+fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
+    let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext("TokenTree"), id_ext(name));
+    cx.expr_path(cx.path_global(sp, idents))
+}
+
 fn mk_ast_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
     let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext(name));
     cx.expr_path(cx.path_global(sp, idents))
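The new `mk_tt_path` helper mirrors `mk_ast_path` but inserts the extra `TokenTree` segment, so code generated by `quote!` names variants as `::syntax::ast::TokenTree::Token` and so on. A string-level sketch of the path it assembles (the real helper builds an AST path expression, not a string):

    fn mk_tt_path(name: &str) -> String {
        // Same segments as the helper above, joined as a global path.
        let idents = ["syntax", "ast", "TokenTree", name];
        format!("::{}", idents.join("::"))
    }

    fn main() {
        assert_eq!(mk_tt_path("Token"), "::syntax::ast::TokenTree::Token");
        assert_eq!(mk_tt_path("Sequence"), "::syntax::ast::TokenTree::Sequence");
    }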
@@ -591,9 +598,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
     mk_token_path(cx, sp, name)
 }
 
-fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<ast::Stmt>> {
+fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<P<ast::Stmt>> {
     match *tt {
-        ast::TtToken(sp, SubstNt(ident, _)) => {
+        TokenTree::Token(sp, SubstNt(ident, _)) => {
             // tt.extend($ident.to_tokens(ext_cx))
 
             let e_to_toks =
@@ -612,17 +619,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a
 
             vec!(cx.stmt_expr(e_push))
         }
-        ref tt @ ast::TtToken(_, MatchNt(..)) if !matcher => {
+        ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => {
             let mut seq = vec![];
             for i in 0..tt.len() {
                 seq.push(tt.get_tt(i));
             }
             statements_mk_tts(cx, &seq[..], matcher)
         }
-        ast::TtToken(sp, ref tok) => {
+        TokenTree::Token(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,
-                                     mk_ast_path(cx, sp, "TtToken"),
+                                     mk_tt_path(cx, sp, "Token"),
                                      vec!(e_sp, expr_mk_token(cx, sp, tok)));
             let e_push =
                 cx.expr_method_call(sp,
@@ -631,16 +638,16 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a
                                     vec!(e_tok));
             vec!(cx.stmt_expr(e_push))
         },
-        ast::TtDelimited(_, ref delimed) => {
+        TokenTree::Delimited(_, ref delimed) => {
             statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter()
                 .chain(delimed.tts.iter()
                                   .flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
                 .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher))
                 .collect()
         },
-        ast::TtSequence(sp, ref seq) => {
+        TokenTree::Sequence(sp, ref seq) => {
             if !matcher {
-                panic!("TtSequence in quote!");
+                panic!("TokenTree::Sequence in quote!");
             }
 
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
@@ -671,7 +678,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a
                                               id_ext("new")],
                                          vec![e_seq_struct]);
             let e_tok = cx.expr_call(sp,
-                                     mk_ast_path(cx, sp, "TtSequence"),
+                                     mk_tt_path(cx, sp, "Sequence"),
                                      vec!(e_sp, e_rc_new));
             let e_push =
                 cx.expr_method_call(sp,
@@ -683,8 +690,8 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a
     }
 }
 
-fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree])
-                            -> (P<ast::Expr>, Vec<ast::TokenTree>) {
+fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
+                            -> (P<ast::Expr>, Vec<TokenTree>) {
     // NB: It appears that the main parser loses its mind if we consider
     // $foo as a SubstNt during the main parse, so we have to re-parse
     // under quote_depth > 0. This is silly and should go away; the _guess_ is
@@ -746,7 +753,7 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<P<ast::Stmt>> {
     vec!(stmt_let_sp, stmt_let_tt)
 }
 
-fn statements_mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], matcher: bool) -> Vec<P<ast::Stmt>> {
+fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<P<ast::Stmt>> {
     let mut ss = Vec::new();
     for tt in tts {
         ss.extend(statements_mk_tt(cx, tt, matcher));
@@ -754,7 +761,7 @@ fn statements_mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], matcher: bool) -> Vec
     ss
 }
 
-fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree])
               -> (P<ast::Expr>, P<ast::Expr>) {
     let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
 
@@ -790,7 +797,7 @@ fn expand_parse_call(cx: &ExtCtxt,
                      sp: Span,
                      parse_method: &str,
                      arg_exprs: Vec<P<ast::Expr>> ,
-                     tts: &[ast::TokenTree]) -> P<ast::Expr> {
+                     tts: &[TokenTree]) -> P<ast::Expr> {
     let (cx_expr, tts_expr) = expand_tts(cx, sp, tts);
 
     let cfg_call = || cx.expr_method_call(
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast;
+use ast::TokenTree;
 use codemap::Span;
 use ext::base::ExtCtxt;
 use ext::base;
@@ -18,7 +18,7 @@ use parse::token::keywords;
 
 pub fn expand_trace_macros(cx: &mut ExtCtxt,
                            sp: Span,
-                           tt: &[ast::TokenTree])
+                           tt: &[TokenTree])
                            -> Box<base::MacResult+'static> {
     if !cx.ecfg.enable_trace_macros() {
         feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
@@ -30,10 +30,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
     }
 
     match (tt.len(), tt.first()) {
-        (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::True) => {
+        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::True) => {
             cx.set_trace_macros(true);
         }
-        (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::False) => {
+        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::False) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
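`trace_macros!` above accepts exactly one `true` or `false` token and rejects everything else. A toy mirror of the `(len, first)` dispatch:

    fn parse_flag(tokens: &[&str]) -> Result<bool, &'static str> {
        match (tokens.len(), tokens.first()) {
            (1, Some(&"true")) => Ok(true),
            (1, Some(&"false")) => Ok(false),
            _ => Err("trace_macros! accepts only `true` or `false`"),
        }
    }

    fn main() {
        assert_eq!(parse_flag(&["true"]), Ok(true));
        assert!(parse_flag(&["true", "false"]).is_err());
    }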
@@ -80,7 +80,6 @@ use self::TokenTreeOrTokenTreeVec::*;
 
 use ast;
 use ast::{TokenTree, Name};
-use ast::{TtDelimited, TtSequence, TtToken};
 use codemap::{BytePos, mk_sp, Span};
 use codemap;
 use parse::lexer::*; //resolve bug?
@@ -146,16 +145,16 @@ pub struct MatcherPos {
 pub fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count + match elt {
-            &TtSequence(_, ref seq) => {
+            &TokenTree::Sequence(_, ref seq) => {
                 seq.num_captures
             }
-            &TtDelimited(_, ref delim) => {
+            &TokenTree::Delimited(_, ref delim) => {
                 count_names(&delim.tts)
             }
-            &TtToken(_, MatchNt(..)) => {
+            &TokenTree::Token(_, MatchNt(..)) => {
                 1
             }
-            &TtToken(_, _) => 0,
+            &TokenTree::Token(_, _) => 0,
         }
     })
 }
@@ -205,17 +204,17 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
     fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
              ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize) {
         match m {
-            &TtSequence(_, ref seq) => {
+            &TokenTree::Sequence(_, ref seq) => {
                 for next_m in &seq.tts {
                     n_rec(p_s, next_m, res, ret_val, idx)
                 }
            }
-            &TtDelimited(_, ref delim) => {
+            &TokenTree::Delimited(_, ref delim) => {
                 for next_m in &delim.tts {
                     n_rec(p_s, next_m, res, ret_val, idx)
                 }
            }
-            &TtToken(sp, MatchNt(bind_name, _, _, _)) => {
+            &TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => {
                 match ret_val.entry(bind_name.name) {
                     Vacant(spot) => {
                         spot.insert(res[*idx].clone());
@@ -229,8 +228,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                     }
                 }
             }
-            &TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
-            &TtToken(_, _) => (),
+            &TokenTree::Token(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
+            &TokenTree::Token(_, _) => (),
         }
     }
     let mut ret_val = HashMap::new();
@@ -362,7 +361,7 @@ pub fn parse(sess: &ParseSess,
             } else {
                 match ei.top_elts.get_tt(idx) {
                     /* need to descend into sequence */
-                    TtSequence(sp, seq) => {
+                    TokenTree::Sequence(sp, seq) => {
                         if seq.op == ast::ZeroOrMore {
                             let mut new_ei = ei.clone();
                             new_ei.match_cur += seq.num_captures;
@@ -388,10 +387,10 @@ pub fn parse(sess: &ParseSess,
                             match_hi: ei_t.match_cur + seq.num_captures,
                             up: Some(ei_t),
                             sp_lo: sp.lo,
-                            top_elts: Tt(TtSequence(sp, seq)),
+                            top_elts: Tt(TokenTree::Sequence(sp, seq)),
                         }));
                     }
-                    TtToken(_, MatchNt(..)) => {
+                    TokenTree::Token(_, MatchNt(..)) => {
                         // Built-in nonterminals never start with these tokens,
                         // so we can eliminate them from consideration.
                         match tok {
@@ -399,10 +398,10 @@ pub fn parse(sess: &ParseSess,
                             _ => bb_eis.push(ei),
                         }
                     }
-                    TtToken(sp, SubstNt(..)) => {
+                    TokenTree::Token(sp, SubstNt(..)) => {
                         return Error(sp, "missing fragment specifier".to_string())
                     }
-                    seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
+                    seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
                         let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
                         let idx = ei.idx;
                         ei.stack.push(MatcherTtFrame {
@@ -412,7 +411,7 @@ pub fn parse(sess: &ParseSess,
                         ei.idx = 0;
                         cur_eis.push(ei);
                     }
-                    TtToken(_, ref t) => {
+                    TokenTree::Token(_, ref t) => {
                         let mut ei_t = ei.clone();
                         if token_name_eq(t,&tok) {
                             ei_t.idx += 1;
@@ -440,7 +439,7 @@ pub fn parse(sess: &ParseSess,
         if (!bb_eis.is_empty() && !next_eis.is_empty())
             || bb_eis.len() > 1 {
             let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
-                TtToken(_, MatchNt(bind, name, _, _)) => {
+                TokenTree::Token(_, MatchNt(bind, name, _, _)) => {
                     format!("{} ('{}')", name, bind)
                 }
                 _ => panic!()
@@ -468,7 +467,7 @@ pub fn parse(sess: &ParseSess,
 
             let mut ei = bb_eis.pop().unwrap();
             match ei.top_elts.get_tt(ei.idx) {
-                TtToken(span, MatchNt(_, ident, _, _)) => {
+                TokenTree::Token(span, MatchNt(_, ident, _, _)) => {
                     let match_cur = ei.match_cur;
                     (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
                         parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
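`count_names` above tallies matcher bindings recursively: a sequence contributes its precomputed `num_captures`, a delimited group recurses into its children, a `MatchNt` leaf counts one, and any other token counts zero. A runnable toy mirror with a stand-in tree type:

    enum Tt {
        MatchNt,            // a `$name:kind` matcher binding
        Plain,              // any other single token
        Delimited(Vec<Tt>), // a bracketed group to recurse into
    }

    fn count_names(ms: &[Tt]) -> usize {
        ms.iter().fold(0, |count, elt| {
            count + match elt {
                Tt::MatchNt => 1,
                Tt::Plain => 0,
                Tt::Delimited(tts) => count_names(tts),
            }
        })
    }

    fn main() {
        let m = vec![Tt::MatchNt, Tt::Delimited(vec![Tt::Plain, Tt::MatchNt])];
        assert_eq!(count_names(&m), 2);
    }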
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::{self, TokenTree, TtDelimited, TtSequence, TtToken};
+use ast::{self, TokenTree};
 use codemap::{Span, DUMMY_SP};
 use ext::base::{ExtCtxt, MacResult, SyntaxExtension};
 use ext::base::{NormalTT, TTMacroExpander};
@@ -26,6 +26,7 @@ use util::small_vector::SmallVector;
 
 use std::cell::RefCell;
 use std::rc::Rc;
+use std::iter::once;
 
 struct ParserAnyMacro<'a> {
     parser: RefCell<Parser<'a>>,
@@ -171,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             match **lhs {
                 MatchedNonterminal(NtTT(ref lhs_tt)) => {
                     let lhs_tt = match **lhs_tt {
-                        TtDelimited(_, ref delim) => &delim.tts[..],
+                        TokenTree::Delimited(_, ref delim) => &delim.tts[..],
                         _ => panic!(cx.span_fatal(sp, "malformed macro lhs"))
                     };
 
@@ -182,7 +183,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                         MatchedNonterminal(NtTT(ref tt)) => {
                             match **tt {
                                 // ignore delimiters
-                                TtDelimited(_, ref delimed) => delimed.tts.clone(),
+                                TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
                                 _ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")),
                             }
                         },
@@ -243,21 +244,21 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
     let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
     let argument_gram = vec!(
-        TtSequence(DUMMY_SP,
+        TokenTree::Sequence(DUMMY_SP,
             Rc::new(ast::SequenceRepetition {
                 tts: vec![
-                    TtToken(DUMMY_SP, match_lhs_tok),
-                    TtToken(DUMMY_SP, token::FatArrow),
-                    TtToken(DUMMY_SP, match_rhs_tok)],
+                    TokenTree::Token(DUMMY_SP, match_lhs_tok),
+                    TokenTree::Token(DUMMY_SP, token::FatArrow),
+                    TokenTree::Token(DUMMY_SP, match_rhs_tok)],
                 separator: Some(token::Semi),
                 op: ast::OneOrMore,
                 num_captures: 2
             })),
         //to phase into semicolon-termination instead of
        //semicolon-separation
-        TtSequence(DUMMY_SP,
+        TokenTree::Sequence(DUMMY_SP,
             Rc::new(ast::SequenceRepetition {
-                tts: vec![TtToken(DUMMY_SP, token::Semi)],
+                tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
                 separator: None,
                 op: ast::ZeroOrMore,
                 num_captures: 0
@@ -307,14 +308,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
 }
 
 fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) {
-    // lhs is going to be like MatchedNonterminal(NtTT(TtDelimited(...))), where the entire lhs is
-    // those tts. Or, it can be a "bare sequence", not wrapped in parens.
+    // lhs is going to be like MatchedNonterminal(NtTT(TokenTree::Delimited(...))), where the
+    // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
     match lhs {
         &MatchedNonterminal(NtTT(ref inner)) => match &**inner {
-            &TtDelimited(_, ref tts) => {
+            &TokenTree::Delimited(_, ref tts) => {
                 check_matcher(cx, tts.tts.iter(), &Eof);
             },
-            tt @ &TtSequence(..) => {
+            tt @ &TokenTree::Sequence(..) => {
                 check_matcher(cx, Some(tt).into_iter(), &Eof);
             },
             _ => cx.span_err(sp, "Invalid macro matcher; matchers must be contained \
@@ -327,7 +328,7 @@ fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) {
     // after parsing/expansion. we can report every error in every macro this way.
 }
 
-// returns the last token that was checked, for TtSequence. this gets used later on.
+// returns the last token that was checked, for TokenTree::Sequence. this gets used later on.
 fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
     -> Option<(Span, Token)> where I: Iterator<Item=&'a TokenTree> {
     use print::pprust::token_to_string;
@@ -338,7 +339,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
     let mut tokens = matcher.peekable();
     while let Some(token) = tokens.next() {
         last = match *token {
-            TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
+            TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
                 // ii. If T is a simple NT, look ahead to the next token T' in
                 // M. If T' is in the set FOLLOW(NT), continue. Else; reject.
                 if can_be_followed_by_any(&frag_spec.name.as_str()) {
@@ -346,9 +347,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
                 } else {
                     let next_token = match tokens.peek() {
                         // If T' closes a complex NT, replace T' with F
-                        Some(&&TtToken(_, CloseDelim(_))) => follow.clone(),
-                        Some(&&TtToken(_, ref tok)) => tok.clone(),
-                        Some(&&TtSequence(sp, _)) => {
+                        Some(&&TokenTree::Token(_, CloseDelim(_))) => follow.clone(),
+                        Some(&&TokenTree::Token(_, ref tok)) => tok.clone(),
+                        Some(&&TokenTree::Sequence(sp, _)) => {
                             // Be conservative around sequences: to be
                             // more specific, we would need to
                             // consider FIRST sets, but also the
@@ -366,12 +367,16 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
                             Eof
                         },
                         // die next iteration
-                        Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+                        Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(),
                         // else, we're at the end of the macro or sequence
                         None => follow.clone()
                     };
 
-                    let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() };
+                    let tok = if let TokenTree::Token(_, ref tok) = *token {
+                        tok
+                    } else {
+                        unreachable!()
+                    };
 
                     // If T' is in the set FOLLOW(NT), continue. Else, reject.
                     match (&next_token, is_in_follow(cx, &next_token, &frag_spec.name.as_str())) {
@@ -391,7 +396,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
                     }
                 }
             },
-            TtSequence(sp, ref seq) => {
+            TokenTree::Sequence(sp, ref seq) => {
                 // iii. Else, T is a complex NT.
                 match seq.separator {
                     // If T has the form $(...)U+ or $(...)U* for some token U,
@@ -408,8 +413,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
                     // but conservatively correct.
                     Some((span, tok)) => {
                         let fol = match tokens.peek() {
-                            Some(&&TtToken(_, ref tok)) => tok.clone(),
-                            Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+                            Some(&&TokenTree::Token(_, ref tok)) => tok.clone(),
+                            Some(&&TokenTree::Delimited(_, ref delim)) =>
+                                delim.close_token(),
                             Some(_) => {
                                 cx.span_err(sp, "sequence repetition followed by \
                                             another sequence repetition, which is not allowed");
@@ -417,7 +423,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
                             },
                            None => Eof
                         };
-                        check_matcher(cx, Some(&TtToken(span, tok.clone())).into_iter(),
+                        check_matcher(cx, once(&TokenTree::Token(span, tok.clone())),
                                       &fol)
                     },
                     None => last,
@@ -428,8 +434,8 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
                     // sequence. If it accepts, continue, else, reject.
                     None => {
                         let fol = match tokens.peek() {
-                            Some(&&TtToken(_, ref tok)) => tok.clone(),
-                            Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+                            Some(&&TokenTree::Token(_, ref tok)) => tok.clone(),
+                            Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(),
                             Some(_) => {
                                 cx.span_err(sp, "sequence repetition followed by another \
                                             sequence repetition, which is not allowed");
@@ -441,11 +447,11 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
                     }
                 }
             },
-            TtToken(..) => {
+            TokenTree::Token(..) => {
                 // i. If T is not an NT, continue.
                 continue
             },
-            TtDelimited(_, ref tts) => {
+            TokenTree::Delimited(_, ref tts) => {
                 // if we don't pass in that close delimiter, we'll incorrectly consider the matcher
                 // `{ $foo:ty }` as having a follow that isn't `RBrace`
                 check_matcher(cx, tts.tts.iter(), &tts.close_token())
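Besides the renaming, this file swaps `Some(&tt).into_iter()` for `once(&tt)` (hence the new `use std::iter::once;` import). Both yield an iterator over exactly one item; `once` simply states the intent directly:

    use std::iter::once;

    fn main() {
        let x = 1;
        // The two single-item iterators the checker swapped between:
        let a: Vec<&i32> = Some(&x).into_iter().collect();
        let b: Vec<&i32> = once(&x).collect();
        assert_eq!(a, b);
    }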
@@ -10,7 +10,7 @@
 use self::LockstepIterSize::*;

 use ast;
-use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident, Name};
+use ast::{TokenTree, Ident, Name};
 use codemap::{Span, DUMMY_SP};
 use diagnostic::SpanHandler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@@ -53,7 +53,7 @@ pub struct TtReader<'a> {
 }

 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
 /// (and should) be None.
 pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
                          interp: Option<HashMap<Name, Rc<NamedMatch>>>,
@@ -67,7 +67,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
 /// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
 ///
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
 /// (and should) be None.
 pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
                                        interp: Option<HashMap<Name, Rc<NamedMatch>>>,
@@ -78,7 +78,7 @@ pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
-            forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
+            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
                 tts: src,
                 // doesn't matter. This merely holds the root unzipping.
                 separator: None, op: ast::ZeroOrMore, num_captures: 0
@@ -151,17 +151,17 @@ impl Add for LockstepIterSize {

 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
     match *t {
-        TtDelimited(_, ref delimed) => {
+        TokenTree::Delimited(_, ref delimed) => {
             delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TtSequence(_, ref seq) => {
+        TokenTree::Sequence(_, ref seq) => {
             seq.tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) =>
+        TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) =>
             match lookup_cur_matched(r, name) {
                 Some(matched) => match *matched {
                     MatchedNonterminal(_) => LisUnconstrained,
@@ -169,7 +169,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
                 },
                 _ => LisUnconstrained
             },
-        TtToken(..) => LisUnconstrained,
+        TokenTree::Token(..) => LisUnconstrained,
     }
 }
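The same fold-over-subtrees shape works for any recursive walk of a token tree; a minimal sketch under the renamed variants (`count_tokens` is a hypothetical helper, not part of the patch):

    use ast::TokenTree;

    /// Counts leaf tokens in a tree, folding over subtrees the same way
    /// `lockstep_iter_size` folds a size.
    fn count_tokens(tt: &TokenTree) -> usize {
        match *tt {
            TokenTree::Token(..) => 1,
            TokenTree::Delimited(_, ref delimed) =>
                delimed.tts.iter().fold(0, |n, tt| n + count_tokens(tt)),
            TokenTree::Sequence(_, ref seq) =>
                seq.tts.iter().fold(0, |n, tt| n + count_tokens(tt)),
        }
    }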
@@ -232,17 +232,17 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             }
         }
     }
-    loop { /* because it's easiest, this handles `TtDelimited` not starting
-              with a `TtToken`, even though it won't happen */
+    loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting
+              with a `TokenTree::Token`, even though it won't happen */
         let t = {
             let frame = r.stack.last().unwrap();
             // FIXME(pcwalton): Bad copy.
             frame.forest.get_tt(frame.idx)
         };
         match t {
-            TtSequence(sp, seq) => {
+            TokenTree::Sequence(sp, seq) => {
                 // FIXME(pcwalton): Bad copy.
-                match lockstep_iter_size(&TtSequence(sp, seq.clone()),
+                match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
                                          r) {
                     LisUnconstrained => {
                         panic!(r.sp_diag.span_fatal(
@@ -272,20 +272,20 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                             idx: 0,
                             dotdotdoted: true,
                             sep: seq.separator.clone(),
-                            forest: TtSequence(sp, seq),
+                            forest: TokenTree::Sequence(sp, seq),
                         });
                     }
                 }
             }
             // FIXME #2887: think about span stuff here
-            TtToken(sp, SubstNt(ident, namep)) => {
+            TokenTree::Token(sp, SubstNt(ident, namep)) => {
                 r.stack.last_mut().unwrap().idx += 1;
                 match lookup_cur_matched(r, ident) {
                     None => {
                         r.cur_span = sp;
                         r.cur_tok = SubstNt(ident, namep);
                         return ret_val;
-                        // this can't be 0 length, just like TtDelimited
+                        // this can't be 0 length, just like TokenTree::Delimited
                     }
                     Some(cur_matched) => {
                         match *cur_matched {
@@ -313,8 +313,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                     }
                 }
             }
-            // TtDelimited or any token that can be unzipped
-            seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
+            // TokenTree::Delimited or any token that can be unzipped
+            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
                 // do not advance the idx yet
                 r.stack.push(TtFrame {
                     forest: seq,
@@ -324,15 +324,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 });
                 // if this could be 0-length, we'd need to potentially recur here
             }
-            TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
+            TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
                 r.stack.push(TtFrame {
-                    forest: TtToken(sp, DocComment(name)),
+                    forest: TokenTree::Token(sp, DocComment(name)),
                     idx: 0,
                     dotdotdoted: false,
                     sep: None
                 });
             }
-            TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
+            TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
                 r.stack.last_mut().unwrap().idx += 1;

                 if r.imported_from.is_some() {
@@ -344,7 +344,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {

                 // otherwise emit nothing and proceed to the next token
             }
-            TtToken(sp, tok) => {
+            TokenTree::Token(sp, tok) => {
                 r.cur_span = sp;
                 r.cur_tok = tok;
                 r.stack.last_mut().unwrap().idx += 1;
@@ -586,10 +586,10 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {

 pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
     match *tt {
-        TtToken(span, ref tok) =>
-            TtToken(span, fld.fold_token(tok.clone())),
-        TtDelimited(span, ref delimed) => {
-            TtDelimited(span, Rc::new(
+        TokenTree::Token(span, ref tok) =>
+            TokenTree::Token(span, fld.fold_token(tok.clone())),
+        TokenTree::Delimited(span, ref delimed) => {
+            TokenTree::Delimited(span, Rc::new(
                 Delimited {
                     delim: delimed.delim,
                     open_span: delimed.open_span,
@@ -598,8 +598,8 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
                 }
             ))
         },
-        TtSequence(span, ref seq) =>
-            TtSequence(span,
+        TokenTree::Sequence(span, ref seq) =>
+            TokenTree::Sequence(span,
                        Rc::new(SequenceRepetition {
                            tts: fld.fold_tts(&seq.tts),
                            separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
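Where `noop_fold_tt` rebuilds every node, a flat filter over a token-tree list follows the same variant spelling; a small sketch (`strip_doc_comments` is a hypothetical helper, assuming the in-crate `ast` and `parse::token` paths):

    use ast::TokenTree;
    use parse::token;

    /// Drops top-level doc-comment tokens from a token-tree list.
    fn strip_doc_comments(tts: Vec<TokenTree>) -> Vec<TokenTree> {
        tts.into_iter()
           .filter(|tt| match *tt {
               TokenTree::Token(_, token::DocComment(..)) => false,
               _ => true,
           })
           .collect()
    }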
@@ -669,7 +669,7 @@ mod tests {
     use std::rc::Rc;
     use codemap::{Span, BytePos, Pos, Spanned, NO_EXPANSION};
     use owned_slice::OwnedSlice;
-    use ast;
+    use ast::{self, TokenTree};
     use abi;
     use attr::{first_attr_value_str_by_name, AttrMetaMethods};
     use parse;
@@ -739,10 +739,10 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&ast::TtToken(_, token::Ident(name_macro_rules, token::Plain))),
-                Some(&ast::TtToken(_, token::Not)),
-                Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))),
-                Some(&ast::TtDelimited(_, ref macro_delimed)),
+                Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))),
+                Some(&TokenTree::Token(_, token::Not)),
+                Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))),
+                Some(&TokenTree::Delimited(_, ref macro_delimed)),
             )
             if name_macro_rules.name.as_str() == "macro_rules"
             && name_zip.name.as_str() == "zip" => {
@@ -750,17 +750,17 @@ mod tests {
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
                         3,
-                        Some(&ast::TtDelimited(_, ref first_delimed)),
-                        Some(&ast::TtToken(_, token::FatArrow)),
-                        Some(&ast::TtDelimited(_, ref second_delimed)),
+                        Some(&TokenTree::Delimited(_, ref first_delimed)),
+                        Some(&TokenTree::Token(_, token::FatArrow)),
+                        Some(&TokenTree::Delimited(_, ref second_delimed)),
                     )
                     if macro_delimed.delim == token::Paren => {
                         let tts = &first_delimed.tts[..];
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
-                                Some(&ast::TtToken(_, token::Dollar)),
-                                Some(&ast::TtToken(_, token::Ident(ident, token::Plain))),
+                                Some(&TokenTree::Token(_, token::Dollar)),
+                                Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))),
                             )
                             if first_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
@@ -770,8 +770,8 @@ mod tests {
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
-                                Some(&ast::TtToken(_, token::Dollar)),
-                                Some(&ast::TtToken(_, token::Ident(ident, token::Plain))),
+                                Some(&TokenTree::Token(_, token::Dollar)),
+                                Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))),
                             )
                             if second_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
@@ -790,39 +790,39 @@ mod tests {
         let tts = string_to_tts("fn a (b : i32) { b; }".to_string());

         let expected = vec![
-            ast::TtToken(sp(0, 2),
+            TokenTree::Token(sp(0, 2),
                          token::Ident(str_to_ident("fn"),
                                       token::IdentStyle::Plain)),
-            ast::TtToken(sp(3, 4),
+            TokenTree::Token(sp(3, 4),
                          token::Ident(str_to_ident("a"),
                                       token::IdentStyle::Plain)),
-            ast::TtDelimited(
+            TokenTree::Delimited(
                 sp(5, 14),
                 Rc::new(ast::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
-                        ast::TtToken(sp(6, 7),
+                        TokenTree::Token(sp(6, 7),
                                      token::Ident(str_to_ident("b"),
                                                   token::IdentStyle::Plain)),
-                        ast::TtToken(sp(8, 9),
+                        TokenTree::Token(sp(8, 9),
                                      token::Colon),
-                        ast::TtToken(sp(10, 13),
+                        TokenTree::Token(sp(10, 13),
                                      token::Ident(str_to_ident("i32"),
                                                   token::IdentStyle::Plain)),
                     ],
                     close_span: sp(13, 14),
                 })),
-            ast::TtDelimited(
+            TokenTree::Delimited(
                 sp(15, 21),
                 Rc::new(ast::Delimited {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
-                        ast::TtToken(sp(17, 18),
+                        TokenTree::Token(sp(17, 18),
                                      token::Ident(str_to_ident("b"),
                                                   token::IdentStyle::Plain)),
-                        ast::TtToken(sp(18, 19),
+                        TokenTree::Token(sp(18, 19),
                                      token::Semi)
                     ],
                     close_span: sp(20, 21),
@@ -48,7 +48,6 @@ use ast::{StmtExpr, StmtSemi, StmtMac, VariantData, StructField};
 use ast::{BiSub, StrStyle};
 use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
 use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef};
-use ast::{TtDelimited, TtSequence, TtToken};
 use ast::{Ty, Ty_, TypeBinding, TyMac};
 use ast::{TyFixedLengthVec, TyBareFn, TyTypeof, TyInfer};
 use ast::{TyParam, TyParamBound, TyParen, TyPath, TyPolyTraitRef, TyPtr};
@@ -2428,7 +2427,7 @@ impl<'a> Parser<'a> {
             ));
             let (sep, repeat) = try!(self.parse_sep_and_kleene_op());
             let name_num = macro_parser::count_names(&seq);
-            return Ok(TtSequence(mk_sp(sp.lo, seq_span.hi),
+            return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
                                  Rc::new(SequenceRepetition {
                                      tts: seq,
                                      separator: sep,
@@ -2437,7 +2436,7 @@ impl<'a> Parser<'a> {
             })));
         } else if self.token.is_keyword_allow_following_colon(keywords::Crate) {
             try!(self.bump());
-            return Ok(TtToken(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
+            return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
         } else {
             sp = mk_sp(sp.lo, self.span.hi);
             let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain };
@@ -2459,9 +2458,9 @@ impl<'a> Parser<'a> {
             sp = mk_sp(sp.lo, self.span.hi);
             let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain };
             let nt_kind = try!(self.parse_ident());
-            Ok(TtToken(sp, MatchNt(name, nt_kind, namep, kindp)))
+            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp)))
         } else {
-            Ok(TtToken(sp, SubstNt(name, namep)))
+            Ok(TokenTree::Token(sp, SubstNt(name, namep)))
         }
     }

@@ -2509,7 +2508,7 @@ impl<'a> Parser<'a> {
     /// parse a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> PResult<TokenTree> {
         // FIXME #6994: currently, this is too eager. It
-        // parses token trees but also identifies TtSequence's
+        // parses token trees but also identifies TokenType::Sequence's
         // and token::SubstNt's; it's too early to know yet
         // whether something will be a nonterminal or a seq
         // yet.
@@ -2540,7 +2539,7 @@ impl<'a> Parser<'a> {
                     p.parse_unquoted()
                 }
                 _ => {
                    Ok(TtToken(p.span, try!(p.bump_and_get())))
                }
            }
        }
@@ -2579,7 +2578,7 @@ impl<'a> Parser<'a> {
                 // Expand to cover the entire delimited token tree
                 let span = Span { hi: close_span.hi, ..pre_span };

-                Ok(TtDelimited(span, Rc::new(Delimited {
+                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: delim,
                     open_span: open_span,
                     tts: tts,
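As a usage sketch of the parser entry point above (assuming the era's `parse::parse_tts_from_source_str` helper and `ParseSess::new()`; the `source_to_tts` wrapper is hypothetical, not part of the patch):

    use ast;
    use parse::{self, ParseSess};

    /// Parses a source string into a vector of token trees,
    /// driving `parse_token_tree` under the hood.
    fn source_to_tts(source: String) -> Vec<ast::TokenTree> {
        let sess = ParseSess::new();
        parse::parse_tts_from_source_str("<sketch>".to_string(), source,
                                         Vec::new(), &sess)
    }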
@@ -11,7 +11,7 @@
 pub use self::AnnNode::*;

 use abi;
-use ast;
+use ast::{self, TokenTree};
 use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
 use ast_util;
 use util::parser::AssocOp;
@@ -1452,7 +1452,7 @@ impl<'a> State<'a> {
     /// expression arguments as expressions). It can be done! I think.
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> io::Result<()> {
         match *tt {
-            ast::TtToken(_, ref tk) => {
+            TokenTree::Token(_, ref tk) => {
                 try!(word(&mut self.s, &token_to_string(tk)));
                 match *tk {
                     parse::token::DocComment(..) => {
@@ -1461,14 +1461,14 @@ impl<'a> State<'a> {
                     _ => Ok(())
                 }
             }
-            ast::TtDelimited(_, ref delimed) => {
+            TokenTree::Delimited(_, ref delimed) => {
                 try!(word(&mut self.s, &token_to_string(&delimed.open_token())));
                 try!(space(&mut self.s));
                 try!(self.print_tts(&delimed.tts));
                 try!(space(&mut self.s));
                 word(&mut self.s, &token_to_string(&delimed.close_token()))
             },
-            ast::TtSequence(_, ref seq) => {
+            TokenTree::Sequence(_, ref seq) => {
                 try!(word(&mut self.s, "$("));
                 for tt_elt in &seq.tts {
                     try!(self.print_tt(tt_elt));
@@ -1499,9 +1499,9 @@ impl<'a> State<'a> {
             // There should be no space between the module name and the following `::` in paths,
             // otherwise imported macros get re-parsed from crate metadata incorrectly (#20701)
             suppress_space = match tt {
-                &ast::TtToken(_, token::Ident(_, token::ModName)) |
-                &ast::TtToken(_, token::MatchNt(_, _, _, token::ModName)) |
-                &ast::TtToken(_, token::SubstNt(_, token::ModName)) => true,
+                &TokenTree::Token(_, token::Ident(_, token::ModName)) |
+                &TokenTree::Token(_, token::MatchNt(_, _, _, token::ModName)) |
+                &TokenTree::Token(_, token::SubstNt(_, token::ModName)) => true,
                 _ => false
             }
         }
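Round-tripping through this printer is how token trees are rendered back to text in diagnostics; a small usage sketch (assuming the module's public `tts_to_string` helper, which drives `print_tts`; `render_tts` is hypothetical):

    use ast;
    use print::pprust;

    /// Renders a token-tree slice back to (approximate) source text.
    fn render_tts(tts: &[ast::TokenTree]) -> String {
        pprust::tts_to_string(tts)
    }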
@@ -18,7 +18,7 @@ extern crate rustc;

 use syntax::codemap::Span;
 use syntax::parse::token::{self, str_to_ident, NtExpr, NtPat};
-use syntax::ast::{TokenTree, TtToken, Pat};
+use syntax::ast::{TokenTree, Pat};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax::ext::build::AstBuilder;
 use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
@@ -18,7 +18,7 @@ extern crate syntax;
 extern crate rustc;

 use syntax::codemap::Span;
-use syntax::ast::{TokenTree, TtToken};
+use syntax::ast::TokenTree;
 use syntax::parse::token;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax::ext::build::AstBuilder; // trait for expr_usize
@@ -40,7 +40,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
                      ("I", 1)];

     let text = match args {
-        [TtToken(_, token::Ident(s, _))] => s.to_string(),
+        [TokenTree::Token(_, token::Ident(s, _))] => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);