De-@ TokenTree.
parent 7cf4d8bc44
commit 8f226e5694
7 changed files with 26 additions and 17 deletions
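
Every change in this commit follows the same pattern: the garbage-collected `@Vec<TokenTree>` boxes become reference-counted `Rc<Vec<TokenTree>>` handles, so construction sites wrap their vectors in `Rc::new(...)`, and code that used to rely on implicit `@` copies now borrows or calls `.clone()` explicitly. The sketch below shows that shape in isolation; it uses a hypothetical stand-in enum and current Rust syntax rather than the 2014 dialect of the diff.

use std::rc::Rc;

// Hypothetical stand-in for the token-tree shape touched by this commit; the
// real TokenTree in libsyntax carries spans, tokens, and more variants.
#[derive(Clone, Debug)]
enum Tree {
    Leaf(u32),
    // After the change: a reference-counted shared vector instead of an @-box.
    Delim(Rc<Vec<Tree>>),
}

fn main() {
    let leaves = [Tree::Leaf(1), Tree::Leaf(2)];
    // Same construction pattern as the diff: collect into a Vec, wrap in Rc::new.
    let delim = Tree::Delim(Rc::new(leaves.iter().cloned().collect()));
    // Cloning bumps the reference count; the vector itself is not copied.
    let shared = delim.clone();
    println!("{:?} / {:?}", delim, shared);
}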

@@ -581,14 +581,16 @@ pub enum TokenTree {
     TTTok(Span, ::parse::token::Token),
     // a delimited sequence (the delimiters appear as the first
     // and last elements of the vector)
-    TTDelim(@Vec<TokenTree> ),
+    // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
+    TTDelim(Rc<Vec<TokenTree>>),
 
     // These only make sense for right-hand-sides of MBE macros:
 
     // a kleene-style repetition sequence with a span, a TTForest,
     // an optional separator, and a boolean where true indicates
     // zero or more (..), and false indicates one or more (+).
-    TTSeq(Span, @Vec<TokenTree> , Option<::parse::token::Token>, bool),
+    // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
+    TTSeq(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool),
 
     // a syntactic variable that will be filled in by macro expansion.
     TTNonterminal(Span, Ident)

@@ -13,6 +13,8 @@ use codemap;
 use ext::base;
 use print;
 
+use std::rc::Rc;
+
 pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
                          sp: codemap::Span,
                          tt: &[ast::TokenTree])

@@ -20,7 +22,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
 
     cx.print_backtrace();
     println!("{}", print::pprust::tt_to_str(&ast::TTDelim(
-        @tt.iter().map(|x| (*x).clone()).collect())));
+        Rc::new(tt.iter().map(|x| (*x).clone()).collect()))));
 
     // any so that `log_syntax` can be invoked as an expression and item.
     base::MacResult::dummy_any(sp)

@@ -28,6 +28,7 @@ use print;
 use util::small_vector::SmallVector;
 
 use std::cell::RefCell;
+use std::rc::Rc;
 
 struct ParserAnyMacro<'a> {
     parser: RefCell<Parser<'a>>,

@@ -115,9 +116,9 @@ fn generic_extension(cx: &ExtCtxt,
     if cx.trace_macros() {
         println!("{}! \\{ {} \\}",
                  token::get_ident(name),
-                 print::pprust::tt_to_str(&TTDelim(@arg.iter()
+                 print::pprust::tt_to_str(&TTDelim(Rc::new(arg.iter()
                                                       .map(|x| (*x).clone())
-                                                      .collect())));
+                                                      .collect()))));
     }
 
     // Which arm's failure should we report? (the one furthest along)

@@ -17,12 +17,13 @@ use parse::token::{EOF, INTERPOLATED, IDENT, Token, NtIdent};
 use parse::token;
 use parse::lexer::TokenAndSpan;
 
+use std::rc::Rc;
 use collections::HashMap;
 
 ///an unzipping of `TokenTree`s
 #[deriving(Clone)]
 struct TtFrame {
-    forest: @Vec<ast::TokenTree>,
+    forest: Rc<Vec<ast::TokenTree>>,
     idx: uint,
     dotdotdoted: bool,
     sep: Option<Token>,

@@ -52,7 +53,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
-            forest: @src,
+            forest: Rc::new(src),
             idx: 0,
             dotdotdoted: false,
             sep: None,

@@ -212,7 +213,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
         }
         TTSeq(sp, tts, sep, zerok) => {
             // FIXME(pcwalton): Bad copy.
-            match lockstep_iter_size(&TTSeq(sp, tts, sep.clone(), zerok), r) {
+            match lockstep_iter_size(&TTSeq(sp, tts.clone(), sep.clone(), zerok), r) {
                 LisUnconstrained => {
                     r.sp_diag.span_fatal(
                         sp.clone(), /* blame macro writer */

@@ -16,6 +16,8 @@ use parse::token;
 use owned_slice::OwnedSlice;
 use util::small_vector::SmallVector;
 
+use std::rc::Rc;
+
 // We may eventually want to be able to fold over type parameters, too.
 pub trait Folder {
     fn fold_crate(&mut self, c: Crate) -> Crate {

@@ -375,10 +377,10 @@ pub fn fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
         match *tt {
             TTTok(span, ref tok) =>
                 TTTok(span,maybe_fold_ident(tok,fld)),
-            TTDelim(tts) => TTDelim(@fold_tts(tts.as_slice(), fld)),
-            TTSeq(span, pattern, ref sep, is_optional) =>
+            TTDelim(ref tts) => TTDelim(Rc::new(fold_tts(tts.as_slice(), fld))),
+            TTSeq(span, ref pattern, ref sep, is_optional) =>
                 TTSeq(span,
-                      @fold_tts(pattern.as_slice(), fld),
+                      Rc::new(fold_tts(pattern.as_slice(), fld)),
                       sep.as_ref().map(|tok|maybe_fold_ident(tok,fld)),
                       is_optional),
             TTNonterminal(sp,ref ident) =>

@@ -366,13 +366,13 @@ mod test {
           [ast::TTTok(_,_),
            ast::TTTok(_,token::NOT),
            ast::TTTok(_,_),
-           ast::TTDelim(delim_elts)] => {
+           ast::TTDelim(ref delim_elts)] => {
               let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
               match delim_elts {
                   [ast::TTTok(_,token::LPAREN),
-                   ast::TTDelim(first_set),
+                   ast::TTDelim(ref first_set),
                    ast::TTTok(_,token::FAT_ARROW),
-                   ast::TTDelim(second_set),
+                   ast::TTDelim(ref second_set),
                    ast::TTTok(_,token::RPAREN)] => {
                       let first_set: &[ast::TokenTree] =
                           first_set.as_slice();
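
The fold and test hunks above also switch the match arms from binding the payload by value (`TTDelim(tts)`) to binding by reference (`TTDelim(ref tts)`), and the transcriber clones the handle where it needs ownership (`tts.clone()`): unlike the old `@`-boxes, an `Rc` cannot be copied implicitly out of a borrowed token tree. A minimal sketch of the same idea, again with a hypothetical stand-in enum and in current Rust, where match ergonomics supply the `ref` automatically:

use std::rc::Rc;

// Hypothetical stand-in types, not the real AST.
enum Tree {
    Delim(Rc<Vec<u32>>),
    Leaf(u32),
}

fn describe(t: &Tree) -> String {
    match t {
        // Binding by reference: the Rc stays in place, nothing is moved or copied.
        Tree::Delim(tts) => format!("delimited, {} inner elements", tts.len()),
        Tree::Leaf(n) => format!("leaf {}", n),
    }
}

fn main() {
    let t = Tree::Delim(Rc::new(vec![1, 2, 3]));
    println!("{}", describe(&t));
    // Where ownership of the handle is needed (as in the transcriber's
    // tts.clone()), cloning the Rc is an explicit, cheap refcount bump.
    if let Tree::Delim(tts) = &t {
        let owned: Rc<Vec<u32>> = tts.clone();
        println!("shared length: {}", owned.len());
    }
}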

@@ -80,6 +80,7 @@ use owned_slice::OwnedSlice;
 use collections::HashSet;
 use std::kinds::marker;
 use std::mem::replace;
+use std::rc::Rc;
 use std::vec;
 
 #[allow(non_camel_case_types)]

@@ -2101,7 +2102,7 @@ impl<'a> Parser<'a> {
                 let seq = match seq {
                     Spanned { node, .. } => node,
                 };
-                TTSeq(mk_sp(sp.lo, p.span.hi), @seq, s, z)
+                TTSeq(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
             } else {
                 TTNonterminal(sp, p.parse_ident())
             }

@@ -2144,7 +2145,7 @@ impl<'a> Parser<'a> {
             result.push(parse_any_tt_tok(self));
             self.open_braces.pop().unwrap();
 
-            TTDelim(@result)
+            TTDelim(Rc::new(result))
         }
         _ => parse_non_delim_tt_tok(self)
     }