syntax: Get rid of token::IdentStyle

This commit is contained in:
Vadim Petrochenkov 2016-04-16 04:12:02 +03:00
parent 8dbf8f5f0a
commit 546c052d22
16 changed files with 111 additions and 183 deletions

View file

@ -147,7 +147,7 @@ fn write_source(sess: &parse::ParseSess,
} }
// keywords are also included in the identifier set // keywords are also included in the identifier set
token::Ident(ident, _is_mod_sep) => { token::Ident(ident) => {
match &*ident.name.as_str() { match &*ident.name.as_str() {
"ref" | "mut" => "kw-2", "ref" | "mut" => "kw-2",

View file

@ -1206,8 +1206,7 @@ impl TokenTree {
TokenTree::Delimited(sp, Rc::new(Delimited { TokenTree::Delimited(sp, Rc::new(Delimited {
delim: token::Bracket, delim: token::Bracket,
open_span: sp, open_span: sp,
tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"), tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
token::Plain)),
TokenTree::Token(sp, token::Eq), TokenTree::Token(sp, token::Eq),
TokenTree::Token(sp, token::Literal( TokenTree::Token(sp, token::Literal(
token::StrRaw(token::intern(&stripped), num_of_hashes), None))], token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
@ -1225,14 +1224,13 @@ impl TokenTree {
} }
(&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => { (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
let v = [TokenTree::Token(sp, token::Dollar), let v = [TokenTree::Token(sp, token::Dollar),
TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()), TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
token::Plain))];
v[index].clone() v[index].clone()
} }
(&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => { (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)), let v = [TokenTree::Token(sp, token::SubstNt(name)),
TokenTree::Token(sp, token::Colon), TokenTree::Token(sp, token::Colon),
TokenTree::Token(sp, token::Ident(kind, kind_st))]; TokenTree::Token(sp, token::Ident(kind))];
v[index].clone() v[index].clone()
} }
(&TokenTree::Sequence(_, ref seq), _) => { (&TokenTree::Sequence(_, ref seq), _) => {

View file

@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree]) token_tree: &[TokenTree])
-> Box<MacResult+'cx> { -> Box<MacResult+'cx> {
let code = match (token_tree.len(), token_tree.get(0)) { let code = match (token_tree.len(), token_tree.get(0)) {
(1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code, (1, Some(&TokenTree::Token(_, token::Ident(code)))) => code,
_ => unreachable!() _ => unreachable!()
}; };
@ -92,10 +92,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
token_tree.get(1), token_tree.get(1),
token_tree.get(2) token_tree.get(2)
) { ) {
(1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => { (1, Some(&TokenTree::Token(_, token::Ident(ref code))), None, None) => {
(code, None) (code, None)
}, },
(3, Some(&TokenTree::Token(_, token::Ident(ref code, _))), (3, Some(&TokenTree::Token(_, token::Ident(ref code))),
Some(&TokenTree::Token(_, token::Comma)), Some(&TokenTree::Token(_, token::Comma)),
Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => { Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
(code, Some(description)) (code, Some(description))
@ -160,9 +160,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
let (crate_name, name) = match (&token_tree[0], &token_tree[2]) { let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
( (
// Crate name. // Crate name.
&TokenTree::Token(_, token::Ident(ref crate_name, _)), &TokenTree::Token(_, token::Ident(ref crate_name)),
// DIAGNOSTICS ident. // DIAGNOSTICS ident.
&TokenTree::Token(_, token::Ident(ref name, _)) &TokenTree::Token(_, token::Ident(ref name))
) => (*&crate_name, name), ) => (*&crate_name, name),
_ => unreachable!() _ => unreachable!()
}; };

View file

@ -72,7 +72,7 @@ pub mod rt {
impl ToTokens for ast::Ident { impl ToTokens for ast::Ident {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))] vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
} }
} }
@ -646,14 +646,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
cx.expr_usize(sp, n)) cx.expr_usize(sp, n))
} }
token::Ident(ident, style) => { token::Ident(ident) => {
return cx.expr_call(sp, return cx.expr_call(sp,
mk_token_path(cx, sp, "Ident"), mk_token_path(cx, sp, "Ident"),
vec![mk_ident(cx, sp, ident), vec![mk_ident(cx, sp, ident)]);
match style {
ModName => mk_token_path(cx, sp, "ModName"),
Plain => mk_token_path(cx, sp, "Plain"),
}]);
} }
token::Lifetime(ident) => { token::Lifetime(ident) => {
@ -668,19 +664,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))); vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
} }
token::MatchNt(name, kind, namep, kindp) => { token::MatchNt(name, kind) => {
return cx.expr_call(sp, return cx.expr_call(sp,
mk_token_path(cx, sp, "MatchNt"), mk_token_path(cx, sp, "MatchNt"),
vec!(mk_ident(cx, sp, name), vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
mk_ident(cx, sp, kind),
match namep {
ModName => mk_token_path(cx, sp, "ModName"),
Plain => mk_token_path(cx, sp, "Plain"),
},
match kindp {
ModName => mk_token_path(cx, sp, "ModName"),
Plain => mk_token_path(cx, sp, "Plain"),
}));
} }
token::Interpolated(_) => panic!("quote! with interpolated token"), token::Interpolated(_) => panic!("quote! with interpolated token"),
@ -722,7 +709,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> { fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
match *tt { match *tt {
TokenTree::Token(sp, SubstNt(ident, _)) => { TokenTree::Token(sp, SubstNt(ident)) => {
// tt.extend($ident.to_tokens(ext_cx)) // tt.extend($ident.to_tokens(ext_cx))
let e_to_toks = let e_to_toks =

View file

@ -216,7 +216,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
n_rec(p_s, next_m, res, ret_val, idx)?; n_rec(p_s, next_m, res, ret_val, idx)?;
} }
} }
TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => { TokenTree::Token(sp, MatchNt(bind_name, _)) => {
match ret_val.entry(bind_name.name) { match ret_val.entry(bind_name.name) {
Vacant(spot) => { Vacant(spot) => {
spot.insert(res[*idx].clone()); spot.insert(res[*idx].clone());
@ -263,7 +263,7 @@ pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
/// unhygienic comparison) /// unhygienic comparison)
pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool { pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
match (t1,t2) { match (t1,t2) {
(&token::Ident(id1,_),&token::Ident(id2,_)) (&token::Ident(id1),&token::Ident(id2))
| (&token::Lifetime(id1),&token::Lifetime(id2)) => | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
id1.name == id2.name, id1.name == id2.name,
_ => *t1 == *t2 _ => *t1 == *t2
@ -451,7 +451,7 @@ pub fn parse(sess: &ParseSess,
if (!bb_eis.is_empty() && !next_eis.is_empty()) if (!bb_eis.is_empty() && !next_eis.is_empty())
|| bb_eis.len() > 1 { || bb_eis.len() > 1 {
let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) { let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
TokenTree::Token(_, MatchNt(bind, name, _, _)) => { TokenTree::Token(_, MatchNt(bind, name)) => {
format!("{} ('{}')", name, bind) format!("{} ('{}')", name, bind)
} }
_ => panic!() _ => panic!()
@ -479,7 +479,7 @@ pub fn parse(sess: &ParseSess,
let mut ei = bb_eis.pop().unwrap(); let mut ei = bb_eis.pop().unwrap();
match ei.top_elts.get_tt(ei.idx) { match ei.top_elts.get_tt(ei.idx) {
TokenTree::Token(span, MatchNt(_, ident, _, _)) => { TokenTree::Token(span, MatchNt(_, ident)) => {
let match_cur = ei.match_cur; let match_cur = ei.match_cur;
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
parse_nt(&mut rust_parser, span, &ident.name.as_str())))); parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
@ -534,9 +534,9 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
"ty" => token::NtTy(panictry!(p.parse_ty())), "ty" => token::NtTy(panictry!(p.parse_ty())),
// this could be handled like a token, since it is one // this could be handled like a token, since it is one
"ident" => match p.token { "ident" => match p.token {
token::Ident(sn,b) => { token::Ident(sn) => {
p.bump(); p.bump();
token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}),b) token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
} }
_ => { _ => {
let token_str = pprust::token_to_string(&p.token); let token_str = pprust::token_to_string(&p.token);

View file

@ -244,8 +244,8 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
// $( $lhs:tt => $rhs:tt );+ // $( $lhs:tt => $rhs:tt );+
// ...quasiquoting this would be nice. // ...quasiquoting this would be nice.
// These spans won't matter, anyways // These spans won't matter, anyways
let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain); let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt);
let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain); let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt);
let argument_gram = vec!( let argument_gram = vec!(
TokenTree::Sequence(DUMMY_SP, TokenTree::Sequence(DUMMY_SP,
Rc::new(ast::SequenceRepetition { Rc::new(ast::SequenceRepetition {
@ -415,7 +415,7 @@ fn check_matcher_old<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token, on_fai
let mut tokens = matcher.peekable(); let mut tokens = matcher.peekable();
while let Some(token) = tokens.next() { while let Some(token) = tokens.next() {
last = match *token { last = match *token {
TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => { TokenTree::Token(sp, MatchNt(ref name, ref frag_spec)) => {
// ii. If T is a simple NT, look ahead to the next token T' in // ii. If T is a simple NT, look ahead to the next token T' in
// M. If T' is in the set FOLLOW(NT), continue. Else; reject. // M. If T' is in the set FOLLOW(NT), continue. Else; reject.
if can_be_followed_by_any(&frag_spec.name.as_str()) { if can_be_followed_by_any(&frag_spec.name.as_str()) {
@ -881,7 +881,7 @@ fn check_matcher_core(cx: &mut ExtCtxt,
// Now `last` holds the complete set of NT tokens that could // Now `last` holds the complete set of NT tokens that could
// end the sequence before SUFFIX. Check that every one works with `suffix`. // end the sequence before SUFFIX. Check that every one works with `suffix`.
'each_last: for &(_sp, ref t) in &last.tokens { 'each_last: for &(_sp, ref t) in &last.tokens {
if let MatchNt(ref name, ref frag_spec, _, _) = *t { if let MatchNt(ref name, ref frag_spec) = *t {
for &(sp, ref next_token) in &suffix_first.tokens { for &(sp, ref next_token) in &suffix_first.tokens {
match is_in_follow(cx, next_token, &frag_spec.name.as_str()) { match is_in_follow(cx, next_token, &frag_spec.name.as_str()) {
Err(msg) => { Err(msg) => {
@ -917,9 +917,8 @@ fn check_matcher_core(cx: &mut ExtCtxt,
last last
} }
fn token_can_be_followed_by_any(tok: &Token) -> bool { fn token_can_be_followed_by_any(tok: &Token) -> bool {
if let &MatchNt(_, ref frag_spec, _, _) = tok { if let &MatchNt(_, ref frag_spec) = tok {
frag_can_be_followed_by_any(&frag_spec.name.as_str()) frag_can_be_followed_by_any(&frag_spec.name.as_str())
} else { } else {
// (Non NT's can always be followed by anything in matchers.) // (Non NT's can always be followed by anything in matchers.)
@ -1005,8 +1004,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
"pat" => { "pat" => {
match *tok { match *tok {
FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
Ident(i, _) if (i.name.as_str() == "if" || Ident(i) if (i.name.as_str() == "if" ||
i.name.as_str() == "in") => Ok(true), i.name.as_str() == "in") => Ok(true),
_ => Ok(false) _ => Ok(false)
} }
}, },
@ -1014,9 +1013,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
match *tok { match *tok {
OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
MatchNt(_, ref frag, _, _) if frag.name.as_str() == "block" => Ok(true), MatchNt(_, ref frag) if frag.name.as_str() == "block" => Ok(true),
Ident(i, _) if (i.name.as_str() == "as" || Ident(i) if i.name.as_str() == "as" || i.name.as_str() == "where" => Ok(true),
i.name.as_str() == "where") => Ok(true),
_ => Ok(false) _ => Ok(false)
} }
}, },
@ -1036,7 +1034,7 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> { fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
debug!("has_legal_fragment_specifier({:?})", tok); debug!("has_legal_fragment_specifier({:?})", tok);
if let &MatchNt(_, ref frag_spec, _, _) = tok { if let &MatchNt(_, ref frag_spec) = tok {
let s = &frag_spec.name.as_str(); let s = &frag_spec.name.as_str();
if !is_legal_fragment_specifier(s) { if !is_legal_fragment_specifier(s) {
return Err(s.to_string()); return Err(s.to_string());

View file

@ -161,7 +161,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
size + lockstep_iter_size(tt, r) size + lockstep_iter_size(tt, r)
}) })
}, },
TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) => TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
match lookup_cur_matched(r, name) { match lookup_cur_matched(r, name) {
Some(matched) => match *matched { Some(matched) => match *matched {
MatchedNonterminal(_) => LisUnconstrained, MatchedNonterminal(_) => LisUnconstrained,
@ -186,7 +186,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
None => (), None => (),
Some(sp) => { Some(sp) => {
r.cur_span = sp; r.cur_span = sp;
r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain); r.cur_tok = token::Ident(r.imported_from.unwrap());
return ret_val; return ret_val;
}, },
} }
@ -278,12 +278,12 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
} }
} }
// FIXME #2887: think about span stuff here // FIXME #2887: think about span stuff here
TokenTree::Token(sp, SubstNt(ident, namep)) => { TokenTree::Token(sp, SubstNt(ident)) => {
r.stack.last_mut().unwrap().idx += 1; r.stack.last_mut().unwrap().idx += 1;
match lookup_cur_matched(r, ident) { match lookup_cur_matched(r, ident) {
None => { None => {
r.cur_span = sp; r.cur_span = sp;
r.cur_tok = SubstNt(ident, namep); r.cur_tok = SubstNt(ident);
return ret_val; return ret_val;
// this can't be 0 length, just like TokenTree::Delimited // this can't be 0 length, just like TokenTree::Delimited
} }
@ -292,9 +292,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
// sidestep the interpolation tricks for ident because // sidestep the interpolation tricks for ident because
// (a) idents can be in lots of places, so it'd be a pain // (a) idents can be in lots of places, so it'd be a pain
// (b) we actually can, since it's a token. // (b) we actually can, since it's a token.
MatchedNonterminal(NtIdent(ref sn, b)) => { MatchedNonterminal(NtIdent(ref sn)) => {
r.cur_span = sn.span; r.cur_span = sn.span;
r.cur_tok = token::Ident(sn.node, b); r.cur_tok = token::Ident(sn.node);
return ret_val; return ret_val;
} }
MatchedNonterminal(ref other_whole_nt) => { MatchedNonterminal(ref other_whole_nt) => {

View file

@ -610,17 +610,11 @@ pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree
// apply ident folder if it's an ident, apply other folds to interpolated nodes // apply ident folder if it's an ident, apply other folds to interpolated nodes
pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token { pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
match t { match t {
token::Ident(id, followed_by_colons) => { token::Ident(id) => token::Ident(fld.fold_ident(id)),
token::Ident(fld.fold_ident(id), followed_by_colons)
}
token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)), token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)), token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
token::SubstNt(ident, namep) => { token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
token::SubstNt(fld.fold_ident(ident), namep) token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
}
token::MatchNt(name, kind, namep, kindp) => {
token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind), namep, kindp)
}
_ => t _ => t
} }
} }
@ -664,9 +658,8 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)), token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)), token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)), token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
token::NtIdent(id, is_mod_name) => token::NtIdent(id) =>
token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), .. *id}), token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
is_mod_name),
token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))), token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))), token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),

View file

@ -1039,11 +1039,7 @@ impl<'a> StringReader<'a> {
token::Underscore token::Underscore
} else { } else {
// FIXME: perform NFKC normalization here. (Issue #2253) // FIXME: perform NFKC normalization here. (Issue #2253)
if self.curr_is(':') && self.nextch_is(':') { token::Ident(str_to_ident(string))
token::Ident(str_to_ident(string), token::ModName)
} else {
token::Ident(str_to_ident(string), token::Plain)
}
} }
}); });
} }
@ -1231,8 +1227,7 @@ impl<'a> StringReader<'a> {
let keyword_checking_ident = self.with_str_from(start, |lifetime_name| { let keyword_checking_ident = self.with_str_from(start, |lifetime_name| {
str_to_ident(lifetime_name) str_to_ident(lifetime_name)
}); });
let keyword_checking_token = &token::Ident(keyword_checking_ident, let keyword_checking_token = &token::Ident(keyword_checking_ident);
token::Plain);
let last_bpos = self.last_pos; let last_bpos = self.last_pos;
if keyword_checking_token.is_keyword(token::keywords::SelfValue) { if keyword_checking_token.is_keyword(token::keywords::SelfValue) {
self.err_span_(start, self.err_span_(start,
@ -1687,7 +1682,7 @@ mod tests {
assert_eq!(string_reader.next_token().tok, token::Whitespace); assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token(); let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan { let tok2 = TokenAndSpan {
tok: token::Ident(id, token::Plain), tok: token::Ident(id),
sp: Span { sp: Span {
lo: BytePos(21), lo: BytePos(21),
hi: BytePos(23), hi: BytePos(23),
@ -1701,7 +1696,7 @@ mod tests {
// read another token: // read another token:
let tok3 = string_reader.next_token(); let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan { let tok4 = TokenAndSpan {
tok: token::Ident(str_to_ident("main"), token::Plain), tok: token::Ident(str_to_ident("main")),
sp: Span { sp: Span {
lo: BytePos(24), lo: BytePos(24),
hi: BytePos(28), hi: BytePos(28),
@ -1722,8 +1717,8 @@ mod tests {
} }
// make the identifier by looking up the string in the interner // make the identifier by looking up the string in the interner
fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token { fn mk_ident(id: &str) -> token::Token {
token::Ident(str_to_ident(id), style) token::Ident(str_to_ident(id))
} }
#[test] #[test]
@ -1731,9 +1726,7 @@ mod tests {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone()); let sh = mk_sh(cm.clone());
check_tokenization(setup(&cm, &sh, "a b".to_string()), check_tokenization(setup(&cm, &sh, "a b".to_string()),
vec![mk_ident("a", token::Plain), vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
token::Whitespace,
mk_ident("b", token::Plain)]);
} }
#[test] #[test]
@ -1741,9 +1734,7 @@ mod tests {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone()); let sh = mk_sh(cm.clone());
check_tokenization(setup(&cm, &sh, "a::b".to_string()), check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a", token::ModName), vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
token::ModSep,
mk_ident("b", token::Plain)]);
} }
#[test] #[test]
@ -1751,10 +1742,7 @@ mod tests {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone()); let sh = mk_sh(cm.clone());
check_tokenization(setup(&cm, &sh, "a ::b".to_string()), check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
vec![mk_ident("a", token::Plain), vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
token::Whitespace,
token::ModSep,
mk_ident("b", token::Plain)]);
} }
#[test] #[test]
@ -1762,10 +1750,7 @@ mod tests {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone()); let sh = mk_sh(cm.clone());
check_tokenization(setup(&cm, &sh, "a:: b".to_string()), check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
vec![mk_ident("a", token::ModName), vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
token::ModSep,
token::Whitespace,
mk_ident("b", token::Plain)]);
} }
#[test] #[test]

View file

@ -734,9 +734,9 @@ mod tests {
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
( (
4, 4,
Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))), Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
Some(&TokenTree::Token(_, token::Not)), Some(&TokenTree::Token(_, token::Not)),
Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))), Some(&TokenTree::Token(_, token::Ident(name_zip))),
Some(&TokenTree::Delimited(_, ref macro_delimed)), Some(&TokenTree::Delimited(_, ref macro_delimed)),
) )
if name_macro_rules.name.as_str() == "macro_rules" if name_macro_rules.name.as_str() == "macro_rules"
@ -755,7 +755,7 @@ mod tests {
( (
2, 2,
Some(&TokenTree::Token(_, token::Dollar)), Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), Some(&TokenTree::Token(_, token::Ident(ident))),
) )
if first_delimed.delim == token::Paren if first_delimed.delim == token::Paren
&& ident.name.as_str() == "a" => {}, && ident.name.as_str() == "a" => {},
@ -766,7 +766,7 @@ mod tests {
( (
2, 2,
Some(&TokenTree::Token(_, token::Dollar)), Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), Some(&TokenTree::Token(_, token::Ident(ident))),
) )
if second_delimed.delim == token::Paren if second_delimed.delim == token::Paren
&& ident.name.as_str() == "a" => {}, && ident.name.as_str() == "a" => {},
@ -785,26 +785,17 @@ mod tests {
let tts = string_to_tts("fn a (b : i32) { b; }".to_string()); let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
let expected = vec![ let expected = vec![
TokenTree::Token(sp(0, 2), TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))),
token::Ident(str_to_ident("fn"), TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
token::IdentStyle::Plain)),
TokenTree::Token(sp(3, 4),
token::Ident(str_to_ident("a"),
token::IdentStyle::Plain)),
TokenTree::Delimited( TokenTree::Delimited(
sp(5, 14), sp(5, 14),
Rc::new(ast::Delimited { Rc::new(ast::Delimited {
delim: token::DelimToken::Paren, delim: token::DelimToken::Paren,
open_span: sp(5, 6), open_span: sp(5, 6),
tts: vec![ tts: vec![
TokenTree::Token(sp(6, 7), TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))),
token::Ident(str_to_ident("b"), TokenTree::Token(sp(8, 9), token::Colon),
token::IdentStyle::Plain)), TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
TokenTree::Token(sp(8, 9),
token::Colon),
TokenTree::Token(sp(10, 13),
token::Ident(str_to_ident("i32"),
token::IdentStyle::Plain)),
], ],
close_span: sp(13, 14), close_span: sp(13, 14),
})), })),
@ -814,11 +805,8 @@ mod tests {
delim: token::DelimToken::Brace, delim: token::DelimToken::Brace,
open_span: sp(15, 16), open_span: sp(15, 16),
tts: vec![ tts: vec![
TokenTree::Token(sp(17, 18), TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))),
token::Ident(str_to_ident("b"), TokenTree::Token(sp(18, 19), token::Semi),
token::IdentStyle::Plain)),
TokenTree::Token(sp(18, 19),
token::Semi)
], ],
close_span: sp(20, 21), close_span: sp(20, 21),
})) }))

View file

@ -567,7 +567,7 @@ impl<'a> Parser<'a> {
} }
self.check_reserved_keywords(); self.check_reserved_keywords();
match self.token { match self.token {
token::Ident(i, _) => { token::Ident(i) => {
self.bump(); self.bump();
Ok(i) Ok(i)
} }
@ -629,9 +629,8 @@ impl<'a> Parser<'a> {
} }
pub fn check_contextual_keyword(&mut self, ident: Ident) -> bool { pub fn check_contextual_keyword(&mut self, ident: Ident) -> bool {
let tok = token::Ident(ident, token::Plain); self.expected_tokens.push(TokenType::Token(token::Ident(ident)));
self.expected_tokens.push(TokenType::Token(tok)); if let token::Ident(ref cur_ident) = self.token {
if let token::Ident(ref cur_ident, _) = self.token {
cur_ident.name == ident.name cur_ident.name == ident.name
} else { } else {
false false
@ -1699,7 +1698,7 @@ impl<'a> Parser<'a> {
pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
match self.token { match self.token {
token::Ident(sid, _) if self.token.is_path_segment_keyword() => { token::Ident(sid) if self.token.is_path_segment_keyword() => {
self.bump(); self.bump();
Ok(sid) Ok(sid)
} }
@ -2564,7 +2563,7 @@ impl<'a> Parser<'a> {
// expr.f // expr.f
if self.eat(&token::Dot) { if self.eat(&token::Dot) {
match self.token { match self.token {
token::Ident(i, _) => { token::Ident(i) => {
let dot_pos = self.last_span.hi; let dot_pos = self.last_span.hi;
hi = self.span.hi; hi = self.span.hi;
self.bump(); self.bump();
@ -2661,7 +2660,7 @@ impl<'a> Parser<'a> {
// Parse unquoted tokens after a `$` in a token tree // Parse unquoted tokens after a `$` in a token tree
fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> { fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
let mut sp = self.span; let mut sp = self.span;
let (name, namep) = match self.token { let name = match self.token {
token::Dollar => { token::Dollar => {
self.bump(); self.bump();
@ -2686,14 +2685,12 @@ impl<'a> Parser<'a> {
return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
} else { } else {
sp = mk_sp(sp.lo, self.span.hi); sp = mk_sp(sp.lo, self.span.hi);
let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain }; self.parse_ident()?
let name = self.parse_ident()?;
(name, namep)
} }
} }
token::SubstNt(name, namep) => { token::SubstNt(name) => {
self.bump(); self.bump();
(name, namep) name
} }
_ => unreachable!() _ => unreachable!()
}; };
@ -2703,18 +2700,17 @@ impl<'a> Parser<'a> {
!t.is_reserved_keyword()) { !t.is_reserved_keyword()) {
self.bump(); self.bump();
sp = mk_sp(sp.lo, self.span.hi); sp = mk_sp(sp.lo, self.span.hi);
let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain };
let nt_kind = self.parse_ident()?; let nt_kind = self.parse_ident()?;
Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp))) Ok(TokenTree::Token(sp, MatchNt(name, nt_kind)))
} else { } else {
Ok(TokenTree::Token(sp, SubstNt(name, namep))) Ok(TokenTree::Token(sp, SubstNt(name)))
} }
} }
pub fn check_unknown_macro_variable(&mut self) { pub fn check_unknown_macro_variable(&mut self) {
if self.quote_depth == 0 { if self.quote_depth == 0 {
match self.token { match self.token {
token::SubstNt(name, _) => token::SubstNt(name) =>
self.fatal(&format!("unknown macro variable `{}`", name)).emit(), self.fatal(&format!("unknown macro variable `{}`", name)).emit(),
_ => {} _ => {}
} }
@ -4614,7 +4610,7 @@ impl<'a> Parser<'a> {
fn expect_self_ident(&mut self) -> PResult<'a, ast::Ident> { fn expect_self_ident(&mut self) -> PResult<'a, ast::Ident> {
match self.token { match self.token {
token::Ident(id, _) if id.name == special_idents::self_.name => { token::Ident(id) if id.name == special_idents::self_.name => {
self.bump(); self.bump();
Ok(id) Ok(id)
}, },
@ -4927,7 +4923,7 @@ impl<'a> Parser<'a> {
Visibility::Inherited => (), Visibility::Inherited => (),
_ => { _ => {
let is_macro_rules: bool = match self.token { let is_macro_rules: bool = match self.token {
token::Ident(sid, _) => sid.name == intern("macro_rules"), token::Ident(sid) => sid.name == intern("macro_rules"),
_ => false, _ => false,
}; };
if is_macro_rules { if is_macro_rules {

View file

@ -11,7 +11,6 @@
pub use self::BinOpToken::*; pub use self::BinOpToken::*;
pub use self::Nonterminal::*; pub use self::Nonterminal::*;
pub use self::DelimToken::*; pub use self::DelimToken::*;
pub use self::IdentStyle::*;
pub use self::Lit::*; pub use self::Lit::*;
pub use self::Token::*; pub use self::Token::*;
@ -51,13 +50,6 @@ pub enum DelimToken {
Brace, Brace,
} }
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum IdentStyle {
/// `::` follows the identifier with no whitespace in-between.
ModName,
Plain,
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum SpecialMacroVar { pub enum SpecialMacroVar {
/// `$crate` will be filled in with the name of the crate a macro was /// `$crate` will be filled in with the name of the crate a macro was
@ -139,7 +131,7 @@ pub enum Token {
Literal(Lit, Option<ast::Name>), Literal(Lit, Option<ast::Name>),
/* Name components */ /* Name components */
Ident(ast::Ident, IdentStyle), Ident(ast::Ident),
Underscore, Underscore,
Lifetime(ast::Ident), Lifetime(ast::Ident),
@ -150,10 +142,10 @@ pub enum Token {
DocComment(ast::Name), DocComment(ast::Name),
// In left-hand-sides of MBE macros: // In left-hand-sides of MBE macros:
/// Parse a nonterminal (name to bind, name of NT, styles of their idents) /// Parse a nonterminal (name to bind, name of NT, styles of their idents)
MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle), MatchNt(ast::Ident, ast::Ident),
// In right-hand-sides of MBE macros: // In right-hand-sides of MBE macros:
/// A syntactic variable that will be filled in by macro expansion. /// A syntactic variable that will be filled in by macro expansion.
SubstNt(ast::Ident, IdentStyle), SubstNt(ast::Ident),
/// A macro variable with special meaning. /// A macro variable with special meaning.
SpecialVarNt(SpecialMacroVar), SpecialVarNt(SpecialMacroVar),
@ -279,16 +271,16 @@ impl Token {
/// Returns `true` if the token is a given keyword, `kw`. /// Returns `true` if the token is a given keyword, `kw`.
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool { pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
match *self { match *self {
Ident(id, _) => id.name == kw.to_name(), Ident(id) => id.name == kw.to_name(),
_ => false, _ => false,
} }
} }
pub fn is_path_segment_keyword(&self) -> bool { pub fn is_path_segment_keyword(&self) -> bool {
match *self { match *self {
Ident(id, _) => id.name == SUPER_KEYWORD_NAME || Ident(id) => id.name == SUPER_KEYWORD_NAME ||
id.name == SELF_KEYWORD_NAME || id.name == SELF_KEYWORD_NAME ||
id.name == SELF_TYPE_KEYWORD_NAME, id.name == SELF_TYPE_KEYWORD_NAME,
_ => false, _ => false,
} }
} }
@ -296,12 +288,12 @@ impl Token {
/// Returns `true` if the token is either a strict or reserved keyword. /// Returns `true` if the token is either a strict or reserved keyword.
pub fn is_any_keyword(&self) -> bool { pub fn is_any_keyword(&self) -> bool {
match *self { match *self {
Ident(id, _) => id.name == SELF_KEYWORD_NAME || Ident(id) => id.name == SELF_KEYWORD_NAME ||
id.name == STATIC_KEYWORD_NAME || id.name == STATIC_KEYWORD_NAME ||
id.name == SUPER_KEYWORD_NAME || id.name == SUPER_KEYWORD_NAME ||
id.name == SELF_TYPE_KEYWORD_NAME || id.name == SELF_TYPE_KEYWORD_NAME ||
id.name >= STRICT_KEYWORD_START && id.name >= STRICT_KEYWORD_START &&
id.name <= RESERVED_KEYWORD_FINAL, id.name <= RESERVED_KEYWORD_FINAL,
_ => false _ => false
} }
} }
@ -309,12 +301,12 @@ impl Token {
/// Returns `true` if the token is either a strict keyword. /// Returns `true` if the token is either a strict keyword.
pub fn is_strict_keyword(&self) -> bool { pub fn is_strict_keyword(&self) -> bool {
match *self { match *self {
Ident(id, _) => id.name == SELF_KEYWORD_NAME || Ident(id) => id.name == SELF_KEYWORD_NAME ||
id.name == STATIC_KEYWORD_NAME || id.name == STATIC_KEYWORD_NAME ||
id.name == SUPER_KEYWORD_NAME || id.name == SUPER_KEYWORD_NAME ||
id.name == SELF_TYPE_KEYWORD_NAME || id.name == SELF_TYPE_KEYWORD_NAME ||
id.name >= STRICT_KEYWORD_START && id.name >= STRICT_KEYWORD_START &&
id.name <= STRICT_KEYWORD_FINAL, id.name <= STRICT_KEYWORD_FINAL,
_ => false, _ => false,
} }
} }
@ -322,8 +314,8 @@ impl Token {
/// Returns `true` if the token is either a keyword reserved for possible future use. /// Returns `true` if the token is either a keyword reserved for possible future use.
pub fn is_reserved_keyword(&self) -> bool { pub fn is_reserved_keyword(&self) -> bool {
match *self { match *self {
Ident(id, _) => id.name >= RESERVED_KEYWORD_START && Ident(id) => id.name >= RESERVED_KEYWORD_START &&
id.name <= RESERVED_KEYWORD_FINAL, id.name <= RESERVED_KEYWORD_FINAL,
_ => false, _ => false,
} }
} }
@ -333,7 +325,7 @@ impl Token {
/// See `styntax::ext::mtwt`. /// See `styntax::ext::mtwt`.
pub fn mtwt_eq(&self, other : &Token) -> bool { pub fn mtwt_eq(&self, other : &Token) -> bool {
match (self, other) { match (self, other) {
(&Ident(id1,_), &Ident(id2,_)) | (&Lifetime(id1), &Lifetime(id2)) => (&Ident(id1), &Ident(id2)) | (&Lifetime(id1), &Lifetime(id2)) =>
mtwt::resolve(id1) == mtwt::resolve(id2), mtwt::resolve(id1) == mtwt::resolve(id2),
_ => *self == *other _ => *self == *other
} }
@ -349,7 +341,7 @@ pub enum Nonterminal {
NtPat(P<ast::Pat>), NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>), NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>), NtTy(P<ast::Ty>),
NtIdent(Box<ast::SpannedIdent>, IdentStyle), NtIdent(Box<ast::SpannedIdent>),
/// Stuff inside brackets for attributes /// Stuff inside brackets for attributes
NtMeta(P<ast::MetaItem>), NtMeta(P<ast::MetaItem>),
NtPath(Box<ast::Path>), NtPath(Box<ast::Path>),
@ -743,6 +735,6 @@ mod tests {
assert!(Gt.mtwt_eq(&Gt)); assert!(Gt.mtwt_eq(&Gt));
let a = str_to_ident("bac"); let a = str_to_ident("bac");
let a1 = mark_ident(a,92); let a1 = mark_ident(a,92);
assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain))); assert!(Ident(a).mtwt_eq(&Ident(a1)));
} }
} }

View file

@ -270,14 +270,14 @@ pub fn token_to_string(tok: &Token) -> String {
} }
/* Name components */ /* Name components */
token::Ident(s, _) => s.to_string(), token::Ident(s) => s.to_string(),
token::Lifetime(s) => s.to_string(), token::Lifetime(s) => s.to_string(),
token::Underscore => "_".to_string(), token::Underscore => "_".to_string(),
/* Other */ /* Other */
token::DocComment(s) => s.to_string(), token::DocComment(s) => s.to_string(),
token::SubstNt(s, _) => format!("${}", s), token::SubstNt(s) => format!("${}", s),
token::MatchNt(s, t, _, _) => format!("${}:{}", s, t), token::MatchNt(s, t) => format!("${}:{}", s, t),
token::Eof => "<eof>".to_string(), token::Eof => "<eof>".to_string(),
token::Whitespace => " ".to_string(), token::Whitespace => " ".to_string(),
token::Comment => "/* */".to_string(), token::Comment => "/* */".to_string(),
@ -294,7 +294,7 @@ pub fn token_to_string(tok: &Token) -> String {
token::NtBlock(ref e) => block_to_string(&e), token::NtBlock(ref e) => block_to_string(&e),
token::NtStmt(ref e) => stmt_to_string(&e), token::NtStmt(ref e) => stmt_to_string(&e),
token::NtPat(ref e) => pat_to_string(&e), token::NtPat(ref e) => pat_to_string(&e),
token::NtIdent(ref e, _) => ident_to_string(e.node), token::NtIdent(ref e) => ident_to_string(e.node),
token::NtTT(ref e) => tt_to_string(&e), token::NtTT(ref e) => tt_to_string(&e),
token::NtArm(ref e) => arm_to_string(&e), token::NtArm(ref e) => arm_to_string(&e),
token::NtImplItem(ref e) => impl_item_to_string(&e), token::NtImplItem(ref e) => impl_item_to_string(&e),
@ -1488,20 +1488,11 @@ impl<'a> State<'a> {
pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> { pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> {
self.ibox(0)?; self.ibox(0)?;
let mut suppress_space = false;
for (i, tt) in tts.iter().enumerate() { for (i, tt) in tts.iter().enumerate() {
if i != 0 && !suppress_space { if i != 0 {
space(&mut self.s)?; space(&mut self.s)?;
} }
self.print_tt(tt)?; self.print_tt(tt)?;
// There should be no space between the module name and the following `::` in paths,
// otherwise imported macros get re-parsed from crate metadata incorrectly (#20701)
suppress_space = match *tt {
TokenTree::Token(_, token::Ident(_, token::ModName)) |
TokenTree::Token(_, token::MatchNt(_, _, _, token::ModName)) |
TokenTree::Token(_, token::SubstNt(_, token::ModName)) => true,
_ => false
}
} }
self.end() self.end()
} }

View file

@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
} }
} else { } else {
match *e { match *e {
TokenTree::Token(_, token::Ident(ident, _)) => { TokenTree::Token(_, token::Ident(ident)) => {
res_str.push_str(&ident.name.as_str()) res_str.push_str(&ident.name.as_str())
}, },
_ => { _ => {

View file

@ -106,7 +106,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) { if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
named = true; named = true;
let ident = match p.token { let ident = match p.token {
token::Ident(i, _) => { token::Ident(i) => {
p.bump(); p.bump();
i i
} }

View file

@ -48,7 +48,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
} }
let text = match args[0] { let text = match args[0] {
TokenTree::Token(_, token::Ident(s, _)) => s.to_string(), TokenTree::Token(_, token::Ident(s)) => s.to_string(),
_ => { _ => {
cx.span_err(sp, "argument should be a single identifier"); cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp); return DummyResult::any(sp);