
syntax: Use Token in TokenTree::Token

Vadim Petrochenkov 2019-06-04 20:42:43 +03:00
parent a3425edb46
commit e0127dbf81
31 changed files with 329 additions and 314 deletions
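
The change is mechanical but touches every consumer of `TokenTree::Token`: the `(Span, TokenKind)` pair is replaced by a single `Token` struct carrying both fields, and a `TokenTree::token(span, kind)` constructor keeps call sites terse. The sketch below is a simplified model of the resulting types, not the rustc source; the `Span` stand-in and derive lists are illustrative, while the `token` constructor, the `Deref` shim, and the `PartialEq<TokenKind>` impl mirror what the hunks below rely on (e.g. `token == token::Eq` and `token.is_ident()`).

use std::ops::Deref;

// Stand-in for syntax_pos::Span (illustrative only).
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Span(pub u32, pub u32);

#[derive(Clone, Debug, PartialEq)]
pub enum TokenKind {
    Eq,
    Comma,
    // ... the real enum has many more variants
}

// After this commit, `kind` and `span` always travel together.
#[derive(Clone, Debug, PartialEq)]
pub struct Token {
    pub kind: TokenKind,
    pub span: Span,
}

pub enum TokenTree {
    // Before this commit: `Token(Span, TokenKind)`.
    Token(Token),
    // `Delimited(..)` is unchanged and omitted here.
}

impl TokenTree {
    // Constructor that lets old `TokenTree::Token(sp, kind)` call sites
    // migrate mechanically to `TokenTree::token(sp, kind)`.
    pub fn token(span: Span, kind: TokenKind) -> TokenTree {
        TokenTree::Token(Token { kind, span })
    }
}

// Transitional shim (marked FIXME in the diff): `TokenKind` methods stay
// callable on `Token`, so `token.is_ident()` keeps working during migration.
impl Deref for Token {
    type Target = TokenKind;
    fn deref(&self) -> &Self::Target {
        &self.kind
    }
}

// Allows comparing a whole token against a bare kind, which is why call
// sites below can write `token == token::Eq` instead of destructuring.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool {
        self.kind == *rhs
    }
}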

View file

@@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
 use syntax::std_inject;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::{self, TokenKind};
+use syntax::parse::token::{self, Token};
 use syntax::visit::{self, Visitor};
 use syntax_pos::{DUMMY_SP, edition, Span};
@@ -1328,7 +1328,7 @@ impl<'a> LoweringContext<'a> {
     fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
         match tree {
-            TokenTree::Token(span, token) => self.lower_token(token, span),
+            TokenTree::Token(token) => self.lower_token(token),
             TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                 span,
                 delim,
@@ -1337,13 +1337,13 @@ impl<'a> LoweringContext<'a> {
         }
     }

-    fn lower_token(&mut self, token: TokenKind, span: Span) -> TokenStream {
-        match token {
+    fn lower_token(&mut self, token: Token) -> TokenStream {
+        match token.kind {
             token::Interpolated(nt) => {
-                let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
+                let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
                 self.lower_token_stream(tts)
             }
-            other => TokenTree::Token(span, other).into(),
+            _ => TokenTree::Token(token).into(),
         }
     }

View file

@@ -261,9 +261,8 @@ for tokenstream::TokenTree {
                                           hasher: &mut StableHasher<W>) {
         mem::discriminant(self).hash_stable(hcx, hasher);
         match *self {
-            tokenstream::TokenTree::Token(span, ref token) => {
-                span.hash_stable(hcx, hasher);
-                hash_token(token, hcx, hasher);
+            tokenstream::TokenTree::Token(ref token) => {
+                token.hash_stable(hcx, hasher);
             }
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
@@ -306,70 +305,75 @@ impl_stable_hash_for!(struct token::Lit {
     suffix
 });

-fn hash_token<'a, 'gcx, W: StableHasherResult>(
-    token: &token::TokenKind,
-    hcx: &mut StableHashingContext<'a>,
-    hasher: &mut StableHasher<W>,
-) {
-    mem::discriminant(token).hash_stable(hcx, hasher);
-    match *token {
-        token::Eq |
-        token::Lt |
-        token::Le |
-        token::EqEq |
-        token::Ne |
-        token::Ge |
-        token::Gt |
-        token::AndAnd |
-        token::OrOr |
-        token::Not |
-        token::Tilde |
-        token::At |
-        token::Dot |
-        token::DotDot |
-        token::DotDotDot |
-        token::DotDotEq |
-        token::Comma |
-        token::Semi |
-        token::Colon |
-        token::ModSep |
-        token::RArrow |
-        token::LArrow |
-        token::FatArrow |
-        token::Pound |
-        token::Dollar |
-        token::Question |
-        token::SingleQuote |
-        token::Whitespace |
-        token::Comment |
-        token::Eof => {}
-
-        token::BinOp(bin_op_token) |
-        token::BinOpEq(bin_op_token) => {
-            std_hash::Hash::hash(&bin_op_token, hasher);
-        }
-
-        token::OpenDelim(delim_token) |
-        token::CloseDelim(delim_token) => {
-            std_hash::Hash::hash(&delim_token, hasher);
-        }
-        token::Literal(lit) => lit.hash_stable(hcx, hasher),
-        token::Ident(ident, is_raw) => {
-            ident.name.hash_stable(hcx, hasher);
-            is_raw.hash_stable(hcx, hasher);
-        }
-        token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
-
-        token::Interpolated(_) => {
-            bug!("interpolated tokens should not be present in the HIR")
-        }
-
-        token::DocComment(val) |
-        token::Shebang(val) => val.hash_stable(hcx, hasher),
+impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a>,
+                                          hasher: &mut StableHasher<W>) {
+        mem::discriminant(self).hash_stable(hcx, hasher);
+        match *self {
+            token::Eq |
+            token::Lt |
+            token::Le |
+            token::EqEq |
+            token::Ne |
+            token::Ge |
+            token::Gt |
+            token::AndAnd |
+            token::OrOr |
+            token::Not |
+            token::Tilde |
+            token::At |
+            token::Dot |
+            token::DotDot |
+            token::DotDotDot |
+            token::DotDotEq |
+            token::Comma |
+            token::Semi |
+            token::Colon |
+            token::ModSep |
+            token::RArrow |
+            token::LArrow |
+            token::FatArrow |
+            token::Pound |
+            token::Dollar |
+            token::Question |
+            token::SingleQuote |
+            token::Whitespace |
+            token::Comment |
+            token::Eof => {}
+
+            token::BinOp(bin_op_token) |
+            token::BinOpEq(bin_op_token) => {
+                std_hash::Hash::hash(&bin_op_token, hasher);
+            }
+
+            token::OpenDelim(delim_token) |
+            token::CloseDelim(delim_token) => {
+                std_hash::Hash::hash(&delim_token, hasher);
+            }
+            token::Literal(lit) => lit.hash_stable(hcx, hasher),
+            token::Ident(ident, is_raw) => {
+                ident.name.hash_stable(hcx, hasher);
+                is_raw.hash_stable(hcx, hasher);
+            }
+            token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
+
+            token::Interpolated(_) => {
+                bug!("interpolated tokens should not be present in the HIR")
+            }
+
+            token::DocComment(val) |
+            token::Shebang(val) => val.hash_stable(hcx, hasher),
+        }
     }
 }

+impl_stable_hash_for!(struct token::Token {
+    kind,
+    span
+});
+
 impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
     MetaItem(meta_item),
     Literal(lit)

View file

@@ -1414,11 +1414,11 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
         for tt in tokens.into_trees() {
             match tt {
-                TokenTree::Token(span, tok) => match tok.ident() {
+                TokenTree::Token(token) => match token.ident() {
                     // only report non-raw idents
                     Some((ident, false)) => {
                         self.check_ident_token(cx, UnderMacro(true), ast::Ident {
-                            span: span.substitute_dummy(ident.span),
+                            span: token.span.substitute_dummy(ident.span),
                             ..ident
                         });
                     }

View file

@@ -234,7 +234,7 @@ impl<'a> Classifier<'a> {
             // reference or dereference operator or a reference or pointer type, instead of the
             // bit-and or multiplication operator.
             token::BinOp(token::And) | token::BinOp(token::Star)
-                if self.lexer.peek().kind != token::Whitespace => Class::RefKeyWord,
+                if self.lexer.peek() != token::Whitespace => Class::RefKeyWord,

             // Consider this as part of a macro invocation if there was a
             // leading identifier.
@@ -335,7 +335,7 @@ impl<'a> Classifier<'a> {
                     sym::Option | sym::Result => Class::PreludeTy,
                     sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,

-                    _ if token.kind.is_reserved_ident() => Class::KeyWord,
+                    _ if token.is_reserved_ident() => Class::KeyWord,

                     _ => {
                         if self.in_macro_nonterminal {

View file

@@ -20,7 +20,7 @@ use crate::source_map::{BytePos, Spanned, dummy_spanned};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::parser::Parser;
 use crate::parse::{self, ParseSess, PResult};
-use crate::parse::token::{self, TokenKind};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::ptr::P;
 use crate::symbol::{sym, Symbol};
 use crate::ThinVec;
@@ -465,9 +465,9 @@ impl MetaItem {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into());
+                idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
             }
-            idents.push(TokenTree::Token(segment.ident.span,
+            idents.push(TokenTree::token(segment.ident.span,
                                          TokenKind::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
@@ -480,10 +480,10 @@ impl MetaItem {
     {
         // FIXME: Share code with `parse_path`.
         let path = match tokens.next() {
-            Some(TokenTree::Token(span, token @ token::Ident(..))) |
-            Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: {
-                let mut segments = if let token::Ident(ident, _) = token {
-                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
+            Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
+            Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
+                let mut segments = if let token::Ident(ident, _) = kind {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
                         tokens.next();
                         vec![PathSegment::from_ident(ident.with_span_pos(span))]
                     } else {
@@ -493,13 +493,12 @@ impl MetaItem {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(span,
-                                                 token::Ident(ident, _))) = tokens.next() {
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() {
                         segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
                         tokens.next();
                     } else {
                         break;
@@ -508,7 +507,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt {
+            Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),
@@ -533,7 +532,7 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::Token(span, token::Eq).into()];
+                let mut vec = vec![TokenTree::token(span, token::Eq).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -541,7 +540,7 @@ impl MetaItemKind {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::Token(span, token::Comma).into());
+                        tokens.push(TokenTree::token(span, token::Comma).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
@@ -558,10 +557,10 @@ impl MetaItemKind {
         where I: Iterator<Item = TokenTree>,
     {
         let delimited = match tokens.peek().cloned() {
-            Some(TokenTree::Token(_, token::Eq)) => {
+            Some(TokenTree::Token(token)) if token == token::Eq => {
                 tokens.next();
-                return if let Some(TokenTree::Token(span, token)) = tokens.next() {
-                    Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue)
+                return if let Some(TokenTree::Token(token)) = tokens.next() {
+                    Lit::from_token(&token, token.span).ok().map(MetaItemKind::NameValue)
                 } else {
                     None
                 };
@@ -579,7 +578,7 @@ impl MetaItemKind {
             let item = NestedMetaItem::from_tokens(&mut tokens)?;
             result.push(item);
             match tokens.next() {
-                None | Some(TokenTree::Token(_, token::Comma)) => {}
+                None | Some(TokenTree::Token(Token { kind: token::Comma, .. })) => {}
                 _ => return None,
             }
         }
@@ -605,8 +604,8 @@ impl NestedMetaItem {
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
         where I: Iterator<Item = TokenTree>,
     {
-        if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-            if let Ok(lit) = Lit::from_token(&token, span) {
+        if let Some(TokenTree::Token(token)) = tokens.peek().cloned() {
+            if let Ok(lit) = Lit::from_token(&token, token.span) {
                 tokens.next();
                 return Some(NestedMetaItem::Literal(lit));
             }

View file

@@ -5,7 +5,7 @@ use crate::ast::{self, Ident, Name};
 use crate::source_map;
 use crate::ext::base::{ExtCtxt, MacEager, MacResult};
 use crate::ext::build::AstBuilder;
-use crate::parse::token;
+use crate::parse::token::{self, Token};
 use crate::ptr::P;
 use crate::symbol::kw;
 use crate::tokenstream::{TokenTree};
@@ -34,7 +34,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                    token_tree: &[TokenTree])
                                    -> Box<dyn MacResult+'cx> {
     let code = match (token_tree.len(), token_tree.get(0)) {
-        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. }))) => code,
         _ => unreachable!()
     };
@@ -72,12 +72,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
         token_tree.get(1),
         token_tree.get(2)
     ) {
-        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => {
             (code, None)
         },
-        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
-            Some(&TokenTree::Token(_, token::Comma)),
-            Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => {
+        (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })),
+            Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
+            Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
             (code, Some(symbol))
         }
         _ => unreachable!()
@@ -143,9 +143,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
     let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
         (
             // Crate name.
-            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
+            &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }),
             // DIAGNOSTICS ident.
-            &TokenTree::Token(_, token::Ident(ref name, _))
+            &TokenTree::Token(Token { kind: token::Ident(ref name, _), .. })
         ) => (*&crate_name, name),
         _ => unreachable!()
     };

View file

@@ -265,10 +265,12 @@ impl<F> TTMacroExpander for F
 impl MutVisitor for AvoidInterpolatedIdents {
     fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
-        if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-            if let token::NtIdent(ident, is_raw) = **nt {
-                *tt = tokenstream::TokenTree::Token(ident.span,
-                                                    token::Ident(ident, is_raw));
+        if let tokenstream::TokenTree::Token(token) = tt {
+            if let token::Interpolated(nt) = &token.kind {
+                if let token::NtIdent(ident, is_raw) = **nt {
+                    *tt = tokenstream::TokenTree::token(ident.span,
+                                                        token::Ident(ident, is_raw));
+                }
             }
         }
         mut_visit::noop_visit_tt(tt, self)

View file

@@ -585,7 +585,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
                 AttrProcMacro(ref mac, ..) => {
                     self.gate_proc_macro_attr_item(attr.span, &item);
-                    let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
+                    let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
                         Annotatable::Item(item) => token::NtItem(item),
                         Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                         Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),

View file

@@ -78,7 +78,7 @@ use crate::ast::Ident;
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, TokenKind};
+use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{DelimSpan, TokenStream};
@@ -609,7 +609,8 @@ fn inner_parse_loop<'root, 'tt>(
             //
             // At the beginning of the loop, if we reach the end of the delimited submatcher,
             // we pop the stack to backtrack out of the descent.
-            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
+            seq @ TokenTree::Delimited(..) |
+            seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => {
                 let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
                 let idx = item.idx;
                 item.stack.push(MatcherTtFrame {
@@ -621,7 +622,7 @@ fn inner_parse_loop<'root, 'tt>(
             }

             // We just matched a normal token. We can just advance the parser.
-            TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
+            TokenTree::Token(t) if token_name_eq(&t, token) => {
                 item.idx += 1;
                 next_items.push(item);
             }

View file

@@ -11,7 +11,7 @@ use crate::ext::tt::transcribe::transcribe;
 use crate::feature_gate::Features;
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
-use crate::parse::token::{self, NtTT};
+use crate::parse::token::{self, Token, NtTT};
 use crate::parse::token::TokenKind::*;
 use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
@@ -270,7 +270,7 @@ pub fn compile(
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+            tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -613,7 +613,7 @@ impl FirstSets {
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
                         }

                         // Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ impl FirstSets {
                             if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                             subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                                first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
                             }

                             assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -1015,7 +1015,7 @@ enum IsInFollow {
 fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
     use quoted::TokenTree;

-    if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
+    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         IsInFollow::Yes
@@ -1033,8 +1033,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "stmt" | "expr" => {
                 let tokens = vec!["`=>`", "`,`", "`;`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Semi => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
@@ -1043,8 +1043,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "pat" => {
                 let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
                         Ident(i, false) if i.name == kw::If ||
                                            i.name == kw::In => IsInFollow::Yes,
@@ -1058,8 +1058,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                     "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`",
                     "`where`",
                 ];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         OpenDelim(token::DelimToken::Brace) |
                         OpenDelim(token::DelimToken::Bracket) |
                         Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
@@ -1089,8 +1089,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             "vis" => {
                 // Explicitly disallow `priv`, on the off chance it comes back.
                 let tokens = vec!["`,`", "an ident", "a type"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         Comma => IsInFollow::Yes,
                         Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
                             IsInFollow::Yes,
@@ -1150,7 +1150,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess,
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
+        quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \

View file

@@ -2,7 +2,8 @@ use crate::ast::NodeId;
 use crate::early_buffered_lints::BufferedEarlyLintId;
 use crate::ext::tt::macro_parser;
 use crate::feature_gate::Features;
-use crate::parse::{token, ParseSess};
+use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::tokenstream::{self, DelimSpan};
 use crate::ast;
@@ -39,7 +40,7 @@ impl Delimited {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(open_span, self.open_token())
+        TokenTree::token(open_span, self.open_token())
     }

     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -49,7 +50,7 @@ impl Delimited {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(close_span, self.close_token())
+        TokenTree::token(close_span, self.close_token())
     }
 }
@@ -81,7 +82,7 @@ pub enum KleeneOp {
 /// are "first-class" token trees. Useful for parsing macros.
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
-    Token(Span, token::TokenKind),
+    Token(Token),
     Delimited(DelimSpan, Lrc<Delimited>),
     /// A kleene-style repetition sequence
     Sequence(DelimSpan, Lrc<SequenceRepetition>),
@@ -144,13 +145,17 @@ impl TokenTree {
     /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match *self {
-            TokenTree::Token(sp, _)
-            | TokenTree::MetaVar(sp, _)
-            | TokenTree::MetaVarDecl(sp, _, _) => sp,
-            TokenTree::Delimited(sp, _)
-            | TokenTree::Sequence(sp, _) => sp.entire(),
+            TokenTree::Token(Token { span, .. })
+            | TokenTree::MetaVar(span, _)
+            | TokenTree::MetaVarDecl(span, _, _) => span,
+            TokenTree::Delimited(span, _)
+            | TokenTree::Sequence(span, _) => span.entire(),
         }
     }
+
+    crate fn token(span: Span, kind: TokenKind) -> TokenTree {
+        TokenTree::Token(Token { kind, span })
+    }
 }

 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
@@ -205,14 +210,14 @@ pub fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() {
+                        Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
                             Some((kind, _)) => {
-                                let span = end_sp.with_lo(start_sp.lo());
+                                let span = token.span.with_lo(start_sp.lo());
                                 result.push(TokenTree::MetaVarDecl(span, ident, kind));
                                 continue;
                             }
-                            _ => end_sp,
+                            _ => token.span,
                         },
                         tree => tree
                             .as_ref()
@@ -270,7 +275,7 @@ where
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
            // `tree` is followed by a delimited set of token trees. This indicates the beginning
            // of a repetition sequence in the macro (e.g. `$(pat)*`).
            Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
@@ -316,33 +321,33 @@ where
            // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
            // metavariable that names the crate of the invocation.
-            Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
+            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                let (ident, is_raw) = token.ident().unwrap();
-                let span = ident_span.with_lo(span.lo());
+                let span = token.span.with_lo(span.lo());
                if ident.name == kw::Crate && !is_raw {
                    let ident = ast::Ident::new(kw::DollarCrate, ident.span);
-                    TokenTree::Token(span, token::Ident(ident, is_raw))
+                    TokenTree::token(span, token::Ident(ident, is_raw))
                } else {
                    TokenTree::MetaVar(span, ident)
                }
            }

            // `tree` is followed by a random token. This is an error.
-            Some(tokenstream::TokenTree::Token(span, tok)) => {
+            Some(tokenstream::TokenTree::Token(token)) => {
                let msg = format!(
                    "expected identifier, found `{}`",
-                    pprust::token_to_string(&tok)
+                    pprust::token_to_string(&token),
                );
-                sess.span_diagnostic.span_err(span, &msg);
-                TokenTree::MetaVar(span, ast::Ident::invalid())
+                sess.span_diagnostic.span_err(token.span, &msg);
+                TokenTree::MetaVar(token.span, ast::Ident::invalid())
            }

            // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::Token(span, token::Dollar),
+            None => TokenTree::token(span, token::Dollar),
        },

        // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),

        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
        // descend into the delimited set and further parse it.
@@ -380,17 +385,14 @@ fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<I>(
-    input: &mut I,
-    span: Span,
-) -> Result<Result<(KleeneOp, Span), (token::TokenKind, Span)>, Span>
+fn parse_kleene_op<I>(input: &mut I, span: Span) -> Result<Result<(KleeneOp, Span), Token>, Span>
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
     match input.next() {
-        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
-            Some(op) => Ok(Ok((op, span))),
-            None => Ok(Err((tok, span))),
+        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+            Some(op) => Ok(Ok((op, token.span))),
+            None => Ok(Err(token)),
        },
        tree => Err(tree
            .as_ref()
@@ -466,7 +468,7 @@ where
             assert_eq!(op, KleeneOp::ZeroOrOne);

             // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
-            let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
+            let is_1_sep = if let Some(tokenstream::TokenTree::Token(tok2)) = input.peek() {
                 kleene_op(tok2).is_some()
             } else {
                 false
@@ -504,7 +506,7 @@ where
                 }

                 // #2 is a random token (this is an error) :(
-                Ok(Err((_, _))) => op1_span,
+                Ok(Err(_)) => op1_span,

                 // #2 is not even a token at all :(
                 Err(_) => op1_span,
@@ -524,7 +526,7 @@ where
         }

         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition,
             // but is allowed in the 2018 edition
             Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
@@ -539,10 +541,10 @@ where
             }

             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
+            Ok(Ok((op, _))) => return (Some(token.kind), op),

             // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Err(token)) => token.span,

             // #2 is not a token at all :(
             Err(span) => span,
@@ -580,12 +582,12 @@ where
         Ok(Ok((op, _))) => return (None, op),

         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is the `?` Kleene op, which does not take a separator (error)
             Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => {
                 // Error!
                 sess.span_diagnostic.span_err(
-                    span,
+                    token.span,
                     "the `?` macro repetition operator does not take a separator",
                 );
@@ -594,10 +596,10 @@ where
             }

             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
+            Ok(Ok((op, _))) => return (Some(token.kind), op),

             // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Err(token)) => token.span,

             // #2 is not a token at all :(
             Err(span) => span,

View file

@@ -119,7 +119,7 @@ pub fn transcribe(
                         Some((tt, _)) => tt.span(),
                         None => DUMMY_SP,
                     };
-                    result.push(TokenTree::Token(prev_span, sep).into());
+                    result.push(TokenTree::token(prev_span, sep).into());
                 }
                 continue;
             }
@@ -225,7 +225,7 @@ pub fn transcribe(
                     result.push(tt.clone().into());
                 } else {
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+                    let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
                     result.push(token.into());
                 }
             } else {
@@ -241,8 +241,8 @@ pub fn transcribe(
                 let ident =
                     Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                 sp = sp.apply_mark(cx.current_expansion.mark);
-                result.push(TokenTree::Token(sp, token::Dollar).into());
-                result.push(TokenTree::Token(sp, token::TokenKind::from_ast_ident(ident)).into());
+                result.push(TokenTree::token(sp, token::Dollar).into());
+                result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
             }
         }
@@ -259,9 +259,9 @@ pub fn transcribe(
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
-            quoted::TokenTree::Token(sp, tok) => {
+            quoted::TokenTree::Token(token) => {
                 let mut marker = Marker(cx.current_expansion.mark);
-                let mut tt = TokenTree::Token(sp, tok);
+                let mut tt = TokenTree::Token(token);
                 noop_visit_tt(&mut tt, &mut marker);
                 result.push(tt.into());
             }

View file

@@ -1958,9 +1958,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                     name,
                     template
                 ),
-                None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() {
-                    // All key-value attributes are restricted to meta-item syntax.
-                    attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+                None => if let Some(TokenTree::Token(token)) = attr.tokens.trees().next() {
+                    if token == token::Eq {
+                        // All key-value attributes are restricted to meta-item syntax.
+                        attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+                    }
                 }
             }
         }

View file

@@ -10,6 +10,7 @@
 #![deny(rust_2018_idioms)]
 #![deny(internal)]

+#![feature(bind_by_move_pattern_guards)]
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
 #![feature(nll)]

View file

@@ -9,7 +9,7 @@
 use crate::ast::*;
 use crate::source_map::{Spanned, respan};
-use crate::parse::token::{self, TokenKind};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::ptr::P;
 use crate::ThinVec;
 use crate::tokenstream::*;
@@ -576,9 +576,9 @@ pub fn noop_visit_arg<T: MutVisitor>(Arg { id, pat, ty }: &mut Arg, vis: &mut T)
 pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
     match tt {
-        TokenTree::Token(span, tok) => {
+        TokenTree::Token(Token { kind, span }) => {
+            vis.visit_token(kind);
             vis.visit_span(span);
-            vis.visit_token(tok);
         }
         TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
             vis.visit_span(open);

View file

@@ -157,7 +157,7 @@ impl<'a> Parser<'a> {
                    self.check(&token::OpenDelim(DelimToken::Brace)) {
             self.parse_token_tree().into()
         } else if self.eat(&token::Eq) {
-            let eq = TokenTree::Token(self.prev_span, token::Eq);
+            let eq = TokenTree::token(self.prev_span, token::Eq);
             let mut is_interpolated_expr = false;
             if let token::Interpolated(nt) = &self.token {
                 if let token::NtExpr(..) = **nt {

View file

@@ -1596,8 +1596,8 @@ mod tests {
                            "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                            .to_string());
             let id = Ident::from_str("fn");
-            assert_eq!(string_reader.next_token().kind, token::Comment);
-            assert_eq!(string_reader.next_token().kind, token::Whitespace);
+            assert_eq!(string_reader.next_token(), token::Comment);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             let tok1 = string_reader.next_token();
             let tok2 = Token {
                 kind: token::Ident(id, false),
@@ -1605,7 +1605,7 @@ mod tests {
             };
             assert_eq!(tok1.kind, tok2.kind);
             assert_eq!(tok1.span, tok2.span);
-            assert_eq!(string_reader.next_token().kind, token::Whitespace);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             // the 'main' id is already read:
             assert_eq!(string_reader.pos.clone(), BytePos(28));
             // read another token:
@@ -1625,7 +1625,7 @@ mod tests {
     // of tokens (stop checking after exhausting the expected vec)
     fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
         for expected_tok in &expected {
-            assert_eq!(&string_reader.next_token().kind, expected_tok);
+            assert_eq!(&string_reader.next_token(), expected_tok);
         }
     }
@@ -1683,7 +1683,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
                        mk_lit(token::Char, "a", None));
         })
     }
@@ -1693,7 +1693,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
                        mk_lit(token::Char, " ", None));
         })
     }
@@ -1703,7 +1703,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
                        mk_lit(token::Char, "\\n", None));
         })
     }
@@ -1713,7 +1713,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
                        token::Lifetime(Ident::from_str("'abc")));
         })
     }
@@ -1723,7 +1723,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
         })
     }
@@ -1735,10 +1735,10 @@ mod tests {
             let sh = mk_sess(sm.clone());
             macro_rules! test {
                 ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().kind,
+                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
                     // with a whitespace separator:
-                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().kind,
+                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, None));
                 }}
             }
@@ -1753,11 +1753,11 @@ mod tests {
             test!("1.0", Float, "1.0");
             test!("1.0e10", Float, "1.0e10");

-            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
                        mk_lit(token::Integer, "2", Some("us")));
-            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "raw", Some("suffix")));
-            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
         })
     }
@@ -1775,11 +1775,8 @@ mod tests {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
-            match lexer.next_token().kind {
-                token::Comment => {}
-                _ => panic!("expected a comment!"),
-            }
-            assert_eq!(lexer.next_token().kind, mk_lit(token::Char, "a", None));
+            assert_eq!(lexer.next_token(), token::Comment);
+            assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
         })
     }
@@ -1792,9 +1789,8 @@ mod tests {
             let comment = lexer.next_token();
             assert_eq!(comment.kind, token::Comment);
             assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
-            assert_eq!(lexer.next_token().kind, token::Whitespace);
-            assert_eq!(lexer.next_token().kind,
-                       token::DocComment(Symbol::intern("/// test")));
+            assert_eq!(lexer.next_token(), token::Whitespace);
+            assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
         })
     }
 }

View file

@@ -203,7 +203,7 @@ impl<'a> TokenTreesReader<'a> {
                 Err(err)
             },
             _ => {
-                let tt = TokenTree::Token(self.span, self.token.clone());
+                let tt = TokenTree::token(self.span, self.token.clone());
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.

View file

@@ -261,7 +261,7 @@ impl Lit {
             token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
             _ => token::Literal(self.token),
         };
-        TokenTree::Token(self.span, token).into()
+        TokenTree::token(self.span, token).into()
     }
 }

View file

@@ -385,6 +385,7 @@ mod tests {
     use crate::ast::{self, Ident, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
+    use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
@@ -426,9 +427,9 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
-                Some(&TokenTree::Token(_, token::Not)),
-                Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
+                Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
+                Some(&TokenTree::Token(Token { kind: token::Not, .. })),
+                Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                 Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
             )
             if name_macro_rules.name == sym::macro_rules
@@ -438,7 +439,7 @@ mod tests {
                     (
                         3,
                         Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
-                        Some(&TokenTree::Token(_, token::FatArrow)),
+                        Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })),
                         Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                     )
                     if macro_delim == token::Paren => {
@@ -446,8 +447,8 @@ mod tests {
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
-                                Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                                Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                                Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
                             )
                             if first_delim == token::Paren && ident.name.as_str() == "a" => {},
                             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -456,8 +457,8 @@ mod tests {
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
-                                Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                                Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                                Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
                             )
                             if second_delim == token::Paren && ident.name.as_str() == "a" => {},
                             _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
@@ -477,16 +478,16 @@ mod tests {
         let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

         let expected = TokenStream::new(vec![
-            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+            TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
+            TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                 token::DelimToken::Paren,
                 TokenStream::new(vec![
-                    TokenTree::Token(sp(6, 7),
-                                     token::Ident(Ident::from_str("b"), false)).into(),
-                    TokenTree::Token(sp(8, 9), token::Colon).into(),
-                    TokenTree::Token(sp(10, 13),
-                                     token::Ident(Ident::from_str("i32"), false)).into(),
+                    TokenTree::token(sp(6, 7),
+                                     token::Ident(Ident::from_str("b"), false)).into(),
+                    TokenTree::token(sp(8, 9), token::Colon).into(),
+                    TokenTree::token(sp(10, 13),
+                                     token::Ident(Ident::from_str("i32"), false)).into(),
                 ]).into(),
             ).into(),
@@ -494,9 +495,9 @@ mod tests {
                 DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                 token::DelimToken::Brace,
                 TokenStream::new(vec![
-                    TokenTree::Token(sp(17, 18),
-                                     token::Ident(Ident::from_str("b"), false)).into(),
-                    TokenTree::Token(sp(18, 19), token::Semi).into(),
+                    TokenTree::token(sp(17, 18),
+                                     token::Ident(Ident::from_str("b"), false)).into(),
+                    TokenTree::token(sp(18, 19), token::Semi).into(),
                 ]).into(),
             ).into()
         ]);

View file

@@ -318,7 +318,7 @@ impl TokenCursor {
             }
             match tree {
-                TokenTree::Token(span, kind) => return Token { kind, span },
+                TokenTree::Token(token) => return token,
                 TokenTree::Delimited(sp, delim, tts) => {
                     let frame = TokenCursorFrame::new(sp, delim, &tts);
                     self.stack.push(mem::replace(&mut self.frame, frame));
@@ -353,9 +353,9 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
-                TokenTree::Token(sp, token::Eq),
-                TokenTree::Token(sp, token::TokenKind::lit(
+                TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+                TokenTree::token(sp, token::Eq),
+                TokenTree::token(sp, token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
                 )),
             ]
@@ -366,10 +366,10 @@ impl TokenCursor {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::Token(sp, token::Pound), body]
+                [TokenTree::token(sp, token::Pound), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
@@ -1052,7 +1052,7 @@ impl<'a> Parser<'a> {
         f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
             Some(tree) => match tree {
-                TokenTree::Token(_, tok) => tok,
+                TokenTree::Token(token) => token.kind,
                 TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
             },
             None => token::CloseDelim(self.token_cursor.frame.delim),
@@ -1065,7 +1065,7 @@ impl<'a> Parser<'a> {
         }
         match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Token(token)) => token.span,
             Some(TokenTree::Delimited(span, ..)) => span.entire(),
             None => self.look_ahead_span(dist - 1),
         }
@@ -2675,7 +2675,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
                 self.bump();
-                TokenTree::Token(span, token)
+                TokenTree::token(span, token)
             }
         }
     }
@@ -4344,7 +4344,7 @@ impl<'a> Parser<'a> {
         };
         TokenStream::new(vec![
             args.into(),
-            TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+            TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
             body.into(),
         ])
     } else {
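
The parser hunks above all follow one shape: a `(Span, TokenKind)` pair becomes a single `Token` value, and consumers project out `.kind` or `.span` as needed. A self-contained sketch of that shape, with toy stand-ins for the compiler's types (illustrative only, not compiler code):

    // Toy stand-ins; the point is the projection out of one bundled value.
    #[derive(Clone, Copy)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Copy)]
    enum TokenKind { Comma, Semi }

    struct Token { kind: TokenKind, span: Span }

    enum TokenTree {
        Token(Token),
        Delimited(Span),
    }

    // Before this commit a consumer matched `TokenTree::Token(span, kind)`
    // and had to thread two values around; now one `Token` travels together
    // and the caller picks the field it wants.
    fn tree_span(tree: &TokenTree) -> Span {
        match tree {
            TokenTree::Token(token) => token.span,
            TokenTree::Delimited(span) => *span,
        }
    }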


@@ -18,6 +18,7 @@ use log::info;
 use std::fmt;
 use std::mem;
+use std::ops::Deref;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
@@ -165,7 +166,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
     ].contains(&ident.name)
 }
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     Eq,
@@ -235,7 +236,7 @@ pub enum TokenKind {
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(TokenKind, 16);
-#[derive(Clone, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,
@@ -614,6 +615,14 @@ impl PartialEq<TokenKind> for Token {
     }
 }
+// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
+impl Deref for Token {
+    type Target = TokenKind;
+    fn deref(&self) -> &Self::Target {
+        &self.kind
+    }
+}
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
@@ -704,11 +713,11 @@ impl Nonterminal {
             }
             Nonterminal::NtIdent(ident, is_raw) => {
                 let token = Ident(ident, is_raw);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, token).into())
             }
             Nonterminal::NtLifetime(ident) => {
                 let token = Lifetime(ident);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, token).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -794,7 +803,7 @@ fn prepend_attrs(sess: &ParseSess,
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
             let token = Ident(ident, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+            brackets.push(tokenstream::TokenTree::token(ident.span, token));
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
@@ -808,7 +817,7 @@ fn prepend_attrs(sess: &ParseSess,
         // The span we list here for `#` and for `[ ... ]` are both wrong in
         // that it encompasses more than each token, but it hopefully is "good
         // enough" for now at least.
-        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(attr.span, Pound));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
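
The `Deref` impl above is a transitional shim: Rust's auto-deref in method resolution means every existing `TokenKind` method keeps working when called on a `Token`, so call sites can migrate gradually. A minimal sketch of the effect, with assumed toy types rather than the compiler's own definitions:

    use std::ops::Deref;

    enum TokenKind { Comma, Semi }

    impl TokenKind {
        fn is_comma(&self) -> bool {
            match self { TokenKind::Comma => true, _ => false }
        }
    }

    struct Token { kind: TokenKind }

    impl Deref for Token {
        type Target = TokenKind;
        fn deref(&self) -> &TokenKind { &self.kind }
    }

    fn demo(token: &Token) -> bool {
        // Auto-deref finds `TokenKind::is_comma` through `Token`, so old
        // call sites compile unchanged during the migration.
        token.is_comma()
    }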


@@ -724,10 +724,10 @@ pub trait PrintState<'a> {
     /// expression arguments as expressions). It can be done! I think.
     fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
         match tt {
-            TokenTree::Token(_, ref tk) => {
-                self.writer().word(token_to_string(tk))?;
-                match *tk {
-                    parse::token::DocComment(..) => {
+            TokenTree::Token(ref token) => {
+                self.writer().word(token_to_string(&token))?;
+                match token.kind {
+                    token::DocComment(..) => {
                         self.writer().hardbreak()
                     }
                     _ => Ok(())


@@ -16,7 +16,7 @@
 use crate::ext::base;
 use crate::ext::tt::{macro_parser, quoted};
 use crate::parse::Directory;
-use crate::parse::token::{self, DelimToken, TokenKind};
+use crate::parse::token::{self, DelimToken, Token, TokenKind};
 use crate::print::pprust;
 use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
@@ -44,7 +44,7 @@ use std::{fmt, iter, mem};
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
     /// A single token
-    Token(Span, token::TokenKind),
+    Token(Token),
     /// A delimited sequence of token trees
     Delimited(DelimSpan, DelimToken, TokenStream),
 }
@@ -53,8 +53,7 @@ pub enum TokenTree {
 #[cfg(parallel_compiler)]
 fn _dummy()
 where
-    Span: Send + Sync,
-    token::TokenKind: Send + Sync,
+    Token: Send + Sync,
     DelimSpan: Send + Sync,
     DelimToken: Send + Sync,
     TokenStream: Send + Sync,
@@ -86,12 +85,11 @@ impl TokenTree {
     /// Checks if this TokenTree is equal to the other, regardless of span information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
-            (&TokenTree::Delimited(_, delim, ref tts),
-             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+            (TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind,
+            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.eq_unspanned(&tts2)
             }
-            (_, _) => false,
+            _ => false,
         }
     }
@@ -102,37 +100,36 @@ impl TokenTree {
     // different method.
     pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => {
-                tk.probably_equal_for_proc_macro(tk2)
+            (TokenTree::Token(token), TokenTree::Token(token2)) => {
+                token.probably_equal_for_proc_macro(token2)
             }
-            (&TokenTree::Delimited(_, delim, ref tts),
-             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
             }
-            (_, _) => false,
+            _ => false,
         }
     }
     /// Retrieves the TokenTree's span.
     pub fn span(&self) -> Span {
-        match *self {
-            TokenTree::Token(sp, _) => sp,
+        match self {
+            TokenTree::Token(token) => token.span,
             TokenTree::Delimited(sp, ..) => sp.entire(),
         }
     }
     /// Modify the `TokenTree`'s span in-place.
     pub fn set_span(&mut self, span: Span) {
-        match *self {
-            TokenTree::Token(ref mut sp, _) => *sp = span,
-            TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span),
+        match self {
+            TokenTree::Token(token) => token.span = span,
+            TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
         }
     }
     /// Indicates if the stream is a token that is equal to the provided token.
     pub fn eq_token(&self, t: TokenKind) -> bool {
-        match *self {
-            TokenTree::Token(_, ref tk) => *tk == t,
+        match self {
+            TokenTree::Token(token) => *token == t,
             _ => false,
         }
     }
@@ -141,6 +138,10 @@ impl TokenTree {
         TokenStream::new(vec![(self, Joint)])
     }
+    pub fn token(span: Span, kind: TokenKind) -> TokenTree {
+        TokenTree::Token(Token { kind, span })
+    }
     /// Returns the opening delimiter as a token tree.
     pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
         let open_span = if span.is_dummy() {
@@ -148,7 +149,7 @@ impl TokenTree {
         } else {
             span.with_hi(span.lo() + BytePos(delim.len() as u32))
         };
-        TokenTree::Token(open_span, token::OpenDelim(delim))
+        TokenTree::token(open_span, token::OpenDelim(delim))
     }
     /// Returns the closing delimiter as a token tree.
@@ -158,7 +159,7 @@ impl TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(delim.len() as u32))
         };
-        TokenTree::Token(close_span, token::CloseDelim(delim))
+        TokenTree::token(close_span, token::CloseDelim(delim))
     }
 }
@@ -201,18 +202,17 @@ impl TokenStream {
         while let Some((pos, ts)) = iter.next() {
             if let Some((_, next)) = iter.peek() {
                 let sp = match (&ts, &next) {
-                    (_, (TokenTree::Token(_, token::Comma), _)) => continue,
-                    ((TokenTree::Token(sp, token_left), NonJoint),
-                     (TokenTree::Token(_, token_right), _))
+                    (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                    ((TokenTree::Token(token_left), NonJoint), (TokenTree::Token(token_right), _))
                     if ((token_left.is_ident() && !token_left.is_reserved_ident())
                         || token_left.is_lit()) &&
                         ((token_right.is_ident() && !token_right.is_reserved_ident())
-                        || token_right.is_lit()) => *sp,
+                        || token_right.is_lit()) => token_left.span,
                     ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
                     _ => continue,
                 };
                 let sp = sp.shrink_to_hi();
-                let comma = (TokenTree::Token(sp, token::Comma), NonJoint);
+                let comma = (TokenTree::token(sp, token::Comma), NonJoint);
                 suggestion = Some((pos, comma, sp));
             }
         }
@@ -241,12 +241,6 @@ impl From<TokenTree> for TreeAndJoint {
     }
 }
-impl From<TokenKind> for TokenStream {
-    fn from(token: TokenKind) -> TokenStream {
-        TokenTree::Token(DUMMY_SP, token).into()
-    }
-}
 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
         TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
@@ -349,22 +343,25 @@ impl TokenStream {
     // streams, making a comparison between a token stream generated from an
     // AST and a token stream which was parsed into an AST more reliable.
     fn semantic_tree(tree: &TokenTree) -> bool {
-        match tree {
-            // The pretty printer tends to add trailing commas to
-            // everything, and in particular, after struct fields.
-            | TokenTree::Token(_, token::Comma)
-            // The pretty printer emits `NoDelim` as whitespace.
-            | TokenTree::Token(_, token::OpenDelim(DelimToken::NoDelim))
-            | TokenTree::Token(_, token::CloseDelim(DelimToken::NoDelim))
-            // The pretty printer collapses many semicolons into one.
-            | TokenTree::Token(_, token::Semi)
-            // The pretty printer collapses whitespace arbitrarily and can
-            // introduce whitespace from `NoDelim`.
-            | TokenTree::Token(_, token::Whitespace)
-            // The pretty printer can turn `$crate` into `::crate_name`
-            | TokenTree::Token(_, token::ModSep) => false,
-            _ => true
+        if let TokenTree::Token(token) = tree {
+            if let
+                // The pretty printer tends to add trailing commas to
+                // everything, and in particular, after struct fields.
+                | token::Comma
+                // The pretty printer emits `NoDelim` as whitespace.
+                | token::OpenDelim(DelimToken::NoDelim)
+                | token::CloseDelim(DelimToken::NoDelim)
+                // The pretty printer collapses many semicolons into one.
+                | token::Semi
+                // The pretty printer collapses whitespace arbitrarily and can
+                // introduce whitespace from `NoDelim`.
+                | token::Whitespace
+                // The pretty printer can turn `$crate` into `::crate_name`
+                | token::ModSep = token.kind {
+                return false;
+            }
         }
+        true
     }
     let mut t1 = self.trees().filter(semantic_tree);
@@ -430,13 +427,13 @@ impl TokenStreamBuilder {
     pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
         let stream = stream.into();
         let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
-        if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint {
-            if let Some((TokenTree::Token(span, tok), is_joint)) = stream.first_tree_and_joint() {
-                if let Some(glued_tok) = last_tok.glue(tok) {
+        if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
+            if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
+                if let Some(glued_tok) = last_token.kind.glue(token.kind) {
                     let last_stream = self.0.pop().unwrap();
                     self.push_all_but_last_tree(&last_stream);
-                    let glued_span = last_span.to(span);
-                    let glued_tt = TokenTree::Token(glued_span, glued_tok);
+                    let glued_span = last_token.span.to(token.span);
+                    let glued_tt = TokenTree::token(glued_span, glued_tok);
                     let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
                     self.0.push(glued_tokenstream);
                     self.push_all_but_first_tree(&stream);
@@ -663,7 +660,7 @@ mod tests {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
+                TokenTree::token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
             let test2 = string_to_ts("foo(bar::baz)");
             assert_eq!(test0.is_empty(), true);
@@ -676,9 +673,9 @@ mod tests {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::Token(sp(0, 1), token::Dot).joint());
-            builder.push(TokenTree::Token(sp(1, 2), token::Dot).joint());
-            builder.push(TokenTree::Token(sp(2, 3), token::Dot));
+            builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
+            builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
+            builder.push(TokenTree::token(sp(2, 3), token::Dot));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);
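
Dropping the `impl From<TokenKind> for TokenStream` above is deliberate: a bare kind no longer carries enough information to build a tree, so callers must supply a span explicitly through the new `TokenTree::token(span, kind)` constructor. A sketch of the replacement pattern, grounded in the `assert.rs` hunk later in this commit:

    // Was: `TokenStream::from(kind)` with an implicit DUMMY_SP baked into
    // the `From` impl. Now the span is visible at the construction site
    // (DUMMY_SP here, a real span wherever one is available).
    fn kind_to_stream(kind: TokenKind) -> TokenStream {
        TokenTree::token(DUMMY_SP, kind).into()
    }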


@@ -855,7 +855,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
 pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
     match tt {
-        TokenTree::Token(_, tok) => visitor.visit_token(tok),
+        TokenTree::Token(token) => visitor.visit_token(token.kind),
         TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
     }
 }


@@ -9,7 +9,8 @@ use errors::DiagnosticBuilder;
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::{self, token};
+use syntax::parse;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::ast::AsmDialect;
@@ -86,8 +87,8 @@ fn parse_inline_asm<'a>(
     let first_colon = tts.iter()
         .position(|tt| {
             match *tt {
-                tokenstream::TokenTree::Token(_, token::Colon) |
-                tokenstream::TokenTree::Token(_, token::ModSep) => true,
+                tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
+                tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
                 _ => false,
             }
         })
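
With the variant payload now a struct, call sites that only care about the kind match with a struct pattern and `..` to ignore the span, as in the hunk above. The same shape as a standalone predicate (a sketch using the types this diff defines, not code from the commit):

    // Matches the new call-site style: bind nothing, test only the kind.
    fn is_colon_or_modsep(tt: &tokenstream::TokenTree) -> bool {
        match tt {
            tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
            tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
            _ => false,
        }
    }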


@@ -29,7 +29,7 @@ pub fn expand_assert<'cx>(
     let panic_call = Mac_ {
         path: Path::from_ident(Ident::new(sym::panic, sp)),
         tts: custom_message.unwrap_or_else(|| {
-            TokenStream::from(TokenTree::Token(
+            TokenStream::from(TokenTree::token(
                 DUMMY_SP,
                 TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",


@@ -3,7 +3,7 @@ use rustc_data_structures::thin_vec::ThinVec;
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax_pos::Span;
 use syntax_pos::symbol::{Symbol, sym};
@@ -30,7 +30,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                TokenTree::Token(_, token::Comma) => {}
+                TokenTree::Token(Token { kind: token::Comma, .. }) => {}
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::any(sp);
@@ -38,7 +38,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
             }
         } else {
             match *e {
-                TokenTree::Token(_, token::Ident(ident, _)) =>
+                TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) =>
                     res_str.push_str(&ident.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");


@@ -69,7 +69,7 @@ impl MultiItemModifier for ProcMacroDerive {
         MarkAttrs(&self.attrs).visit_item(&item);
         let token = token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
+        let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
         let server = proc_macro_server::Rustc::new(ecx);
         let stream = match self.client.run(&EXEC_STRATEGY, server, input) {


@@ -55,7 +55,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
         use syntax::parse::token::*;
         let joint = is_joint == Joint;
-        let (span, token) = match tree {
+        let Token { kind, span } = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
                 return TokenTree::Group(Group {
@@ -64,7 +64,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     span,
                 });
             }
-            tokenstream::TokenTree::Token(span, token) => (span, token),
+            tokenstream::TokenTree::Token(token) => token,
         };
         macro_rules! tt {
@@ -93,7 +93,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
             }};
         }
-        match token {
+        match kind {
             Eq => op!('='),
             Lt => op!('<'),
             Le => op!('<', '='),
@@ -164,7 +164,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]
                 .into_iter()
-                .map(|token| tokenstream::TokenTree::Token(span, token))
+                .map(|kind| tokenstream::TokenTree::token(span, kind))
                 .collect();
                 stack.push(TokenTree::Group(Group {
                     delimiter: Delimiter::Bracket,
@@ -212,7 +212,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                 let token = Ident(ast::Ident::new(sym, span), is_raw);
-                return tokenstream::TokenTree::Token(span, token).into();
+                return tokenstream::TokenTree::token(span, token).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -221,8 +221,8 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, integer);
+                let a = tokenstream::TokenTree::token(span, minus);
+                let b = tokenstream::TokenTree::token(span, integer);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -232,16 +232,16 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, float);
+                let a = tokenstream::TokenTree::token(span, minus);
+                let b = tokenstream::TokenTree::token(span, float);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::Token(span, Literal(lit)).into()
+                return tokenstream::TokenTree::token(span, Literal(lit)).into()
             }
         };
-        let token = match ch {
+        let kind = match ch {
             '=' => Eq,
             '<' => Lt,
             '>' => Gt,
@@ -267,7 +267,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
             _ => unreachable!(),
         };
-        let tree = tokenstream::TokenTree::Token(span, token);
+        let tree = tokenstream::TokenTree::token(span, kind);
         TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }
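
Where both fields are needed at once, the old tuple binding `let (span, token) = ...` becomes a struct destructuring of `Token`, as at the top of this file's changes. A toy sketch of the pattern (illustrative types, not the compiler's):

    struct Span(u32);
    enum TokenKind { Dot }
    struct Token { kind: TokenKind, span: Span }

    fn split(token: Token) -> (TokenKind, Span) {
        // One pattern peels off both fields; there are no parallel
        // (span, kind) tuples to keep in sync anymore.
        let Token { kind, span } = token;
        (kind, span)
    }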


@@ -17,10 +17,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
     }
     match (tt.len(), tt.first()) {
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
+        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => {
             cx.set_trace_macros(true);
         }
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
+        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),