Always use token kinds through token module rather than Token type
parent daf1ed0e98
commit eac3846b65
15 changed files with 129 additions and 129 deletions
@@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
 use syntax::std_inject;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::Token;
+use syntax::parse::token::{self, Token};
 use syntax::visit::{self, Visitor};
 use syntax_pos::{DUMMY_SP, edition, Span};

@@ -1339,7 +1339,7 @@ impl<'a> LoweringContext<'a> {

     fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
         match token {
-            Token::Interpolated(nt) => {
+            token::Interpolated(nt) => {
                 let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
                 self.lower_token_stream(tts)
             }

@@ -326,7 +326,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
     }

     fn visit_token(&mut self, t: Token) {
-        if let Token::Interpolated(nt) = t {
+        if let token::Interpolated(nt) = t {
             if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {
                     self.visit_macro_invoc(expr.id);

@@ -313,60 +313,60 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
 ) {
     mem::discriminant(token).hash_stable(hcx, hasher);
     match *token {
-        token::Token::Eq |
-        token::Token::Lt |
-        token::Token::Le |
-        token::Token::EqEq |
-        token::Token::Ne |
-        token::Token::Ge |
-        token::Token::Gt |
-        token::Token::AndAnd |
-        token::Token::OrOr |
-        token::Token::Not |
-        token::Token::Tilde |
-        token::Token::At |
-        token::Token::Dot |
-        token::Token::DotDot |
-        token::Token::DotDotDot |
-        token::Token::DotDotEq |
-        token::Token::Comma |
-        token::Token::Semi |
-        token::Token::Colon |
-        token::Token::ModSep |
-        token::Token::RArrow |
-        token::Token::LArrow |
-        token::Token::FatArrow |
-        token::Token::Pound |
-        token::Token::Dollar |
-        token::Token::Question |
-        token::Token::SingleQuote |
-        token::Token::Whitespace |
-        token::Token::Comment |
-        token::Token::Eof => {}
+        token::Eq |
+        token::Lt |
+        token::Le |
+        token::EqEq |
+        token::Ne |
+        token::Ge |
+        token::Gt |
+        token::AndAnd |
+        token::OrOr |
+        token::Not |
+        token::Tilde |
+        token::At |
+        token::Dot |
+        token::DotDot |
+        token::DotDotDot |
+        token::DotDotEq |
+        token::Comma |
+        token::Semi |
+        token::Colon |
+        token::ModSep |
+        token::RArrow |
+        token::LArrow |
+        token::FatArrow |
+        token::Pound |
+        token::Dollar |
+        token::Question |
+        token::SingleQuote |
+        token::Whitespace |
+        token::Comment |
+        token::Eof => {}

-        token::Token::BinOp(bin_op_token) |
-        token::Token::BinOpEq(bin_op_token) => {
+        token::BinOp(bin_op_token) |
+        token::BinOpEq(bin_op_token) => {
             std_hash::Hash::hash(&bin_op_token, hasher);
         }

-        token::Token::OpenDelim(delim_token) |
-        token::Token::CloseDelim(delim_token) => {
+        token::OpenDelim(delim_token) |
+        token::CloseDelim(delim_token) => {
             std_hash::Hash::hash(&delim_token, hasher);
         }
-        token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
+        token::Literal(lit) => lit.hash_stable(hcx, hasher),

-        token::Token::Ident(ident, is_raw) => {
+        token::Ident(ident, is_raw) => {
             ident.name.hash_stable(hcx, hasher);
             is_raw.hash_stable(hcx, hasher);
         }
-        token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
+        token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),

-        token::Token::Interpolated(_) => {
+        token::Interpolated(_) => {
             bug!("interpolated tokens should not be present in the HIR")
         }

-        token::Token::DocComment(val) |
-        token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
+        token::DocComment(val) |
+        token::Shebang(val) => val.hash_stable(hcx, hasher),
     }
 }

@@ -1053,7 +1053,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
     }

     fn visit_token(&mut self, t: Token) {
-        if let Token::Interpolated(nt) = t {
+        if let token::Interpolated(nt) = t {
             if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {
                     self.visit_invoc(expr.id);

@@ -465,7 +465,7 @@ impl MetaItem {
             let mod_sep_span = Span::new(last_pos,
                                          segment.ident.span.lo(),
                                          segment.ident.span.ctxt());
-            idents.push(TokenTree::Token(mod_sep_span, Token::ModSep).into());
+            idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into());
         }
         idents.push(TokenTree::Token(segment.ident.span,
                                      Token::from_ast_ident(segment.ident)).into());

@@ -480,10 +480,10 @@ impl MetaItem {
     {
         // FIXME: Share code with `parse_path`.
         let path = match tokens.next() {
-            Some(TokenTree::Token(span, token @ Token::Ident(..))) |
-            Some(TokenTree::Token(span, token @ Token::ModSep)) => 'arm: {
-                let mut segments = if let Token::Ident(ident, _) = token {
-                    if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
+            Some(TokenTree::Token(span, token @ token::Ident(..))) |
+            Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: {
+                let mut segments = if let token::Ident(ident, _) = token {
+                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
                         tokens.next();
                         vec![PathSegment::from_ident(ident.with_span_pos(span))]
                     } else {

@@ -494,12 +494,12 @@ impl MetaItem {
                 };
                 loop {
                     if let Some(TokenTree::Token(span,
-                                                 Token::Ident(ident, _))) = tokens.next() {
+                                                 token::Ident(ident, _))) = tokens.next() {
                         segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
+                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
                         tokens.next();
                     } else {
                         break;

@@ -508,7 +508,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt {
+            Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),

@@ -533,7 +533,7 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
+                let mut vec = vec![TokenTree::Token(span, token::Eq).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }

@@ -541,7 +541,7 @@ impl MetaItemKind {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::Token(span, Token::Comma).into());
+                        tokens.push(TokenTree::Token(span, token::Comma).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }

@@ -579,7 +579,7 @@ impl MetaItemKind {
             let item = NestedMetaItem::from_tokens(&mut tokens)?;
             result.push(item);
             match tokens.next() {
-                None | Some(TokenTree::Token(_, Token::Comma)) => {}
+                None | Some(TokenTree::Token(_, token::Comma)) => {}
                 _ => return None,
             }
         }

@@ -10,7 +10,7 @@ use crate::ext::placeholders::{placeholder, PlaceholderExpander};
 use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
 use crate::mut_visit::*;
 use crate::parse::{DirectoryOwnership, PResult, ParseSess};
-use crate::parse::token::{self, Token};
+use crate::parse::token;
 use crate::parse::parser::Parser;
 use crate::ptr::P;
 use crate::symbol::Symbol;

@@ -585,7 +585,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
+                let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),

@@ -835,12 +835,12 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool {
         sym::literal => token.can_begin_literal_or_bool(),
         sym::vis => match *token {
             // The follow-set of :vis + "priv" keyword + interpolated
-            Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
+            token::Comma | token::Ident(..) | token::Interpolated(_) => true,
             _ => token.can_begin_type(),
         },
         sym::block => match *token {
-            Token::OpenDelim(token::Brace) => true,
-            Token::Interpolated(ref nt) => match **nt {
+            token::OpenDelim(token::Brace) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtItem(_)
                 | token::NtPat(_)
                 | token::NtTy(_)

@@ -853,32 +853,32 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool {
             _ => false,
         },
         sym::path | sym::meta => match *token {
-            Token::ModSep | Token::Ident(..) => true,
-            Token::Interpolated(ref nt) => match **nt {
+            token::ModSep | token::Ident(..) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,
                 _ => may_be_ident(&nt),
             },
             _ => false,
         },
         sym::pat => match *token {
-            Token::Ident(..) |                  // box, ref, mut, and other identifiers (can stricten)
-            Token::OpenDelim(token::Paren) |    // tuple pattern
-            Token::OpenDelim(token::Bracket) |  // slice pattern
-            Token::BinOp(token::And) |          // reference
-            Token::BinOp(token::Minus) |        // negative literal
-            Token::AndAnd |                     // double reference
-            Token::Literal(..) |                // literal
-            Token::DotDot |                     // range pattern (future compat)
-            Token::DotDotDot |                  // range pattern (future compat)
-            Token::ModSep |                     // path
-            Token::Lt |                         // path (UFCS constant)
-            Token::BinOp(token::Shl) => true,   // path (double UFCS)
-            Token::Interpolated(ref nt) => may_be_ident(nt),
+            token::Ident(..) |                  // box, ref, mut, and other identifiers (can stricten)
+            token::OpenDelim(token::Paren) |    // tuple pattern
+            token::OpenDelim(token::Bracket) |  // slice pattern
+            token::BinOp(token::And) |          // reference
+            token::BinOp(token::Minus) |        // negative literal
+            token::AndAnd |                     // double reference
+            token::Literal(..) |                // literal
+            token::DotDot |                     // range pattern (future compat)
+            token::DotDotDot |                  // range pattern (future compat)
+            token::ModSep |                     // path
+            token::Lt |                         // path (UFCS constant)
+            token::BinOp(token::Shl) => true,   // path (double UFCS)
+            token::Interpolated(ref nt) => may_be_ident(nt),
             _ => false,
         },
         sym::lifetime => match *token {
-            Token::Lifetime(_) => true,
-            Token::Interpolated(ref nt) => match **nt {
+            token::Lifetime(_) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,
                 _ => false,
             },

@@ -225,7 +225,7 @@ pub fn transcribe(
                     result.push(tt.clone().into());
                 } else {
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    let token = TokenTree::Token(sp, Token::Interpolated(nt.clone()));
+                    let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
                     result.push(token.into());
                 }
             } else {

@@ -294,7 +294,7 @@ impl<'a> Parser<'a> {
                 Applicability::MaybeIncorrect,
             );
         }
-        let sp = if self.token == token::Token::Eof {
+        let sp = if self.token == token::Eof {
             // This is EOF, don't want to point at the following char, but rather the last token
             self.prev_span
         } else {

@@ -732,7 +732,7 @@ impl<'a> Parser<'a> {
         let this_token_str = self.this_token_descr();
         let (prev_sp, sp) = match (&self.token, self.subparser_name) {
             // Point at the end of the macro call when reaching end of macro arguments.
-            (token::Token::Eof, Some(_)) => {
+            (token::Eof, Some(_)) => {
                 let sp = self.sess.source_map().next_point(self.span);
                 (sp, sp)
             }

@@ -740,14 +740,14 @@ impl<'a> Parser<'a> {
             // This happens when the parser finds an empty TokenStream.
             _ if self.prev_span == DUMMY_SP => (self.span, self.span),
             // EOF, don't want to point at the following char, but rather the last token.
-            (token::Token::Eof, None) => (self.prev_span, self.span),
+            (token::Eof, None) => (self.prev_span, self.span),
             _ => (self.sess.source_map().next_point(self.prev_span), self.span),
         };
         let msg = format!(
             "expected `{}`, found {}",
             token_str,
             match (&self.token, self.subparser_name) {
-                (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
+                (token::Eof, Some(origin)) => format!("end of {}", origin),
                 _ => this_token_str,
             },
         );

@@ -1215,7 +1215,7 @@ impl<'a> Parser<'a> {

     crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
         let (span, msg) = match (&self.token, self.subparser_name) {
-            (&token::Token::Eof, Some(origin)) => {
+            (&token::Eof, Some(origin)) => {
                 let sp = self.sess.source_map().next_point(self.span);
                 (sp, format!("expected expression, found end of {}", origin))
             }

@@ -311,7 +311,7 @@ pub fn maybe_file_to_stream(
     for unmatched in unmatched_braces {
         let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+            token_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         db.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {

@@ -3359,7 +3359,7 @@ impl<'a> Parser<'a> {
         let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
                                                None)?;
         if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
-            if self.token == token::Token::Semi {
+            if self.token == token::Semi {
                 e.span_suggestion_short(
                     match_span,
                     "try removing this `match`",

@@ -5920,7 +5920,7 @@ impl<'a> Parser<'a> {
         while !self.eat(&token::CloseDelim(token::Brace)) {
             if let token::DocComment(_) = self.token {
                 if self.look_ahead(1,
-                    |tok| tok == &token::Token::CloseDelim(token::Brace)) {
+                    |tok| tok == &token::CloseDelim(token::Brace)) {
                     let mut err = self.diagnostic().struct_span_err_with_code(
                         self.span,
                         "found a documentation comment that doesn't document anything",

@@ -6796,7 +6796,7 @@ impl<'a> Parser<'a> {
         let mut replacement = vec![];
         let mut fixed_crate_name = false;
         // Accept `extern crate name-like-this` for better diagnostics
-        let dash = token::Token::BinOp(token::BinOpToken::Minus);
+        let dash = token::BinOp(token::BinOpToken::Minus);
         if self.token == dash { // Do not include `-` as part of the expected tokens list
             while self.eat(&dash) {
                 fixed_crate_name = true;

@@ -7869,7 +7869,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
     for unmatched in unclosed_delims.iter() {
         let mut err = handler.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+            pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         err.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {

@@ -691,11 +691,11 @@ impl Nonterminal {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Token::Ident(ident, is_raw);
+                let token = Ident(ident, is_raw);
                 Some(TokenTree::Token(ident.span, token).into())
             }
             Nonterminal::NtLifetime(ident) => {
-                let token = Token::Lifetime(ident);
+                let token = Lifetime(ident);
                 Some(TokenTree::Token(ident.span, token).into())
             }
             Nonterminal::NtTT(ref tt) => {

@@ -167,7 +167,7 @@ impl TokenTree {
 /// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s.
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
-/// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat.
+/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
 ///
 /// The use of `Option` is an optimization that avoids the need for an
 /// allocation when the stream is empty. However, it is not guaranteed that an

@@ -201,7 +201,7 @@ impl TokenStream {
         while let Some((pos, ts)) = iter.next() {
             if let Some((_, next)) = iter.peek() {
                 let sp = match (&ts, &next) {
-                    (_, (TokenTree::Token(_, token::Token::Comma), _)) => continue,
+                    (_, (TokenTree::Token(_, token::Comma), _)) => continue,
                     ((TokenTree::Token(sp, token_left), NonJoint),
                      (TokenTree::Token(_, token_right), _))
                     if ((token_left.is_ident() && !token_left.is_reserved_ident())

@@ -352,17 +352,17 @@ impl TokenStream {
             match tree {
                 // The pretty printer tends to add trailing commas to
                 // everything, and in particular, after struct fields.
-                | TokenTree::Token(_, Token::Comma)
+                | TokenTree::Token(_, token::Comma)
                 // The pretty printer emits `NoDelim` as whitespace.
-                | TokenTree::Token(_, Token::OpenDelim(DelimToken::NoDelim))
-                | TokenTree::Token(_, Token::CloseDelim(DelimToken::NoDelim))
+                | TokenTree::Token(_, token::OpenDelim(DelimToken::NoDelim))
+                | TokenTree::Token(_, token::CloseDelim(DelimToken::NoDelim))
                 // The pretty printer collapses many semicolons into one.
-                | TokenTree::Token(_, Token::Semi)
+                | TokenTree::Token(_, token::Semi)
                 // The pretty printer collapses whitespace arbitrarily and can
                 // introduce whitespace from `NoDelim`.
-                | TokenTree::Token(_, Token::Whitespace)
+                | TokenTree::Token(_, token::Whitespace)
                 // The pretty printer can turn `$crate` into `::crate_name`
-                | TokenTree::Token(_, Token::ModSep) => false,
+                | TokenTree::Token(_, token::ModSep) => false,
                 _ => true
             }
         }

@@ -664,7 +664,7 @@ mod tests {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into();
+                TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
             let test2 = string_to_ts("foo(bar::baz)");

             assert_eq!(test0.is_empty(), true);

@@ -677,9 +677,9 @@ mod tests {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::Token(sp(0, 1), Token::Dot).joint());
-            builder.push(TokenTree::Token(sp(1, 2), Token::Dot).joint());
-            builder.push(TokenTree::Token(sp(2, 3), Token::Dot));
+            builder.push(TokenTree::Token(sp(0, 1), token::Dot).joint());
+            builder.push(TokenTree::Token(sp(1, 2), token::Dot).joint());
+            builder.push(TokenTree::Token(sp(2, 3), token::Dot));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);

@@ -1,4 +1,4 @@
-use crate::parse::token::{Token, BinOpToken};
+use crate::parse::token::{self, Token, BinOpToken};
 use crate::symbol::kw;
 use crate::ast::{self, BinOpKind};

@@ -72,31 +72,31 @@ impl AssocOp {
     pub fn from_token(t: &Token) -> Option<AssocOp> {
         use AssocOp::*;
         match *t {
-            Token::BinOpEq(k) => Some(AssignOp(k)),
-            Token::Eq => Some(Assign),
-            Token::BinOp(BinOpToken::Star) => Some(Multiply),
-            Token::BinOp(BinOpToken::Slash) => Some(Divide),
-            Token::BinOp(BinOpToken::Percent) => Some(Modulus),
-            Token::BinOp(BinOpToken::Plus) => Some(Add),
-            Token::BinOp(BinOpToken::Minus) => Some(Subtract),
-            Token::BinOp(BinOpToken::Shl) => Some(ShiftLeft),
-            Token::BinOp(BinOpToken::Shr) => Some(ShiftRight),
-            Token::BinOp(BinOpToken::And) => Some(BitAnd),
-            Token::BinOp(BinOpToken::Caret) => Some(BitXor),
-            Token::BinOp(BinOpToken::Or) => Some(BitOr),
-            Token::Lt => Some(Less),
-            Token::Le => Some(LessEqual),
-            Token::Ge => Some(GreaterEqual),
-            Token::Gt => Some(Greater),
-            Token::EqEq => Some(Equal),
-            Token::Ne => Some(NotEqual),
-            Token::AndAnd => Some(LAnd),
-            Token::OrOr => Some(LOr),
-            Token::DotDot => Some(DotDot),
-            Token::DotDotEq => Some(DotDotEq),
+            token::BinOpEq(k) => Some(AssignOp(k)),
+            token::Eq => Some(Assign),
+            token::BinOp(BinOpToken::Star) => Some(Multiply),
+            token::BinOp(BinOpToken::Slash) => Some(Divide),
+            token::BinOp(BinOpToken::Percent) => Some(Modulus),
+            token::BinOp(BinOpToken::Plus) => Some(Add),
+            token::BinOp(BinOpToken::Minus) => Some(Subtract),
+            token::BinOp(BinOpToken::Shl) => Some(ShiftLeft),
+            token::BinOp(BinOpToken::Shr) => Some(ShiftRight),
+            token::BinOp(BinOpToken::And) => Some(BitAnd),
+            token::BinOp(BinOpToken::Caret) => Some(BitXor),
+            token::BinOp(BinOpToken::Or) => Some(BitOr),
+            token::Lt => Some(Less),
+            token::Le => Some(LessEqual),
+            token::Ge => Some(GreaterEqual),
+            token::Gt => Some(Greater),
+            token::EqEq => Some(Equal),
+            token::Ne => Some(NotEqual),
+            token::AndAnd => Some(LAnd),
+            token::OrOr => Some(LOr),
+            token::DotDot => Some(DotDot),
+            token::DotDotEq => Some(DotDotEq),
             // DotDotDot is no longer supported, but we need some way to display the error
-            Token::DotDotDot => Some(DotDotEq),
-            Token::Colon => Some(Colon),
+            token::DotDotDot => Some(DotDotEq),
+            token::Colon => Some(Colon),
             _ if t.is_keyword(kw::As) => Some(As),
             _ => None
         }

@@ -8,7 +8,7 @@ use syntax::attr::{mark_used, mark_known};
 use syntax::source_map::Span;
 use syntax::ext::base::*;
 use syntax::parse;
-use syntax::parse::token::{self, Token};
+use syntax::parse::token;
 use syntax::tokenstream;
 use syntax::visit::Visitor;
 use syntax_pos::DUMMY_SP;

@@ -68,7 +68,7 @@ impl MultiItemModifier for ProcMacroDerive {
         // Mark attributes as known, and used.
         MarkAttrs(&self.attrs).visit_item(&item);

-        let token = Token::Interpolated(Lrc::new(token::NtItem(item)));
+        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
         let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();

         let server = proc_macro_server::Rustc::new(ecx);