Always use token kinds through token module rather than Token type
parent daf1ed0e98
commit eac3846b65
15 changed files with 129 additions and 129 deletions
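
The change is purely stylistic: the `Token` variants are re-exported from the `parse::token` module (both spellings name the same variant, as the hunks below show), and this commit standardizes every call site on the module path, keeping `Token` only where the type itself is named. A minimal self-contained sketch of the pattern follows; the module layout mimics the diff, but the enum and its variants are illustrative stand-ins, not the compiler's real definitions.

    // Illustrative sketch only: a `token`-style module whose enum variants
    // are re-exported, so call sites can write `token::Eq` instead of
    // `Token::Eq`. The enum is a stand-in, not the compiler's Token type.
    mod token {
        #[derive(Debug, PartialEq)]
        pub enum Token {
            Eq,
            Comma,
            Semi,
            Eof,
        }

        // The re-export that makes `token::Eq` resolve at call sites.
        pub use self::Token::*;
    }

    use crate::token::Token;

    // Match through the module path, in the style this commit enforces;
    // the type name `Token` still appears in the signature.
    fn describe(t: &Token) -> &'static str {
        match *t {
            token::Eq => "`=`",
            token::Comma => "`,`",
            token::Semi => "`;`",
            token::Eof => "end of input",
        }
    }

    fn main() {
        assert_eq!(describe(&token::Semi), "`;`");
        println!("{}", describe(&token::Eof));
    }

With that split, `Token` is needed only where the type is named (signatures, bounds), while patterns and constructors consistently use `token::…`, which is exactly what the hunks below apply.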
@@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
 use syntax::std_inject;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::Token;
+use syntax::parse::token::{self, Token};
 use syntax::visit::{self, Visitor};
 use syntax_pos::{DUMMY_SP, edition, Span};
 
@@ -1339,7 +1339,7 @@ impl<'a> LoweringContext<'a> {
 
     fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
         match token {
-            Token::Interpolated(nt) => {
+            token::Interpolated(nt) => {
                 let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
                 self.lower_token_stream(tts)
             }
@@ -326,7 +326,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
     }
 
     fn visit_token(&mut self, t: Token) {
-        if let Token::Interpolated(nt) = t {
+        if let token::Interpolated(nt) = t {
             if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {
                     self.visit_macro_invoc(expr.id);
@@ -313,60 +313,60 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
 ) {
     mem::discriminant(token).hash_stable(hcx, hasher);
     match *token {
-        token::Token::Eq |
-        token::Token::Lt |
-        token::Token::Le |
-        token::Token::EqEq |
-        token::Token::Ne |
-        token::Token::Ge |
-        token::Token::Gt |
-        token::Token::AndAnd |
-        token::Token::OrOr |
-        token::Token::Not |
-        token::Token::Tilde |
-        token::Token::At |
-        token::Token::Dot |
-        token::Token::DotDot |
-        token::Token::DotDotDot |
-        token::Token::DotDotEq |
-        token::Token::Comma |
-        token::Token::Semi |
-        token::Token::Colon |
-        token::Token::ModSep |
-        token::Token::RArrow |
-        token::Token::LArrow |
-        token::Token::FatArrow |
-        token::Token::Pound |
-        token::Token::Dollar |
-        token::Token::Question |
-        token::Token::SingleQuote |
-        token::Token::Whitespace |
-        token::Token::Comment |
-        token::Token::Eof => {}
+        token::Eq |
+        token::Lt |
+        token::Le |
+        token::EqEq |
+        token::Ne |
+        token::Ge |
+        token::Gt |
+        token::AndAnd |
+        token::OrOr |
+        token::Not |
+        token::Tilde |
+        token::At |
+        token::Dot |
+        token::DotDot |
+        token::DotDotDot |
+        token::DotDotEq |
+        token::Comma |
+        token::Semi |
+        token::Colon |
+        token::ModSep |
+        token::RArrow |
+        token::LArrow |
+        token::FatArrow |
+        token::Pound |
+        token::Dollar |
+        token::Question |
+        token::SingleQuote |
+        token::Whitespace |
+        token::Comment |
+        token::Eof => {}
 
-        token::Token::BinOp(bin_op_token) |
-        token::Token::BinOpEq(bin_op_token) => {
+        token::BinOp(bin_op_token) |
+        token::BinOpEq(bin_op_token) => {
             std_hash::Hash::hash(&bin_op_token, hasher);
         }
 
-        token::Token::OpenDelim(delim_token) |
-        token::Token::CloseDelim(delim_token) => {
+        token::OpenDelim(delim_token) |
+        token::CloseDelim(delim_token) => {
             std_hash::Hash::hash(&delim_token, hasher);
         }
-        token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
+        token::Literal(lit) => lit.hash_stable(hcx, hasher),
 
-        token::Token::Ident(ident, is_raw) => {
+        token::Ident(ident, is_raw) => {
             ident.name.hash_stable(hcx, hasher);
             is_raw.hash_stable(hcx, hasher);
         }
-        token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
+        token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
 
-        token::Token::Interpolated(_) => {
+        token::Interpolated(_) => {
             bug!("interpolated tokens should not be present in the HIR")
         }
 
-        token::Token::DocComment(val) |
-        token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
+        token::DocComment(val) |
+        token::Shebang(val) => val.hash_stable(hcx, hasher),
     }
 }
 
@@ -1053,7 +1053,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
     }
 
     fn visit_token(&mut self, t: Token) {
-        if let Token::Interpolated(nt) = t {
+        if let token::Interpolated(nt) = t {
             if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {
                     self.visit_invoc(expr.id);
@@ -465,7 +465,7 @@ impl MetaItem {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::Token(mod_sep_span, Token::ModSep).into());
+                idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into());
             }
             idents.push(TokenTree::Token(segment.ident.span,
                                          Token::from_ast_ident(segment.ident)).into());
@@ -480,10 +480,10 @@ impl MetaItem {
     {
         // FIXME: Share code with `parse_path`.
         let path = match tokens.next() {
-            Some(TokenTree::Token(span, token @ Token::Ident(..))) |
-            Some(TokenTree::Token(span, token @ Token::ModSep)) => 'arm: {
-                let mut segments = if let Token::Ident(ident, _) = token {
-                    if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
+            Some(TokenTree::Token(span, token @ token::Ident(..))) |
+            Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: {
+                let mut segments = if let token::Ident(ident, _) = token {
+                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
                         tokens.next();
                         vec![PathSegment::from_ident(ident.with_span_pos(span))]
                     } else {
@@ -494,12 +494,12 @@ impl MetaItem {
                 };
                 loop {
                     if let Some(TokenTree::Token(span,
-                                                 Token::Ident(ident, _))) = tokens.next() {
+                                                 token::Ident(ident, _))) = tokens.next() {
                         segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
+                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
                         tokens.next();
                     } else {
                         break;
@@ -508,7 +508,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt {
+            Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),
@@ -533,7 +533,7 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
+                let mut vec = vec![TokenTree::Token(span, token::Eq).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -541,7 +541,7 @@ impl MetaItemKind {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::Token(span, Token::Comma).into());
+                        tokens.push(TokenTree::Token(span, token::Comma).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
@@ -579,7 +579,7 @@ impl MetaItemKind {
             let item = NestedMetaItem::from_tokens(&mut tokens)?;
             result.push(item);
             match tokens.next() {
-                None | Some(TokenTree::Token(_, Token::Comma)) => {}
+                None | Some(TokenTree::Token(_, token::Comma)) => {}
                 _ => return None,
             }
         }
@@ -10,7 +10,7 @@ use crate::ext::placeholders::{placeholder, PlaceholderExpander};
 use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
 use crate::mut_visit::*;
 use crate::parse::{DirectoryOwnership, PResult, ParseSess};
-use crate::parse::token::{self, Token};
+use crate::parse::token;
 use crate::parse::parser::Parser;
 use crate::ptr::P;
 use crate::symbol::Symbol;
@@ -585,7 +585,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
+                let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
@@ -835,12 +835,12 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool {
         sym::literal => token.can_begin_literal_or_bool(),
         sym::vis => match *token {
             // The follow-set of :vis + "priv" keyword + interpolated
-            Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
+            token::Comma | token::Ident(..) | token::Interpolated(_) => true,
             _ => token.can_begin_type(),
         },
         sym::block => match *token {
-            Token::OpenDelim(token::Brace) => true,
-            Token::Interpolated(ref nt) => match **nt {
+            token::OpenDelim(token::Brace) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtItem(_)
                 | token::NtPat(_)
                 | token::NtTy(_)
@@ -853,32 +853,32 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool {
             _ => false,
         },
         sym::path | sym::meta => match *token {
-            Token::ModSep | Token::Ident(..) => true,
-            Token::Interpolated(ref nt) => match **nt {
+            token::ModSep | token::Ident(..) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,
                 _ => may_be_ident(&nt),
             },
             _ => false,
         },
         sym::pat => match *token {
-            Token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
-            Token::OpenDelim(token::Paren) | // tuple pattern
-            Token::OpenDelim(token::Bracket) | // slice pattern
-            Token::BinOp(token::And) | // reference
-            Token::BinOp(token::Minus) | // negative literal
-            Token::AndAnd | // double reference
-            Token::Literal(..) | // literal
-            Token::DotDot | // range pattern (future compat)
-            Token::DotDotDot | // range pattern (future compat)
-            Token::ModSep | // path
-            Token::Lt | // path (UFCS constant)
-            Token::BinOp(token::Shl) => true, // path (double UFCS)
-            Token::Interpolated(ref nt) => may_be_ident(nt),
+            token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
+            token::OpenDelim(token::Paren) | // tuple pattern
+            token::OpenDelim(token::Bracket) | // slice pattern
+            token::BinOp(token::And) | // reference
+            token::BinOp(token::Minus) | // negative literal
+            token::AndAnd | // double reference
+            token::Literal(..) | // literal
+            token::DotDot | // range pattern (future compat)
+            token::DotDotDot | // range pattern (future compat)
+            token::ModSep | // path
+            token::Lt | // path (UFCS constant)
+            token::BinOp(token::Shl) => true, // path (double UFCS)
+            token::Interpolated(ref nt) => may_be_ident(nt),
             _ => false,
         },
         sym::lifetime => match *token {
-            Token::Lifetime(_) => true,
-            Token::Interpolated(ref nt) => match **nt {
+            token::Lifetime(_) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,
                 _ => false,
             },
@@ -225,7 +225,7 @@ pub fn transcribe(
                     result.push(tt.clone().into());
                 } else {
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    let token = TokenTree::Token(sp, Token::Interpolated(nt.clone()));
+                    let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
                     result.push(token.into());
                 }
             } else {
@@ -294,7 +294,7 @@ impl<'a> Parser<'a> {
                 Applicability::MaybeIncorrect,
             );
         }
-        let sp = if self.token == token::Token::Eof {
+        let sp = if self.token == token::Eof {
            // This is EOF, don't want to point at the following char, but rather the last token
            self.prev_span
        } else {
@@ -732,7 +732,7 @@ impl<'a> Parser<'a> {
         let this_token_str = self.this_token_descr();
         let (prev_sp, sp) = match (&self.token, self.subparser_name) {
             // Point at the end of the macro call when reaching end of macro arguments.
-            (token::Token::Eof, Some(_)) => {
+            (token::Eof, Some(_)) => {
                 let sp = self.sess.source_map().next_point(self.span);
                 (sp, sp)
             }
@@ -740,14 +740,14 @@ impl<'a> Parser<'a> {
             // This happens when the parser finds an empty TokenStream.
             _ if self.prev_span == DUMMY_SP => (self.span, self.span),
             // EOF, don't want to point at the following char, but rather the last token.
-            (token::Token::Eof, None) => (self.prev_span, self.span),
+            (token::Eof, None) => (self.prev_span, self.span),
             _ => (self.sess.source_map().next_point(self.prev_span), self.span),
         };
         let msg = format!(
             "expected `{}`, found {}",
             token_str,
             match (&self.token, self.subparser_name) {
-                (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
+                (token::Eof, Some(origin)) => format!("end of {}", origin),
                 _ => this_token_str,
             },
         );
@@ -1215,7 +1215,7 @@ impl<'a> Parser<'a> {
 
     crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
         let (span, msg) = match (&self.token, self.subparser_name) {
-            (&token::Token::Eof, Some(origin)) => {
+            (&token::Eof, Some(origin)) => {
                 let sp = self.sess.source_map().next_point(self.span);
                 (sp, format!("expected expression, found end of {}", origin))
             }
@@ -311,7 +311,7 @@ pub fn maybe_file_to_stream(
     for unmatched in unmatched_braces {
         let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+            token_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         db.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {
@@ -3359,7 +3359,7 @@ impl<'a> Parser<'a> {
         let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
                                                None)?;
         if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
-            if self.token == token::Token::Semi {
+            if self.token == token::Semi {
                 e.span_suggestion_short(
                     match_span,
                     "try removing this `match`",
@@ -5920,7 +5920,7 @@ impl<'a> Parser<'a> {
         while !self.eat(&token::CloseDelim(token::Brace)) {
             if let token::DocComment(_) = self.token {
                 if self.look_ahead(1,
-                                   |tok| tok == &token::Token::CloseDelim(token::Brace)) {
+                                   |tok| tok == &token::CloseDelim(token::Brace)) {
                     let mut err = self.diagnostic().struct_span_err_with_code(
                         self.span,
                         "found a documentation comment that doesn't document anything",
@@ -6796,7 +6796,7 @@ impl<'a> Parser<'a> {
         let mut replacement = vec![];
         let mut fixed_crate_name = false;
         // Accept `extern crate name-like-this` for better diagnostics
-        let dash = token::Token::BinOp(token::BinOpToken::Minus);
+        let dash = token::BinOp(token::BinOpToken::Minus);
         if self.token == dash { // Do not include `-` as part of the expected tokens list
             while self.eat(&dash) {
                 fixed_crate_name = true;
@@ -7869,7 +7869,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
     for unmatched in unclosed_delims.iter() {
         let mut err = handler.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+            pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         err.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {
@@ -691,11 +691,11 @@ impl Nonterminal {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Token::Ident(ident, is_raw);
+                let token = Ident(ident, is_raw);
                 Some(TokenTree::Token(ident.span, token).into())
             }
             Nonterminal::NtLifetime(ident) => {
-                let token = Token::Lifetime(ident);
+                let token = Lifetime(ident);
                 Some(TokenTree::Token(ident.span, token).into())
             }
             Nonterminal::NtTT(ref tt) => {
@@ -167,7 +167,7 @@ impl TokenTree {
 /// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s.
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
-/// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat.
+/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
 ///
 /// The use of `Option` is an optimization that avoids the need for an
 /// allocation when the stream is empty. However, it is not guaranteed that an
@@ -201,7 +201,7 @@ impl TokenStream {
         while let Some((pos, ts)) = iter.next() {
             if let Some((_, next)) = iter.peek() {
                 let sp = match (&ts, &next) {
-                    (_, (TokenTree::Token(_, token::Token::Comma), _)) => continue,
+                    (_, (TokenTree::Token(_, token::Comma), _)) => continue,
                     ((TokenTree::Token(sp, token_left), NonJoint),
                      (TokenTree::Token(_, token_right), _))
                     if ((token_left.is_ident() && !token_left.is_reserved_ident())
@@ -352,17 +352,17 @@ impl TokenStream {
             match tree {
                 // The pretty printer tends to add trailing commas to
                 // everything, and in particular, after struct fields.
-                | TokenTree::Token(_, Token::Comma)
+                | TokenTree::Token(_, token::Comma)
                 // The pretty printer emits `NoDelim` as whitespace.
-                | TokenTree::Token(_, Token::OpenDelim(DelimToken::NoDelim))
-                | TokenTree::Token(_, Token::CloseDelim(DelimToken::NoDelim))
+                | TokenTree::Token(_, token::OpenDelim(DelimToken::NoDelim))
+                | TokenTree::Token(_, token::CloseDelim(DelimToken::NoDelim))
                 // The pretty printer collapses many semicolons into one.
-                | TokenTree::Token(_, Token::Semi)
+                | TokenTree::Token(_, token::Semi)
                 // The pretty printer collapses whitespace arbitrarily and can
                 // introduce whitespace from `NoDelim`.
-                | TokenTree::Token(_, Token::Whitespace)
+                | TokenTree::Token(_, token::Whitespace)
                 // The pretty printer can turn `$crate` into `::crate_name`
-                | TokenTree::Token(_, Token::ModSep) => false,
+                | TokenTree::Token(_, token::ModSep) => false,
                 _ => true
             }
         }
@@ -664,7 +664,7 @@ mod tests {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into();
+                TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
             let test2 = string_to_ts("foo(bar::baz)");
 
             assert_eq!(test0.is_empty(), true);
@@ -677,9 +677,9 @@ mod tests {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::Token(sp(0, 1), Token::Dot).joint());
-            builder.push(TokenTree::Token(sp(1, 2), Token::Dot).joint());
-            builder.push(TokenTree::Token(sp(2, 3), Token::Dot));
+            builder.push(TokenTree::Token(sp(0, 1), token::Dot).joint());
+            builder.push(TokenTree::Token(sp(1, 2), token::Dot).joint());
+            builder.push(TokenTree::Token(sp(2, 3), token::Dot));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);
@@ -1,4 +1,4 @@
-use crate::parse::token::{Token, BinOpToken};
+use crate::parse::token::{self, Token, BinOpToken};
 use crate::symbol::kw;
 use crate::ast::{self, BinOpKind};
 
@@ -72,31 +72,31 @@ impl AssocOp {
     pub fn from_token(t: &Token) -> Option<AssocOp> {
         use AssocOp::*;
         match *t {
-            Token::BinOpEq(k) => Some(AssignOp(k)),
-            Token::Eq => Some(Assign),
-            Token::BinOp(BinOpToken::Star) => Some(Multiply),
-            Token::BinOp(BinOpToken::Slash) => Some(Divide),
-            Token::BinOp(BinOpToken::Percent) => Some(Modulus),
-            Token::BinOp(BinOpToken::Plus) => Some(Add),
-            Token::BinOp(BinOpToken::Minus) => Some(Subtract),
-            Token::BinOp(BinOpToken::Shl) => Some(ShiftLeft),
-            Token::BinOp(BinOpToken::Shr) => Some(ShiftRight),
-            Token::BinOp(BinOpToken::And) => Some(BitAnd),
-            Token::BinOp(BinOpToken::Caret) => Some(BitXor),
-            Token::BinOp(BinOpToken::Or) => Some(BitOr),
-            Token::Lt => Some(Less),
-            Token::Le => Some(LessEqual),
-            Token::Ge => Some(GreaterEqual),
-            Token::Gt => Some(Greater),
-            Token::EqEq => Some(Equal),
-            Token::Ne => Some(NotEqual),
-            Token::AndAnd => Some(LAnd),
-            Token::OrOr => Some(LOr),
-            Token::DotDot => Some(DotDot),
-            Token::DotDotEq => Some(DotDotEq),
+            token::BinOpEq(k) => Some(AssignOp(k)),
+            token::Eq => Some(Assign),
+            token::BinOp(BinOpToken::Star) => Some(Multiply),
+            token::BinOp(BinOpToken::Slash) => Some(Divide),
+            token::BinOp(BinOpToken::Percent) => Some(Modulus),
+            token::BinOp(BinOpToken::Plus) => Some(Add),
+            token::BinOp(BinOpToken::Minus) => Some(Subtract),
+            token::BinOp(BinOpToken::Shl) => Some(ShiftLeft),
+            token::BinOp(BinOpToken::Shr) => Some(ShiftRight),
+            token::BinOp(BinOpToken::And) => Some(BitAnd),
+            token::BinOp(BinOpToken::Caret) => Some(BitXor),
+            token::BinOp(BinOpToken::Or) => Some(BitOr),
+            token::Lt => Some(Less),
+            token::Le => Some(LessEqual),
+            token::Ge => Some(GreaterEqual),
+            token::Gt => Some(Greater),
+            token::EqEq => Some(Equal),
+            token::Ne => Some(NotEqual),
+            token::AndAnd => Some(LAnd),
+            token::OrOr => Some(LOr),
+            token::DotDot => Some(DotDot),
+            token::DotDotEq => Some(DotDotEq),
             // DotDotDot is no longer supported, but we need some way to display the error
-            Token::DotDotDot => Some(DotDotEq),
-            Token::Colon => Some(Colon),
+            token::DotDotDot => Some(DotDotEq),
+            token::Colon => Some(Colon),
             _ if t.is_keyword(kw::As) => Some(As),
             _ => None
         }
@@ -8,7 +8,7 @@ use syntax::attr::{mark_used, mark_known};
 use syntax::source_map::Span;
 use syntax::ext::base::*;
 use syntax::parse;
-use syntax::parse::token::{self, Token};
+use syntax::parse::token;
 use syntax::tokenstream;
 use syntax::visit::Visitor;
 use syntax_pos::DUMMY_SP;
@@ -68,7 +68,7 @@ impl MultiItemModifier for ProcMacroDerive {
         // Mark attributes as known, and used.
         MarkAttrs(&self.attrs).visit_item(&item);
 
-        let token = Token::Interpolated(Lrc::new(token::NtItem(item)));
+        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
         let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
 
         let server = proc_macro_server::Rustc::new(ecx);