syntax: Remove duplicate span from token::Ident
commit f745e5f9b6
parent 4c5d773b4d
21 changed files with 181 additions and 184 deletions
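The change in a nutshell, as a hedged sketch (the types below are simplified stand-ins for illustration, not the actual rustc definitions): `token::Ident` previously carried a full `ast::Ident`, whose span duplicated the span already stored on the enclosing `Token`; after this commit the variant carries only the interned name, and code that still needs an `ast::Ident` rebuilds it from the name plus `Token::span`, e.g. through the new `Token::ident()` helper, while the old `TokenKind::ident()` becomes `ident_name()`.

// Hedged sketch only: `Span`, `Name`, `Ident`, `TokenKind`, `Token` below are
// simplified stand-ins, not the rustc types touched by this commit.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Span(u32, u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Name(u32); // an interned symbol, like ast::Name

#[derive(Clone, Copy, Debug)]
struct Ident { name: Name, span: Span }

// Before: the variant stored a whole Ident, so its span duplicated Token::span.
#[allow(dead_code)]
enum OldTokenKind {
    Ident(Ident, /* is_raw */ bool),
}

// After: the variant stores only the name; the span lives on Token alone.
enum TokenKind {
    Ident(Name, /* is_raw */ bool),
}

struct Token { kind: TokenKind, span: Span }

impl Token {
    // Callers that still need an Ident rebuild it from the name plus the token span.
    fn ident(&self) -> Option<(Ident, bool)> {
        match self.kind {
            TokenKind::Ident(name, is_raw) => Some((Ident { name, span: self.span }, is_raw)),
        }
    }
}

fn main() {
    let tok = Token { kind: TokenKind::Ident(Name(0), false), span: Span(3, 4) };
    let (ident, _is_raw) = tok.ident().unwrap();
    assert_eq!(ident.span, tok.span); // a single source of truth for the span
}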
@@ -353,8 +353,8 @@ impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
         }

         token::Literal(lit) => lit.hash_stable(hcx, hasher),

-        token::Ident(ident, is_raw) => {
-            ident.name.hash_stable(hcx, hasher);
+        token::Ident(name, is_raw) => {
+            name.hash_stable(hcx, hasher);
             is_raw.hash_stable(hcx, hasher);
         }
         token::Lifetime(name) => name.hash_stable(hcx, hasher),

@@ -325,8 +325,8 @@ impl<'a> Classifier<'a> {
             }

             // Keywords are also included in the identifier set.
-            token::Ident(ident, is_raw) => {
-                match ident.name {
+            token::Ident(name, is_raw) => {
+                match name {
                     kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord,

                     kw::SelfLower | kw::SelfUpper => Class::Self_,

@@ -482,19 +482,19 @@ impl MetaItem {
         let path = match tokens.next() {
             Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
             Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
-                let mut segments = if let token::Ident(ident, _) = kind {
+                let mut segments = if let token::Ident(name, _) = kind {
                     if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
                         tokens.next();
-                        vec![PathSegment::from_ident(ident.with_span_pos(span))]
+                        vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
-                        break 'arm Path::from_ident(ident.with_span_pos(span));
+                        break 'arm Path::from_ident(Ident::new(name, span));
                     }
                 } else {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() {
-                        segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) = tokens.next() {
+                        segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }

@@ -39,7 +39,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
    };

    ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
-        match diagnostics.get_mut(&code.name) {
+        match diagnostics.get_mut(&code) {
            // Previously used errors.
            Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => {
                ecx.struct_span_warn(span, &format!(

@@ -72,10 +72,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
        token_tree.get(1),
        token_tree.get(2)
    ) {
-        (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => {
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), None, None) => {
            (code, None)
        },
-        (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })),
+        (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })),
            Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
            Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
            (code, Some(symbol))

@@ -112,7 +112,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
        description,
        use_site: None
    };
-    if diagnostics.insert(code.name, info).is_some() {
+    if diagnostics.insert(code, info).is_some() {
        ecx.span_err(span, &format!(
            "diagnostic code {} already registered", code
        ));

@@ -140,13 +140,13 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                           token_tree: &[TokenTree])
                                           -> Box<dyn MacResult+'cx> {
    assert_eq!(token_tree.len(), 3);
-    let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
+    let (crate_name, ident) = match (&token_tree[0], &token_tree[2]) {
        (
            // Crate name.
-            &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }),
+            &TokenTree::Token(Token { kind: token::Ident(crate_name, _), .. }),
            // DIAGNOSTICS ident.
-            &TokenTree::Token(Token { kind: token::Ident(ref name, _), .. })
-        ) => (*&crate_name, name),
+            &TokenTree::Token(Token { kind: token::Ident(name, _), span })
+        ) => (crate_name, Ident::new(name, span)),
        _ => unreachable!()
    };

@@ -209,7 +209,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,

    MacEager::items(smallvec![
        P(ast::Item {
-            ident: *name,
+            ident,
            attrs: Vec::new(),
            id: ast::DUMMY_NODE_ID,
            node: ast::ItemKind::Const(

@@ -269,7 +269,7 @@ impl<F> TTMacroExpander for F
            if let token::Interpolated(nt) = &token.kind {
                if let token::NtIdent(ident, is_raw) = **nt {
                    *tt = tokenstream::TokenTree::token(ident.span,
-                                                        token::Ident(ident, is_raw));
+                                                        token::Ident(ident.name, is_raw));
                }
            }
        }

@@ -74,7 +74,7 @@ pub use NamedMatch::*;
 pub use ParseResult::*;
 use TokenTreeOrTokenTreeSlice::*;

-use crate::ast::Ident;
+use crate::ast::{Ident, Name};
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};

@@ -429,8 +429,8 @@ pub fn parse_failure_msg(tok: TokenKind) -> String {

 /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
 fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool {
-    if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
-        id1.name == id2.name && is_raw1 == is_raw2
+    if let (Some((name1, is_raw1)), Some((name2, is_raw2))) = (t1.ident_name(), t2.ident_name()) {
+        name1 == name2 && is_raw1 == is_raw2
     } else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) {
         name1 == name2
     } else {

@@ -466,8 +466,7 @@ fn inner_parse_loop<'root, 'tt>(
     next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
     eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    token: &TokenKind,
-    span: syntax_pos::Span,
+    token: &Token,
 ) -> ParseResult<()> {
     // Pop items from `cur_items` until it is empty.
     while let Some(mut item) = cur_items.pop() {

@@ -510,7 +509,7 @@ fn inner_parse_loop<'root, 'tt>(
                 // Add matches from this repetition to the `matches` of `up`
                 for idx in item.match_lo..item.match_hi {
                     let sub = item.matches[idx].clone();
-                    let span = DelimSpan::from_pair(item.sp_open, span);
+                    let span = DelimSpan::from_pair(item.sp_open, token.span);
                     new_pos.push_match(idx, MatchedSeq(sub, span));
                 }

@@ -598,7 +597,7 @@ fn inner_parse_loop<'root, 'tt>(
                 TokenTree::MetaVarDecl(_, _, id) => {
                     // Built-in nonterminals never start with these tokens,
                     // so we can eliminate them from consideration.
-                    if may_begin_with(id.name, token) {
+                    if may_begin_with(token, id.name) {
                         bb_items.push(item);
                     }
                 }

@@ -698,7 +697,6 @@ pub fn parse(
             &mut eof_items,
             &mut bb_items,
             &parser.token,
-            parser.span,
         ) {
             Success(_) => {}
             Failure(token, msg) => return Failure(token, msg),

@@ -806,10 +804,9 @@ pub fn parse(

 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> {
+fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> {
     match *token {
-        token::Ident(ident, is_raw) if ident.name != kw::Underscore =>
-            Some((ident, is_raw)),
+        token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
         _ => None,
     }
 }

@@ -818,7 +815,7 @@ fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> {
 ///
 /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
 /// token. Be conservative (return true) if not sure.
-fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
+fn may_begin_with(token: &Token, name: Name) -> bool {
     /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
     fn may_be_ident(nt: &token::Nonterminal) -> bool {
         match *nt {

@@ -830,14 +827,14 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
     match name {
         sym::expr => token.can_begin_expr(),
         sym::ty => token.can_begin_type(),
-        sym::ident => get_macro_ident(token).is_some(),
+        sym::ident => get_macro_name(token).is_some(),
         sym::literal => token.can_begin_literal_or_bool(),
-        sym::vis => match *token {
+        sym::vis => match token.kind {
             // The follow-set of :vis + "priv" keyword + interpolated
             token::Comma | token::Ident(..) | token::Interpolated(_) => true,
             _ => token.can_begin_type(),
         },
-        sym::block => match *token {
+        sym::block => match token.kind {
             token::OpenDelim(token::Brace) => true,
             token::Interpolated(ref nt) => match **nt {
                 token::NtItem(_)

@@ -851,7 +848,7 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
             },
             _ => false,
         },
-        sym::path | sym::meta => match *token {
+        sym::path | sym::meta => match token.kind {
             token::ModSep | token::Ident(..) => true,
             token::Interpolated(ref nt) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,

@@ -859,7 +856,7 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
             },
             _ => false,
         },
-        sym::pat => match *token {
+        sym::pat => match token.kind {
             token::Ident(..) |                  // box, ref, mut, and other identifiers (can stricten)
             token::OpenDelim(token::Paren) |    // tuple pattern
             token::OpenDelim(token::Bracket) |  // slice pattern

@@ -875,7 +872,7 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
             token::Interpolated(ref nt) => may_be_ident(nt),
             _ => false,
         },
-        sym::lifetime => match *token {
+        sym::lifetime => match token.kind {
             token::Lifetime(_) => true,
             token::Interpolated(ref nt) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,

@@ -883,7 +880,7 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
             },
             _ => false,
         },
-        _ => match *token {
+        _ => match token.kind {
             token::CloseDelim(_) => false,
             _ => true,
         },

@@ -929,10 +926,10 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal {
         sym::literal => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())),
         sym::ty => token::NtTy(panictry!(p.parse_ty())),
         // this could be handled like a token, since it is one
-        sym::ident => if let Some((ident, is_raw)) = get_macro_ident(&p.token) {
+        sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) {
             let span = p.span;
             p.bump();
-            token::NtIdent(Ident::new(ident.name, span), is_raw)
+            token::NtIdent(Ident::new(name, span), is_raw)
         } else {
             let token_str = pprust::token_to_string(&p.token);
             p.fatal(&format!("expected ident, found {}", &token_str)).emit();

@@ -1046,8 +1046,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                 match tok {
                     TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == kw::If ||
-                                           i.name == kw::In => IsInFollow::Yes,
+                        Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
                     _ => IsInFollow::No(tokens),

@@ -1064,8 +1063,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                         OpenDelim(token::DelimToken::Bracket) |
                         Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
                         BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == kw::As ||
-                                           i.name == kw::Where => IsInFollow::Yes,
+                        Ident(name, false) if name == kw::As ||
+                                              name == kw::Where => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
                     TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block =>

@@ -1092,9 +1091,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                 match tok {
                     TokenTree::Token(token) => match token.kind {
                         Comma => IsInFollow::Yes,
-                        Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
-                            IsInFollow::Yes,
-                        ref tok => if tok.can_begin_type() {
+                        Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
+                        _ => if token.can_begin_type() {
                             IsInFollow::Yes
                         } else {
                             IsInFollow::No(tokens)

@@ -323,10 +323,9 @@ where
             // metavariable that names the crate of the invocation.
             Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                 let (ident, is_raw) = token.ident().unwrap();
-                let span = token.span.with_lo(span.lo());
+                let span = ident.span.with_lo(span.lo());
                 if ident.name == kw::Crate && !is_raw {
-                    let ident = ast::Ident::new(kw::DollarCrate, ident.span);
-                    TokenTree::token(span, token::Ident(ident, is_raw))
+                    TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw))
                 } else {
                     TokenTree::MetaVar(span, ident)
                 }

@@ -598,7 +598,6 @@ pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &m
 // apply ident visitor if it's an ident, apply other visits to interpolated nodes
 pub fn noop_visit_token<T: MutVisitor>(t: &mut TokenKind, vis: &mut T) {
     match t {
-        token::Ident(id, _is_raw) => vis.visit_ident(id),
         token::Interpolated(nt) => {
             let mut nt = Lrc::make_mut(nt);
             vis.visit_interpolated(&mut nt);

@@ -201,12 +201,12 @@ impl<'a> Parser<'a> {
             self.span,
             &format!("expected identifier, found {}", self.this_token_descr()),
         );
-        if let token::Ident(ident, false) = &self.token.kind {
-            if ident.is_raw_guess() {
+        if let token::Ident(name, false) = self.token.kind {
+            if Ident::new(name, self.span).is_raw_guess() {
                 err.span_suggestion(
                     self.span,
                     "you can escape reserved keywords to use them as identifiers",
-                    format!("r#{}", ident),
+                    format!("r#{}", name),
                     Applicability::MaybeIncorrect,
                 );
             }

@@ -1,4 +1,4 @@
-use crate::ast::{self, Ident};
+use crate::ast;
 use crate::parse::ParseSess;
 use crate::parse::token::{self, Token, TokenKind};
 use crate::symbol::{sym, Symbol};

@@ -61,15 +61,6 @@ impl<'a> StringReader<'a> {
         (real, raw)
     }

-    fn mk_ident(&self, string: &str) -> Ident {
-        let mut ident = Ident::from_str(string);
-        if let Some(span) = self.override_span {
-            ident.span = span;
-        }
-
-        ident
-    }
-
     fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
         match res {
             Ok(tok) => tok,

@@ -858,17 +849,17 @@ impl<'a> StringReader<'a> {

             return Ok(self.with_str_from(start, |string| {
                 // FIXME: perform NFKC normalization here. (Issue #2253)
-                let ident = self.mk_ident(string);
+                let name = ast::Name::intern(string);

                 if is_raw_ident {
                     let span = self.mk_sp(raw_start, self.pos);
-                    if !ident.can_be_raw() {
-                        self.err_span(span, &format!("`{}` cannot be a raw identifier", ident));
+                    if !name.can_be_raw() {
+                        self.err_span(span, &format!("`{}` cannot be a raw identifier", name));
                     }
                     self.sess.raw_identifier_spans.borrow_mut().push(span);
                 }

-                token::Ident(ident, is_raw_ident)
+                token::Ident(name, is_raw_ident)
             }));
         }
     }

@@ -1567,12 +1558,11 @@ mod tests {
                 &sh,
                 "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                     .to_string());
-            let id = Ident::from_str("fn");
             assert_eq!(string_reader.next_token(), token::Comment);
             assert_eq!(string_reader.next_token(), token::Whitespace);
             let tok1 = string_reader.next_token();
             let tok2 = Token::new(
-                token::Ident(id, false),
+                token::Ident(Symbol::intern("fn"), false),
                 Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
             );
             assert_eq!(tok1.kind, tok2.kind);

@@ -1,6 +1,6 @@
 //! Code related to parsing literals.

-use crate::ast::{self, Ident, Lit, LitKind};
+use crate::ast::{self, Lit, LitKind};
 use crate::parse::parser::Parser;
 use crate::parse::PResult;
 use crate::parse::token::{self, Token, TokenKind};

@@ -230,8 +230,8 @@ impl Lit {
     /// Converts arbitrary token into an AST literal.
     crate fn from_token(token: &TokenKind, span: Span) -> Result<Lit, LitError> {
         let lit = match *token {
-            token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
-                token::Lit::new(token::Bool, ident.name, None),
+            token::Ident(name, false) if name == kw::True || name == kw::False =>
+                token::Lit::new(token::Bool, name, None),
             token::Literal(lit) =>
                 lit,
             token::Interpolated(ref nt) => {

@@ -258,7 +258,7 @@ impl Lit {
     /// Losslessly convert an AST literal into a token stream.
     crate fn tokens(&self) -> TokenStream {
         let token = match self.token.kind {
-            token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
+            token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
         TokenTree::token(self.span, token).into()

@@ -382,11 +382,12 @@ impl SeqSep {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::ast::{self, Ident, PatKind};
+    use crate::ast::{self, Name, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
     use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
+    use crate::symbol::{kw, sym};
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
     use crate::util::parser_testing::{string_to_expr, string_to_item};

@@ -418,8 +419,6 @@ mod tests {
     #[test]
     fn string_to_tts_macro () {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let tts: Vec<_> =
                 string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
             let tts: &[TokenTree] = &tts[..];

@@ -432,8 +431,7 @@ mod tests {
                     Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                     Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
                 )
-                if name_macro_rules.name == sym::macro_rules
-                && name_zip.name.as_str() == "zip" => {
+                if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => {
                     let tts = &macro_tts.trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                         (

@@ -448,9 +446,9 @@ mod tests {
                             (
                                 2,
                                 Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                                Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
+                                Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
                             )
-                            if first_delim == token::Paren && ident.name.as_str() == "a" => {},
+                            if first_delim == token::Paren && name.as_str() == "a" => {},
                             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                         }
                         let tts = &second_tts.trees().collect::<Vec<_>>();

@@ -458,9 +456,9 @@ mod tests {
                             (
                                 2,
                                 Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                                Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
+                                Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
                             )
-                            if second_delim == token::Paren && ident.name.as_str() == "a" => {},
+                            if second_delim == token::Paren && name.as_str() == "a" => {},
                             _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                         }
                     },

@@ -478,25 +476,22 @@ mod tests {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

             let expected = TokenStream::new(vec![
-                TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-                TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+                TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(),
+                TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(6, 7),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
+                        TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(),
                         TokenTree::token(sp(8, 9), token::Colon).into(),
-                        TokenTree::token(sp(10, 13),
-                                         token::Ident(Ident::from_str("i32"), false)).into(),
+                        TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(),
                     ]).into(),
                 ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(17, 18),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
+                        TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(),
                         TokenTree::token(sp(18, 19), token::Semi).into(),
                     ]).into(),
                 ).into()

@@ -604,8 +599,6 @@ mod tests {

     #[test] fn crlf_doc_comments() {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let sess = ParseSess::new(FilePathMapping::empty());

             let name_1 = FileName::Custom("crlf_source_1".to_string());

@@ -362,7 +362,7 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+                TokenTree::token(sp, token::Ident(sym::doc, false)),
                 TokenTree::token(sp, token::Eq),
                 TokenTree::token(sp, token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None

@@ -541,9 +541,9 @@ impl<'a> Parser<'a> {

     crate fn token_descr(&self) -> Option<&'static str> {
         Some(match &self.token.kind {
-            t if t.is_special_ident() => "reserved identifier",
-            t if t.is_used_keyword() => "keyword",
-            t if t.is_unused_keyword() => "reserved keyword",
+            _ if self.token.is_special_ident() => "reserved identifier",
+            _ if self.token.is_used_keyword() => "keyword",
+            _ if self.token.is_unused_keyword() => "reserved keyword",
             token::DocComment(..) => "doc comment",
             _ => return None,
         })

@@ -619,7 +619,7 @@ impl<'a> Parser<'a> {

     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, _) => {
+            token::Ident(name, _) => {
                 if self.token.is_reserved_ident() {
                     let mut err = self.expected_ident_found();
                     if recover {

@@ -630,7 +630,7 @@ impl<'a> Parser<'a> {
                 }
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => {
                 Err(if self.prev_token_kind == PrevTokenKind::DocComment {

@@ -1618,10 +1618,10 @@ impl<'a> Parser<'a> {

     fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
+            token::Ident(name, _) if name.is_path_segment_keyword() => {
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }

@@ -1629,10 +1629,10 @@ impl<'a> Parser<'a> {

     fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, false) if ident.name == kw::Underscore => {
+            token::Ident(name, false) if name == kw::Underscore => {
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }

@@ -2368,13 +2368,11 @@ impl<'a> Parser<'a> {
         }

         let mut recovery_field = None;
-        if let token::Ident(ident, _) = self.token.kind {
+        if let token::Ident(name, _) = self.token.kind {
             if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
                 // Use in case of error after field-looking code: `S { foo: () with a }`
-                let mut ident = ident.clone();
-                ident.span = self.span;
                 recovery_field = Some(ast::Field {
-                    ident,
+                    ident: Ident::new(name, self.span),
                     span: self.span,
                     expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
                     is_shorthand: false,

@@ -2637,7 +2635,7 @@ impl<'a> Parser<'a> {
                 self.look_ahead(1, |t| t.is_ident()) => {
             self.bump();
             let name = match self.token.kind {
-                token::Ident(ident, _) => ident,
+                token::Ident(name, _) => name,
                 _ => unreachable!()
             };
             let mut err = self.fatal(&format!("unknown macro variable `{}`", name));

@@ -2651,7 +2649,7 @@ impl<'a> Parser<'a> {
             // Interpolated identifier and lifetime tokens are replaced with usual identifier
             // and lifetime tokens, so the former are never encountered during normal parsing.
             match **nt {
-                token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident, is_raw), ident.span),
+                token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident.name, is_raw), ident.span),
                 token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
                 _ => return,
             }

@@ -2766,7 +2764,7 @@ impl<'a> Parser<'a> {
         let token_cannot_continue_expr = |t: &Token| match t.kind {
             // These tokens can start an expression after `!`, but
             // can't continue an expression after an ident
-            token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
+            token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
             token::Literal(..) | token::Pound => true,
             token::Interpolated(ref nt) => match **nt {
                 token::NtIdent(..) | token::NtExpr(..) |

@@ -4328,7 +4326,7 @@ impl<'a> Parser<'a> {
                                -> PResult<'a, Option<P<Item>>> {
         let token_lo = self.span;
         let (ident, def) = match self.token.kind {
-            token::Ident(ident, false) if ident.name == kw::Macro => {
+            token::Ident(name, false) if name == kw::Macro => {
                 self.bump();
                 let ident = self.parse_ident()?;
                 let tokens = if self.check(&token::OpenDelim(token::Brace)) {

@@ -4356,8 +4354,8 @@ impl<'a> Parser<'a> {

                 (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
             }
-            token::Ident(ident, _) if ident.name == sym::macro_rules &&
-                                      self.look_ahead(1, |t| *t == token::Not) => {
+            token::Ident(name, _) if name == sym::macro_rules &&
+                                     self.look_ahead(1, |t| *t == token::Not) => {
                 let prev_span = self.prev_span;
                 self.complain_if_pub_macro(&vis.node, prev_span);
                 self.bump();

@@ -5481,8 +5479,8 @@ impl<'a> Parser<'a> {
     fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
         let expect_ident = |this: &mut Self| match this.token.kind {
             // Preserve hygienic context.
-            token::Ident(ident, _) =>
-                { let span = this.span; this.bump(); Ident::new(ident.name, span) }
+            token::Ident(name, _) =>
+                { let span = this.span; this.bump(); Ident::new(name, span) }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {

@@ -5805,11 +5803,7 @@ impl<'a> Parser<'a> {
         match *vis {
             VisibilityKind::Inherited => {}
             _ => {
-                let is_macro_rules: bool = match self.token.kind {
-                    token::Ident(sid, _) => sid.name == sym::macro_rules,
-                    _ => false,
-                };
-                let mut err = if is_macro_rules {
+                let mut err = if self.token.is_keyword(sym::macro_rules) {
                     let mut err = self.diagnostic()
                         .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
                     err.span_suggestion(

@@ -118,8 +118,8 @@ impl Lit {
     }
 }

-pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: TokenKind = Ident(ident, is_raw);
+pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);

     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||

@@ -146,11 +146,11 @@ pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
         kw::While,
         kw::Yield,
         kw::Static,
-    ].contains(&ident.name)
+    ].contains(&name)
 }

-fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: TokenKind = Ident(ident, is_raw);
+fn ident_can_begin_type(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);

     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||

@@ -163,7 +163,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
         kw::Extern,
         kw::Typeof,
         kw::Dyn,
-    ].contains(&ident.name)
+    ].contains(&name)
 }

 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]

@@ -210,7 +210,7 @@ pub enum TokenKind {
     Literal(Lit),

     /* Name components */
-    Ident(ast::Ident, /* is_raw */ bool),
+    Ident(ast::Name, /* is_raw */ bool),
     Lifetime(ast::Name),

     Interpolated(Lrc<Nonterminal>),

@@ -245,7 +245,7 @@ pub struct Token {
 impl TokenKind {
     /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary.
     pub fn from_ast_ident(ident: ast::Ident) -> TokenKind {
-        Ident(ident, ident.is_raw_guess())
+        Ident(ident.name, ident.is_raw_guess())
     }

     crate fn is_like_plus(&self) -> bool {

@@ -254,12 +254,14 @@ impl TokenKind {
             _ => false,
         }
     }
+}

+impl Token {
     /// Returns `true` if the token can appear at the start of an expression.
     crate fn can_begin_expr(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw)              =>
-                ident_can_begin_expr(ident, is_raw), // value name or keyword
+        match self.kind {
+            Ident(name, is_raw)               =>
+                ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
             OpenDelim(..)                     | // tuple, array or block
             Literal(..)                       | // literal
             Not                               | // operator not

@@ -289,9 +291,9 @@ impl TokenKind {

     /// Returns `true` if the token can appear at the start of a type.
     crate fn can_begin_type(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw)              =>
-                ident_can_begin_type(ident, is_raw), // type name or keyword
+        match self.kind {
+            Ident(name, is_raw)               =>
+                ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Paren)                  | // tuple
             OpenDelim(Bracket)                | // array
             Not                               | // never

@@ -309,7 +311,9 @@ impl TokenKind {
             _ => false,
         }
     }
+}

+impl TokenKind {
     /// Returns `true` if the token can appear at the start of a const param.
     pub fn can_begin_const_arg(&self) -> bool {
         match self {

|
|||
_ => self.can_begin_literal_or_bool(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Token {
|
||||
/// Returns `true` if the token can appear at the start of a generic bound.
|
||||
crate fn can_begin_bound(&self) -> bool {
|
||||
self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
|
||||
self == &Question || self == &OpenDelim(Paren)
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenKind {
|
||||
pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
|
||||
Literal(Lit::new(kind, symbol, suffix))
|
||||
}
|
||||
|
@ -355,8 +363,8 @@ impl TokenKind {
|
|||
match *self {
|
||||
Literal(..) => true,
|
||||
BinOp(Minus) => true,
|
||||
Ident(ident, false) if ident.name == kw::True => true,
|
||||
Ident(ident, false) if ident.name == kw::False => true,
|
||||
Ident(name, false) if name == kw::True => true,
|
||||
Ident(name, false) if name == kw::False => true,
|
||||
Interpolated(ref nt) => match **nt {
|
||||
NtLiteral(..) => true,
|
||||
_ => false,
|
||||
|
@@ -367,6 +375,18 @@ impl TokenKind {
     }
 }

 impl Token {
+    /// Returns an identifier if this token is an identifier.
+    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
+        match self.kind {
+            Ident(name, is_raw) => Some((ast::Ident::new(name, self.span), is_raw)),
+            Interpolated(ref nt) => match **nt {
+                NtIdent(ident, is_raw) => Some((ident, is_raw)),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+
     /// Returns a lifetime identifier if this token is a lifetime.
     pub fn lifetime(&self) -> Option<ast::Ident> {
         match self.kind {

@@ -381,12 +401,12 @@ impl Token {
 }

 impl TokenKind {
-    /// Returns an identifier if this token is an identifier.
-    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
+    /// Returns an identifier name if this token is an identifier.
+    pub fn ident_name(&self) -> Option<(ast::Name, /* is_raw */ bool)> {
         match *self {
-            Ident(ident, is_raw) => Some((ident, is_raw)),
+            Ident(name, is_raw) => Some((name, is_raw)),
             Interpolated(ref nt) => match **nt {
-                NtIdent(ident, is_raw) => Some((ident, is_raw)),
+                NtIdent(ident, is_raw) => Some((ident.name, is_raw)),
                 _ => None,
             },
             _ => None,

@@ -405,7 +425,7 @@ impl TokenKind {
     }
     /// Returns `true` if the token is an identifier.
     pub fn is_ident(&self) -> bool {
-        self.ident().is_some()
+        self.ident_name().is_some()
     }
     /// Returns `true` if the token is a lifetime.
     crate fn is_lifetime(&self) -> bool {

@@ -415,10 +435,7 @@ impl TokenKind {
     /// Returns `true` if the token is a identifier whose name is the given
     /// string slice.
     crate fn is_ident_named(&self, name: Symbol) -> bool {
-        match self.ident() {
-            Some((ident, _)) => ident.name == name,
-            None => false
-        }
+        self.ident_name().map_or(false, |(ident_name, _)| ident_name == name)
     }

     /// Returns `true` if the token is an interpolated path.

@@ -440,24 +457,30 @@ impl TokenKind {
     crate fn is_qpath_start(&self) -> bool {
         self == &Lt || self == &BinOp(Shl)
     }
+}

+impl Token {
     crate fn is_path_start(&self) -> bool {
         self == &ModSep || self.is_qpath_start() || self.is_path() ||
         self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
     }
+}

+impl TokenKind {
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: Symbol) -> bool {
-        self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
+        self.ident_name().map(|(name, is_raw)| name == kw && !is_raw).unwrap_or(false)
     }

     pub fn is_path_segment_keyword(&self) -> bool {
-        match self.ident() {
-            Some((id, false)) => id.is_path_segment_keyword(),
+        match self.ident_name() {
+            Some((name, false)) => name.is_path_segment_keyword(),
             _ => false,
         }
     }
+}

+impl Token {
     // Returns true for reserved identifiers used internally for elided lifetimes,
     // unnamed method parameters, crate root module, error recovery etc.
     pub fn is_special_ident(&self) -> bool {

@@ -490,7 +513,9 @@ impl TokenKind {
             _ => false,
         }
     }
+}

+impl TokenKind {
     crate fn glue(self, joint: TokenKind) -> Option<TokenKind> {
         Some(match self {
             Eq => match joint {

@@ -537,7 +562,7 @@ impl TokenKind {
                 _ => return None,
             },
             SingleQuote => match joint {
-                Ident(ident, false) => Lifetime(Symbol::intern(&format!("'{}", ident))),
+                Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))),
                 _ => return None,
             },

@@ -608,9 +633,9 @@ impl TokenKind {
             (&Literal(a), &Literal(b)) => a == b,

             (&Lifetime(a), &Lifetime(b)) => a == b,
-            (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
-                                                       a.name == kw::DollarCrate ||
-                                                       c.name == kw::DollarCrate),
+            (&Ident(a, b), &Ident(c, d)) => b == d && (a == c ||
+                                                       a == kw::DollarCrate ||
+                                                       c == kw::DollarCrate),

             (&Interpolated(_), &Interpolated(_)) => false,

@@ -738,8 +763,7 @@ impl Nonterminal {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Ident(ident, is_raw);
-                Some(TokenTree::token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into())
             }
             Nonterminal::NtLifetime(ident) => {
                 Some(TokenTree::token(ident.span, Lifetime(ident.name)).into())

@@ -827,7 +851,7 @@ fn prepend_attrs(sess: &ParseSess,
         // For simple paths, push the identifier directly
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
-            let token = Ident(ident, ident.as_str().starts_with("r#"));
+            let token = Ident(ident.name, ident.as_str().starts_with("r#"));
             brackets.push(tokenstream::TokenTree::token(ident.span, token));

         // ... and for more complicated paths, fall back to a reparse hack that

@@ -575,7 +575,7 @@ impl DelimSpan {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::syntax::ast::Ident;
+    use crate::syntax::ast::Name;
     use crate::with_default_globals;
     use crate::util::parser_testing::string_to_stream;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};

@@ -660,7 +660,7 @@ mod tests {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
+                TokenTree::token(sp(0, 1), token::Ident(Name::intern("a"), false)).into();
             let test2 = string_to_ts("foo(bar::baz)");

             assert_eq!(test0.is_empty(), true);

@@ -38,8 +38,8 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
             }
         } else {
             match *e {
-                TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) =>
-                    res_str.push_str(&ident.as_str()),
+                TokenTree::Token(Token { kind: token::Ident(name, _), .. }) =>
+                    res_str.push_str(&name.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
                     return DummyResult::any(sp);

@@ -149,16 +149,16 @@ fn parse_args<'a>(
         } // accept trailing commas
         if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
             named = true;
-            let ident = if let token::Ident(i, _) = p.token.kind {
+            let name = if let token::Ident(name, _) = p.token.kind {
                 p.bump();
-                i
+                name
             } else {
                 return Err(ecx.struct_span_err(
                     p.span,
                     "expected ident, positional arguments cannot follow named arguments",
                 ));
             };
-            let name: &str = &ident.as_str();
+            let name: &str = &name.as_str();

             p.expect(&token::Eq)?;
             let e = p.parse_expr()?;

@@ -132,7 +132,7 @@ impl<'a> CollectProcMacros<'a> {
             }
         };

-        if !trait_ident.can_be_raw() {
+        if !trait_ident.name.can_be_raw() {
             self.handler.span_err(trait_attr.span,
                                   &format!("`{}` cannot be a name of derive macro", trait_ident));
         }

@@ -166,7 +166,7 @@ impl<'a> CollectProcMacros<'a> {
                 return None;
             }
         };
-        if !ident.can_be_raw() {
+        if !ident.name.can_be_raw() {
             self.handler.span_err(
                 attr.span,
                 &format!("`{}` cannot be a name of derive helper attribute", ident),

@@ -142,9 +142,8 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
             Question => op!('?'),
             SingleQuote => op!('\''),

-            Ident(ident, false) if ident.name == kw::DollarCrate =>
-                tt!(Ident::dollar_crate()),
-            Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)),
+            Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
+            Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
             Lifetime(name) => {
                 let ident = ast::Ident::new(name, span).without_first_quote();
                 stack.push(tt!(Ident::new(ident.name, false)));

@@ -159,7 +158,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     escaped.extend(ch.escape_debug());
                 }
                 let stream = vec![
-                    Ident(ast::Ident::new(sym::doc, span), false),
+                    Ident(sym::doc, false),
                     Eq,
                     TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]

@@ -211,8 +210,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                 .into();
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
-                let token = Ident(ast::Ident::new(sym, span), is_raw);
-                return tokenstream::TokenTree::token(span, token).into();
+                return tokenstream::TokenTree::token(span, Ident(sym, is_raw)).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },

@@ -338,7 +336,8 @@ impl Ident {
         if !Self::is_valid(&string) {
             panic!("`{:?}` is not a valid identifier", string)
         }
-        if is_raw && !ast::Ident::from_interned_str(sym.as_interned_str()).can_be_raw() {
+        // Get rid of gensyms to conservatively check rawness on the string contents only.
+        if is_raw && !sym.as_interned_str().as_symbol().can_be_raw() {
             panic!("`{}` cannot be a raw identifier", string);
         }
         Ident { sym, is_raw, span }

@@ -1019,6 +1019,21 @@ impl Symbol {
     pub fn is_doc_keyword(self) -> bool {
         self <= kw::Union
     }
+
+    /// A keyword or reserved identifier that can be used as a path segment.
+    pub fn is_path_segment_keyword(self) -> bool {
+        self == kw::Super ||
+        self == kw::SelfLower ||
+        self == kw::SelfUpper ||
+        self == kw::Crate ||
+        self == kw::PathRoot ||
+        self == kw::DollarCrate
+    }
+
+    /// This symbol can be a raw identifier.
+    pub fn can_be_raw(self) -> bool {
+        self != kw::Invalid && self != kw::Underscore && !self.is_path_segment_keyword()
+    }
 }

 impl Ident {

@@ -1049,24 +1064,13 @@ impl Ident {

     /// A keyword or reserved identifier that can be used as a path segment.
     pub fn is_path_segment_keyword(self) -> bool {
-        self.name == kw::Super ||
-        self.name == kw::SelfLower ||
-        self.name == kw::SelfUpper ||
-        self.name == kw::Crate ||
-        self.name == kw::PathRoot ||
-        self.name == kw::DollarCrate
-    }
-
-    /// This identifier can be a raw identifier.
-    pub fn can_be_raw(self) -> bool {
-        self.name != kw::Invalid && self.name != kw::Underscore &&
-        !self.is_path_segment_keyword()
+        self.name.is_path_segment_keyword()
     }

     /// We see this identifier in a normal identifier position, like variable name or a type.
     /// How was it written originally? Did it use the raw form? Let's try to guess.
     pub fn is_raw_guess(self) -> bool {
-        self.can_be_raw() && self.is_reserved()
+        self.name.can_be_raw() && self.is_reserved()
     }
 }

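A follow-up note, again as a hedged self-contained sketch (the interner and types below are simplified stand-ins, not rustc's `Symbol` machinery): after this change, keyword and matcher checks in the diff, such as `is_keyword`, `token_name_eq`, and the `Ident(name, false) if name == kw::...` guards, compare interned names directly and use the `is_raw` flag to exclude raw identifiers, instead of going through an `ast::Ident` and ignoring its span.

// Illustrative sketch only: a toy interner and token kind, not rustc's types.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Name(u32);

struct Interner { map: HashMap<String, Name>, strings: Vec<String> }

impl Interner {
    fn new() -> Self { Interner { map: HashMap::new(), strings: Vec::new() } }
    fn intern(&mut self, s: &str) -> Name {
        if let Some(&name) = self.map.get(s) { return name; }
        let name = Name(self.strings.len() as u32);
        self.strings.push(s.to_string());
        self.map.insert(s.to_string(), name);
        name
    }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TokenKind { Ident(Name, /* is_raw */ bool), Comma }

// Mirrors the spirit of `TokenKind::is_keyword` after the change: a raw
// identifier (`r#fn`) never counts as the keyword, and only names are compared.
fn is_keyword(kind: TokenKind, kw: Name) -> bool {
    matches!(kind, TokenKind::Ident(name, false) if name == kw)
}

fn main() {
    let mut interner = Interner::new();
    let kw_fn = interner.intern("fn");
    let ident_fn = TokenKind::Ident(interner.intern("fn"), false);
    let raw_fn = TokenKind::Ident(interner.intern("fn"), true);
    assert!(is_keyword(ident_fn, kw_fn));
    assert!(!is_keyword(raw_fn, kw_fn));
    assert!(!is_keyword(TokenKind::Comma, kw_fn));
}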