Remove Token::uninterpolated_span.

In favour of the similar method on `Parser`, which works on things other than identifiers and lifetimes.
parent 49ed25b5d2
commit d59b17c5cd
5 changed files with 48 additions and 44 deletions
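
To make the intent easier to follow before reading the diff, here is a minimal, self-contained sketch of the idea (the `Span`, `TokenKind`, `Token`, and `Parser` types below are simplified stand-ins, not the actual rustc-internal API): when the current token was produced by macro expansion, edition checks and name resolution should use the span of the interpolated fragment rather than the token's own span, and that lookup now lives on `Parser` as `token_uninterpolated_span` / `prev_token_uninterpolated_span` instead of on `Token`.

// Illustrative sketch only: these types are hypothetical stand-ins for the
// rustc-internal `Span`, `TokenKind`, `Token`, and `Parser`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

#[derive(Clone, Copy, Debug)]
enum TokenKind {
    // A plain identifier.
    Ident(&'static str),
    // An identifier produced by a declarative macro; it carries the span of
    // the fragment that was passed to the macro.
    NtIdent(&'static str, Span),
}

#[derive(Clone, Copy, Debug)]
struct Token { kind: TokenKind, span: Span }

struct Parser { token: Token, prev_token: Token }

impl Parser {
    // Parser-side replacement for the removed `Token::uninterpolated_span`:
    // prefer the interpolated fragment's span over the token's own span.
    fn token_uninterpolated_span(&self) -> Span {
        match self.token.kind {
            TokenKind::NtIdent(_, fragment_span) => fragment_span,
            _ => self.token.span,
        }
    }

    // Same lookup for the previously consumed token.
    fn prev_token_uninterpolated_span(&self) -> Span {
        match self.prev_token.kind {
            TokenKind::NtIdent(_, fragment_span) => fragment_span,
            _ => self.prev_token.span,
        }
    }
}

fn main() {
    let p = Parser {
        token: Token {
            kind: TokenKind::NtIdent("await", Span { lo: 100, hi: 105 }),
            span: Span { lo: 10, hi: 12 },
        },
        prev_token: Token { kind: TokenKind::Ident("x"), span: Span { lo: 0, hi: 1 } },
    };
    // Edition checks should see the fragment's span, not the expansion-site span.
    assert_eq!(p.token_uninterpolated_span(), Span { lo: 100, hi: 105 });
    assert_eq!(p.prev_token_uninterpolated_span(), Span { lo: 0, hi: 1 });
}

The real parser-side methods, as the diff below shows, additionally handle `Interpolated` nonterminals and tokens behind invisible metavariable delimiters via lookahead, which this sketch omits.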

@@ -448,8 +448,9 @@ pub enum TokenKind {
 
     /// Identifier token.
     /// Do not forget about `NtIdent` when you want to match on identifiers.
-    /// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to
-    /// treat regular and interpolated identifiers in the same way.
+    /// It's recommended to use `Token::{ident,uninterpolate}` and
+    /// `Parser::token_uninterpolated_span` to treat regular and interpolated
+    /// identifiers in the same way.
     Ident(Symbol, IdentIsRaw),
     /// This identifier (and its span) is the identifier passed to the
     /// declarative macro. The span in the surrounding `Token` is the span of

@@ -458,8 +459,9 @@ pub enum TokenKind {
 
     /// Lifetime identifier token.
     /// Do not forget about `NtLifetime` when you want to match on lifetime identifiers.
-    /// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to
-    /// treat regular and interpolated lifetime identifiers in the same way.
+    /// It's recommended to use `Token::{ident,uninterpolate}` and
+    /// `Parser::token_uninterpolated_span` to treat regular and interpolated
+    /// identifiers in the same way.
     Lifetime(Symbol, IdentIsRaw),
     /// This identifier (and its span) is the lifetime passed to the
     /// declarative macro. The span in the surrounding `Token` is the span of

@@ -585,23 +587,6 @@ impl Token {
         Token::new(Ident(ident.name, ident.is_raw_guess().into()), ident.span)
     }
-
-    /// For interpolated tokens, returns a span of the fragment to which the interpolated
-    /// token refers. For all other tokens this is just a regular span.
-    /// It is particularly important to use this for identifiers and lifetimes
-    /// for which spans affect name resolution and edition checks.
-    /// Note that keywords are also identifiers, so they should use this
-    /// if they keep spans or perform edition checks.
-    //
-    // Note: `Parser::uninterpolated_token_span` may give better information
-    // than this method does.
-    pub fn uninterpolated_span(&self) -> Span {
-        match self.kind {
-            NtIdent(ident, _) | NtLifetime(ident, _) => ident.span,
-            Interpolated(ref nt) => nt.use_span(),
-            _ => self.span,
-        }
-    }
 
     pub fn is_range_separator(&self) -> bool {
         [DotDot, DotDotDot, DotDotEq].contains(&self.kind)
     }

@@ -1318,7 +1318,7 @@ impl<'a> Parser<'a> {
 
     /// Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) {
+        if self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) {
             return Ok(self.mk_await_expr(self_arg, lo));
         }
 

@@ -1509,9 +1509,9 @@ impl<'a> Parser<'a> {
                 this.parse_expr_let(restrictions)
             } else if this.eat_keyword(exp!(Underscore)) {
                 Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
-            } else if this.token.uninterpolated_span().at_least_rust_2018() {
+            } else if this.token_uninterpolated_span().at_least_rust_2018() {
                 // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
-                if this.token.uninterpolated_span().at_least_rust_2024()
+                if this.token_uninterpolated_span().at_least_rust_2024()
                     // check for `gen {}` and `gen move {}`
                     // or `async gen {}` and `async gen move {}`
                     && (this.is_gen_block(kw::Gen, 0)

@@ -2186,7 +2186,7 @@ impl<'a> Parser<'a> {
     fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
         self.recover_after_dot();
         let span = self.token.span;
-        let uninterpolated_span = self.uninterpolated_token_span();
+        let uninterpolated_span = self.token_uninterpolated_span();
         self.eat_token_lit().map(|token_lit| {
             match MetaItemLit::from_token_lit(token_lit, span) {
                 Ok(lit) => lit,

@@ -2390,7 +2390,7 @@ impl<'a> Parser<'a> {
         let movability =
             if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable };
 
-        let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() {
+        let coroutine_kind = if self.token_uninterpolated_span().at_least_rust_2018() {
             self.parse_coroutine_kind(Case::Sensitive)
         } else {
             None

@@ -2939,7 +2939,7 @@ impl<'a> Parser<'a> {
     /// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
     fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
         let is_await =
-            self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await));
+            self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await));
 
         if is_await {
             self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span);

@@ -3529,7 +3529,7 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw::Try)
             && self
                 .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
-            && self.token.uninterpolated_span().at_least_rust_2018()
+            && self.token_uninterpolated_span().at_least_rust_2018()
     }
 
     /// Parses an `async move? {...}` or `gen move? {...}` expression.

@@ -591,7 +591,7 @@ impl<'a> Parser<'a> {
         }
 
         // Parse stray `impl async Trait`
-        if (self.token.uninterpolated_span().at_least_rust_2018()
+        if (self.token_uninterpolated_span().at_least_rust_2018()
             && self.token.is_keyword(kw::Async))
             || self.is_kw_followed_by_ident(kw::Async)
         {

@@ -877,7 +877,7 @@ impl<'a> Parser<'a> {
             && self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As))
         {
             self.bump(); // `default`
-            Defaultness::Default(self.prev_token.uninterpolated_span())
+            Defaultness::Default(self.prev_token_uninterpolated_span())
         } else {
             Defaultness::Final
         }

@@ -1208,7 +1208,7 @@ impl<'a> Parser<'a> {
         attrs: &mut AttrVec,
         mut safety: Safety,
     ) -> PResult<'a, ItemKind> {
-        let extern_span = self.prev_token.uninterpolated_span();
+        let extern_span = self.prev_token_uninterpolated_span();
         let abi = self.parse_abi(); // ABI?
         // FIXME: This recovery should be tested better.
         if safety == Safety::Default

@@ -2781,7 +2781,7 @@ impl<'a> Parser<'a> {
                         .expect("Span extracted directly from keyword should always work");
 
                     err.span_suggestion(
-                        self.token.uninterpolated_span(),
+                        self.token_uninterpolated_span(),
                         format!("`{original_kw}` already used earlier, remove this one"),
                         "",
                         Applicability::MachineApplicable,

@@ -2792,7 +2792,7 @@ impl<'a> Parser<'a> {
                 else if let Some(WrongKw::Misplaced(correct_pos_sp)) = wrong_kw {
                     let correct_pos_sp = correct_pos_sp.to(self.prev_token.span);
                     if let Ok(current_qual) = self.span_to_snippet(correct_pos_sp) {
-                        let misplaced_qual_sp = self.token.uninterpolated_span();
+                        let misplaced_qual_sp = self.token_uninterpolated_span();
                         let misplaced_qual = self.span_to_snippet(misplaced_qual_sp).unwrap();
 
                         err.span_suggestion(

@@ -1313,14 +1313,14 @@ impl<'a> Parser<'a> {
 
     /// Parses asyncness: `async` or nothing.
     fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
-        let span = self.token.uninterpolated_span();
+        let span = self.token_uninterpolated_span();
         if self.eat_keyword_case(exp!(Async), case) {
             // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
             // error if edition <= 2024, like we do with async and edition <= 2018?
-            if self.token.uninterpolated_span().at_least_rust_2024()
+            if self.token_uninterpolated_span().at_least_rust_2024()
                 && self.eat_keyword_case(exp!(Gen), case)
             {
-                let gen_span = self.prev_token.uninterpolated_span();
+                let gen_span = self.prev_token_uninterpolated_span();
                 Some(CoroutineKind::AsyncGen {
                     span: span.to(gen_span),
                     closure_id: DUMMY_NODE_ID,

@@ -1333,7 +1333,7 @@ impl<'a> Parser<'a> {
                     return_impl_trait_id: DUMMY_NODE_ID,
                 })
             }
-        } else if self.token.uninterpolated_span().at_least_rust_2024()
+        } else if self.token_uninterpolated_span().at_least_rust_2024()
             && self.eat_keyword_case(exp!(Gen), case)
         {
             Some(CoroutineKind::Gen {

@@ -1349,9 +1349,9 @@ impl<'a> Parser<'a> {
     /// Parses fn unsafety: `unsafe`, `safe` or nothing.
     fn parse_safety(&mut self, case: Case) -> Safety {
         if self.eat_keyword_case(exp!(Unsafe), case) {
-            Safety::Unsafe(self.prev_token.uninterpolated_span())
+            Safety::Unsafe(self.prev_token_uninterpolated_span())
         } else if self.eat_keyword_case(exp!(Safe), case) {
-            Safety::Safe(self.prev_token.uninterpolated_span())
+            Safety::Safe(self.prev_token_uninterpolated_span())
         } else {
             Safety::Default
         }

@@ -1378,7 +1378,7 @@ impl<'a> Parser<'a> {
                 .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
             && self.eat_keyword_case(exp!(Const), case)
         {
-            Const::Yes(self.prev_token.uninterpolated_span())
+            Const::Yes(self.prev_token_uninterpolated_span())
         } else {
             Const::No
         }

@@ -1723,8 +1723,15 @@ impl<'a> Parser<'a> {
         self.num_bump_calls
     }
 
-    pub fn uninterpolated_token_span(&self) -> Span {
+    /// For interpolated `self.token`, returns a span of the fragment to which
+    /// the interpolated token refers. For all other tokens this is just a
+    /// regular span. It is particularly important to use this for identifiers
+    /// and lifetimes for which spans affect name resolution and edition
+    /// checks. Note that keywords are also identifiers, so they should use
+    /// this if they keep spans or perform edition checks.
+    pub fn token_uninterpolated_span(&self) -> Span {
         match &self.token.kind {
+            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
             token::Interpolated(nt) => nt.use_span(),
             token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
                 self.look_ahead(1, |t| t.span)

@@ -1732,6 +1739,18 @@ impl<'a> Parser<'a> {
             _ => self.token.span,
         }
     }
+
+    /// Like `token_uninterpolated_span`, but works on `self.prev_token`.
+    pub fn prev_token_uninterpolated_span(&self) -> Span {
+        match &self.prev_token.kind {
+            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
+            token::Interpolated(nt) => nt.use_span(),
+            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
+                self.look_ahead(0, |t| t.span)
+            }
+            _ => self.prev_token.span,
+        }
+    }
 }
 
 pub(crate) fn make_unclosed_delims_error(

@@ -775,7 +775,7 @@ impl<'a> Parser<'a> {
     /// Is a `dyn B0 + ... + Bn` type allowed here?
     fn is_explicit_dyn_type(&mut self) -> bool {
         self.check_keyword(exp!(Dyn))
-            && (self.token.uninterpolated_span().at_least_rust_2018()
+            && (self.token_uninterpolated_span().at_least_rust_2018()
                 || self.look_ahead(1, |t| {
                     (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::Star)
                         && !can_continue_type_after_non_fn_ident(t)

@@ -998,13 +998,13 @@ impl<'a> Parser<'a> {
             BoundConstness::Never
         };
 
-        let asyncness = if self.token.uninterpolated_span().at_least_rust_2018()
+        let asyncness = if self.token_uninterpolated_span().at_least_rust_2018()
             && self.eat_keyword(exp!(Async))
         {
             self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span);
             BoundAsyncness::Async(self.prev_token.span)
         } else if self.may_recover()
-            && self.token.uninterpolated_span().is_rust_2015()
+            && self.token_uninterpolated_span().is_rust_2015()
             && self.is_kw_followed_by_ident(kw::Async)
         {
             self.bump(); // eat `async`