Rollup merge of #96433 - petrochenkov:delim, r=nnethercote
rustc_ast: Harmonize delimiter naming with `proc_macro::Delimiter`

The compiler cannot reuse `proc_macro::Delimiter` directly because of its extra impls, but it can at least use the same naming. After this PR the only difference between the two enums is that `proc_macro::Delimiter::None` becomes `token::Delimiter::Invisible`.

It's my mistake that the invisible delimiter is called `None` on stable: during stabilization I audited the naming and wrote the docs, but missed that the `None` name gives a wrong and confusing impression of what this delimiter actually is.

cc https://github.com/rust-lang/rust/pull/96421
r? ``@nnethercote``
Commit 0cbf3b2b30
41 changed files with 433 additions and 426 deletions
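For orientation, a minimal editorial sketch (not code from this commit) of the correspondence described above. `PmDelimiter` and `TokenDelimiter` are hypothetical stand-ins for the stable `proc_macro::Delimiter` and the renamed `rustc_ast::token::Delimiter`; after this change only the invisible variant's name differs between the two enums.

    // Hypothetical stand-ins; the real enums live in `proc_macro` and `rustc_ast::token`.
    #[derive(Clone, Copy, Debug, PartialEq)]
    enum PmDelimiter { Parenthesis, Brace, Bracket, None }

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum TokenDelimiter { Parenthesis, Brace, Bracket, Invisible }

    fn to_token_delim(d: PmDelimiter) -> TokenDelimiter {
        match d {
            // These three variant names now match across both enums.
            PmDelimiter::Parenthesis => TokenDelimiter::Parenthesis,
            PmDelimiter::Brace => TokenDelimiter::Brace,
            PmDelimiter::Bracket => TokenDelimiter::Bracket,
            // The one remaining difference: stable `None` becomes `Invisible`.
            PmDelimiter::None => TokenDelimiter::Invisible,
        }
    }

    fn main() {
        assert_eq!(to_token_delim(PmDelimiter::None), TokenDelimiter::Invisible);
    }

The compiler type keeps its own extra impls, which is why the two enums stay distinct rather than one being reused for the other.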
@@ -1,6 +1,6 @@
 use crate::lexer::unicode_chars::UNICODE_ARRAY;
 use rustc_ast::ast::{self, AttrStyle};
-use rustc_ast::token::{self, CommentKind, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{Spacing, TokenStream};
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
 use rustc_errors::{error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
@@ -24,8 +24,8 @@ use unescape_error_reporting::{emit_unescape_error, escaped_char};

 #[derive(Clone, Debug)]
 pub struct UnmatchedBrace {
-pub expected_delim: token::DelimToken,
-pub found_delim: Option<token::DelimToken>,
+pub expected_delim: Delimiter,
+pub found_delim: Option<Delimiter>,
 pub found_span: Span,
 pub unclosed_span: Option<Span>,
 pub candidate_span: Option<Span>,
@@ -284,12 +284,12 @@ impl<'a> StringReader<'a> {
 rustc_lexer::TokenKind::Semi => token::Semi,
 rustc_lexer::TokenKind::Comma => token::Comma,
 rustc_lexer::TokenKind::Dot => token::Dot,
-rustc_lexer::TokenKind::OpenParen => token::OpenDelim(token::Paren),
-rustc_lexer::TokenKind::CloseParen => token::CloseDelim(token::Paren),
-rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(token::Brace),
-rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(token::Brace),
-rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(token::Bracket),
-rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(token::Bracket),
+rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
+rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
+rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
+rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
+rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
+rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
 rustc_lexer::TokenKind::At => token::At,
 rustc_lexer::TokenKind::Pound => token::Pound,
 rustc_lexer::TokenKind::Tilde => token::Tilde,
@@ -1,6 +1,6 @@
 use super::{StringReader, UnmatchedBrace};

-use rustc_ast::token::{self, DelimToken, Token};
+use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{
 DelimSpan,
 Spacing::{self, *},
@@ -32,15 +32,15 @@ struct TokenTreesReader<'a> {
 string_reader: StringReader<'a>,
 token: Token,
 /// Stack of open delimiters and their spans. Used for error message.
-open_braces: Vec<(token::DelimToken, Span)>,
+open_braces: Vec<(Delimiter, Span)>,
 unmatched_braces: Vec<UnmatchedBrace>,
 /// The type and spans for all braces
 ///
 /// Used only for error recovery when arriving to EOF with mismatched braces.
-matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
+matching_delim_spans: Vec<(Delimiter, Span, Span)>,
 last_unclosed_found_span: Option<Span>,
 /// Collect empty block spans that might have been auto-inserted by editors.
-last_delim_empty_block_spans: FxHashMap<token::DelimToken, Span>,
+last_delim_empty_block_spans: FxHashMap<Delimiter, Span>,
 /// Collect the spans of braces (Open, Close). Used only
 /// for detecting if blocks are empty and only braces.
 matching_block_spans: Vec<(Span, Span)>,
@@ -88,7 +88,7 @@ impl<'a> TokenTreesReader<'a> {
 for &(_, sp) in &self.open_braces {
 err.span_label(sp, "unclosed delimiter");
 self.unmatched_braces.push(UnmatchedBrace {
-expected_delim: token::DelimToken::Brace,
+expected_delim: Delimiter::Brace,
 found_delim: None,
 found_span: self.token.span,
 unclosed_span: Some(sp),
@@ -150,7 +150,7 @@ impl<'a> TokenTreesReader<'a> {
 }

 //only add braces
-if let (DelimToken::Brace, DelimToken::Brace) = (open_brace, delim) {
+if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, delim) {
 self.matching_block_spans.push((open_brace_span, close_brace_span));
 }

@@ -2,7 +2,7 @@
 // https://www.unicode.org/Public/security/10.0.0/confusables.txt

 use super::StringReader;
-use crate::token;
+use crate::token::{self, Delimiter};
 use rustc_errors::{Applicability, Diagnostic};
 use rustc_span::{symbol::kw, BytePos, Pos, Span};

@@ -312,12 +312,12 @@ const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
 ('!', "Exclamation Mark", Some(token::Not)),
 ('?', "Question Mark", Some(token::Question)),
 ('.', "Period", Some(token::Dot)),
-('(', "Left Parenthesis", Some(token::OpenDelim(token::Paren))),
-(')', "Right Parenthesis", Some(token::CloseDelim(token::Paren))),
-('[', "Left Square Bracket", Some(token::OpenDelim(token::Bracket))),
-(']', "Right Square Bracket", Some(token::CloseDelim(token::Bracket))),
-('{', "Left Curly Brace", Some(token::OpenDelim(token::Brace))),
-('}', "Right Curly Brace", Some(token::CloseDelim(token::Brace))),
+('(', "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
+(')', "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
+('[', "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
+(']', "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
+('{', "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
+('}', "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
 ('*', "Asterisk", Some(token::BinOp(token::Star))),
 ('/', "Slash", Some(token::BinOp(token::Slash))),
 ('\\', "Backslash", None),
@@ -1,7 +1,7 @@
 use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
 use rustc_ast as ast;
 use rustc_ast::attr;
-use rustc_ast::token::{self, Nonterminal};
+use rustc_ast::token::{self, Delimiter, Nonterminal};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{error_code, Diagnostic, PResult};
 use rustc_span::{sym, BytePos, Span};
@@ -130,9 +130,9 @@ impl<'a> Parser<'a> {
 ast::AttrStyle::Outer
 };

-this.expect(&token::OpenDelim(token::Bracket))?;
+this.expect(&token::OpenDelim(Delimiter::Bracket))?;
 let item = this.parse_attr_item(false)?;
-this.expect(&token::CloseDelim(token::Bracket))?;
+this.expect(&token::CloseDelim(Delimiter::Bracket))?;
 let attr_sp = lo.to(this.prev_token.span);

 // Emit error if inner attribute is encountered and forbidden.
@@ -403,7 +403,7 @@ impl<'a> Parser<'a> {
 crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
 Ok(if self.eat(&token::Eq) {
 ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
-} else if self.check(&token::OpenDelim(token::Paren)) {
+} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
 // Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
 let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
 ast::MetaItemKind::List(list)
@@ -1,5 +1,5 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
-use rustc_ast::token::{self, DelimToken, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream};
 use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing};
 use rustc_ast::{self as ast};
@@ -388,11 +388,11 @@ impl<'a> Parser<'a> {
 /// Converts a flattened iterator of tokens (including open and close delimiter tokens)
 /// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
 /// of open and close delims.
-// FIXME(#67062): Currently, we don't parse `None`-delimited groups correctly,
-// which can cause us to end up with mismatched `None` delimiters in our
+// FIXME(#67062): Currently, we don't parse `Invisible`-delimited groups correctly,
+// which can cause us to end up with mismatched `Invisible` delimiters in our
 // captured tokens. This function contains several hacks to work around this -
-// essentially, we throw away mismatched `None` delimiters when we encounter them.
-// Once we properly parse `None` delimiters, they can be captured just like any
+// essentially, we throw away mismatched `Invisible` delimiters when we encounter them.
+// Once we properly parse `Invisible` delimiters, they can be captured just like any
 // other tokens, and these hacks can be removed.
 fn make_token_stream(
 mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
@@ -401,7 +401,7 @@ fn make_token_stream(
 #[derive(Debug)]
 struct FrameData {
 // This is `None` for the first frame, `Some` for all others.
-open_delim_sp: Option<(DelimToken, Span)>,
+open_delim_sp: Option<(Delimiter, Span)>,
 inner: Vec<(AttrAnnotatedTokenTree, Spacing)>,
 }
 let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
@@ -412,13 +412,13 @@ fn make_token_stream(
 stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
 }
 FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
-// HACK: If we encounter a mismatched `None` delimiter at the top
+// HACK: If we encounter a mismatched `Invisible` delimiter at the top
 // level, just ignore it.
-if matches!(delim, DelimToken::NoDelim)
+if matches!(delim, Delimiter::Invisible)
 && (stack.len() == 1
 || !matches!(
 stack.last_mut().unwrap().open_delim_sp.unwrap().0,
-DelimToken::NoDelim
+Delimiter::Invisible
 ))
 {
 token_and_spacing = iter.next();
@@ -428,11 +428,11 @@ fn make_token_stream(
 .pop()
 .unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));

-// HACK: If our current frame has a mismatched opening `None` delimiter,
+// HACK: If our current frame has a mismatched opening `Invisible` delimiter,
 // merge our current frame with the one above it. That is, transform
 // `[ { < first second } third ]` into `[ { first second } third ]`
-if !matches!(delim, DelimToken::NoDelim)
-&& matches!(frame_data.open_delim_sp.unwrap().0, DelimToken::NoDelim)
+if !matches!(delim, Delimiter::Invisible)
+&& matches!(frame_data.open_delim_sp.unwrap().0, Delimiter::Invisible)
 {
 stack.last_mut().unwrap().inner.extend(frame_data.inner);
 // Process our closing delimiter again, this time at the previous
@@ -472,10 +472,10 @@ fn make_token_stream(
 }
 token_and_spacing = iter.next();
 }
-// HACK: If we don't have a closing `None` delimiter for our last
+// HACK: If we don't have a closing `Invisible` delimiter for our last
 // frame, merge the frame with the top-level frame. That is,
 // turn `< first second` into `first second`
-if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == DelimToken::NoDelim {
+if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == Delimiter::Invisible {
 let temp_buf = stack.pop().unwrap();
 stack.last_mut().unwrap().inner.extend(temp_buf.inner);
 }
@ -8,7 +8,7 @@ use super::{
|
|||
use crate::lexer::UnmatchedBrace;
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::token::{self, Lit, LitKind, TokenKind};
|
||||
use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
|
||||
use rustc_ast::util::parser::AssocOp;
|
||||
use rustc_ast::{
|
||||
AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
|
||||
|
@ -337,10 +337,10 @@ impl<'a> Parser<'a> {
|
|||
TokenKind::Comma,
|
||||
TokenKind::Semi,
|
||||
TokenKind::ModSep,
|
||||
TokenKind::OpenDelim(token::DelimToken::Brace),
|
||||
TokenKind::OpenDelim(token::DelimToken::Paren),
|
||||
TokenKind::CloseDelim(token::DelimToken::Brace),
|
||||
TokenKind::CloseDelim(token::DelimToken::Paren),
|
||||
TokenKind::OpenDelim(Delimiter::Brace),
|
||||
TokenKind::OpenDelim(Delimiter::Parenthesis),
|
||||
TokenKind::CloseDelim(Delimiter::Brace),
|
||||
TokenKind::CloseDelim(Delimiter::Parenthesis),
|
||||
];
|
||||
match self.token.ident() {
|
||||
Some((ident, false))
|
||||
|
@ -413,7 +413,7 @@ impl<'a> Parser<'a> {
|
|||
} else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
|
||||
// The current token is in the same line as the prior token, not recoverable.
|
||||
} else if [token::Comma, token::Colon].contains(&self.token.kind)
|
||||
&& self.prev_token.kind == token::CloseDelim(token::Paren)
|
||||
&& self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis)
|
||||
{
|
||||
// Likely typo: The current token is on a new line and is expected to be
|
||||
// `.`, `;`, `?`, or an operator after a close delimiter token.
|
||||
|
@ -424,7 +424,7 @@ impl<'a> Parser<'a> {
|
|||
// ^
|
||||
// https://github.com/rust-lang/rust/issues/72253
|
||||
} else if self.look_ahead(1, |t| {
|
||||
t == &token::CloseDelim(token::Brace)
|
||||
t == &token::CloseDelim(Delimiter::Brace)
|
||||
|| t.can_begin_expr() && t.kind != token::Colon
|
||||
}) && [token::Comma, token::Colon].contains(&self.token.kind)
|
||||
{
|
||||
|
@ -441,7 +441,7 @@ impl<'a> Parser<'a> {
|
|||
.emit();
|
||||
return Ok(true);
|
||||
} else if self.look_ahead(0, |t| {
|
||||
t == &token::CloseDelim(token::Brace)
|
||||
t == &token::CloseDelim(Delimiter::Brace)
|
||||
|| (t.can_begin_expr() && t != &token::Semi && t != &token::Pound)
|
||||
// Avoid triggering with too many trailing `#` in raw string.
|
||||
|| (sm.is_multiline(
|
||||
|
@ -655,7 +655,7 @@ impl<'a> Parser<'a> {
|
|||
(Err(snapshot_err), Err(err)) => {
|
||||
// We don't know what went wrong, emit the normal error.
|
||||
snapshot_err.cancel();
|
||||
self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
|
||||
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
|
||||
Err(err)
|
||||
}
|
||||
(Ok(_), Ok(mut tail)) => {
|
||||
|
@ -866,7 +866,7 @@ impl<'a> Parser<'a> {
|
|||
trailing_span = trailing_span.to(self.token.span);
|
||||
self.bump();
|
||||
}
|
||||
if self.token.kind == token::OpenDelim(token::Paren) {
|
||||
if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
|
||||
// Recover from bad turbofish: `foo.collect::Vec<_>()`.
|
||||
let args = AngleBracketedArgs { args, span }.into();
|
||||
segment.args = args;
|
||||
|
@ -1098,7 +1098,7 @@ impl<'a> Parser<'a> {
|
|||
[(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
|
||||
self.consume_tts(1, &modifiers);
|
||||
|
||||
if !&[token::OpenDelim(token::Paren), token::ModSep]
|
||||
if !&[token::OpenDelim(Delimiter::Parenthesis), token::ModSep]
|
||||
.contains(&self.token.kind)
|
||||
{
|
||||
// We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
|
||||
|
@ -1132,7 +1132,7 @@ impl<'a> Parser<'a> {
|
|||
Err(err)
|
||||
}
|
||||
}
|
||||
} else if token::OpenDelim(token::Paren) == self.token.kind {
|
||||
} else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
|
||||
// We have high certainty that this was a bad turbofish at this point.
|
||||
// `foo< bar >(`
|
||||
suggest(&mut err);
|
||||
|
@ -1186,8 +1186,10 @@ impl<'a> Parser<'a> {
|
|||
self.bump(); // `(`
|
||||
|
||||
// Consume the fn call arguments.
|
||||
let modifiers =
|
||||
[(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)];
|
||||
let modifiers = [
|
||||
(token::OpenDelim(Delimiter::Parenthesis), 1),
|
||||
(token::CloseDelim(Delimiter::Parenthesis), -1),
|
||||
];
|
||||
self.consume_tts(1, &modifiers);
|
||||
|
||||
if self.token.kind == token::Eof {
|
||||
|
@ -1579,15 +1581,15 @@ impl<'a> Parser<'a> {
|
|||
|
||||
fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
|
||||
self.expect(&token::Not)?;
|
||||
self.expect(&token::OpenDelim(token::Paren))?;
|
||||
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
|
||||
let expr = self.parse_expr()?;
|
||||
self.expect(&token::CloseDelim(token::Paren))?;
|
||||
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
|
||||
Ok((self.prev_token.span, expr, false))
|
||||
}
|
||||
|
||||
fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
|
||||
let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
|
||||
let expr = if self.token == token::OpenDelim(token::Brace) {
|
||||
let expr = if self.token == token::OpenDelim(Delimiter::Brace) {
|
||||
// Handle `await { <expr> }`.
|
||||
// This needs to be handled separately from the next arm to avoid
|
||||
// interpreting `await { <expr> }?` as `<expr>?.await`.
|
||||
|
@ -1619,8 +1621,8 @@ impl<'a> Parser<'a> {
|
|||
|
||||
/// If encountering `future.await()`, consumes and emits an error.
|
||||
pub(super) fn recover_from_await_method_call(&mut self) {
|
||||
if self.token == token::OpenDelim(token::Paren)
|
||||
&& self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
|
||||
if self.token == token::OpenDelim(Delimiter::Parenthesis)
|
||||
&& self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
|
||||
{
|
||||
// future.await()
|
||||
let lo = self.token.span;
|
||||
|
@ -1641,7 +1643,7 @@ impl<'a> Parser<'a> {
|
|||
pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
|
||||
let is_try = self.token.is_keyword(kw::Try);
|
||||
let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
|
||||
let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(token::Paren)); //check for (
|
||||
let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for (
|
||||
|
||||
if is_try && is_questionmark && is_open {
|
||||
let lo = self.token.span;
|
||||
|
@ -1649,8 +1651,8 @@ impl<'a> Parser<'a> {
|
|||
self.bump(); //remove !
|
||||
let try_span = lo.to(self.token.span); //we take the try!( span
|
||||
self.bump(); //remove (
|
||||
let is_empty = self.token == token::CloseDelim(token::Paren); //check if the block is empty
|
||||
self.consume_block(token::Paren, ConsumeClosingDelim::No); //eat the block
|
||||
let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty
|
||||
self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::No); //eat the block
|
||||
let hi = self.token.span;
|
||||
self.bump(); //remove )
|
||||
let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro");
|
||||
|
@ -1681,7 +1683,7 @@ impl<'a> Parser<'a> {
|
|||
begin_paren: Option<Span>,
|
||||
) -> P<Pat> {
|
||||
match (&self.token.kind, begin_paren) {
|
||||
(token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
|
||||
(token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
|
||||
self.bump();
|
||||
|
||||
self.struct_span_err(
|
||||
|
@ -1714,8 +1716,8 @@ impl<'a> Parser<'a> {
|
|||
|| self.token.is_ident() &&
|
||||
matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) &&
|
||||
!self.token.is_reserved_ident() && // v `foo:bar(baz)`
|
||||
self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren))
|
||||
|| self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) // `foo:bar {`
|
||||
self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Parenthesis))
|
||||
|| self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)) // `foo:bar {`
|
||||
|| self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
|
||||
self.look_ahead(2, |t| t == &token::Lt) &&
|
||||
self.look_ahead(3, |t| t.is_ident())
|
||||
|
@ -1728,7 +1730,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
pub(super) fn recover_seq_parse_error(
|
||||
&mut self,
|
||||
delim: token::DelimToken,
|
||||
delim: Delimiter,
|
||||
lo: Span,
|
||||
result: PResult<'a, P<Expr>>,
|
||||
) -> P<Expr> {
|
||||
|
@ -1845,7 +1847,7 @@ impl<'a> Parser<'a> {
|
|||
loop {
|
||||
debug!("recover_stmt_ loop {:?}", self.token);
|
||||
match self.token.kind {
|
||||
token::OpenDelim(token::DelimToken::Brace) => {
|
||||
token::OpenDelim(Delimiter::Brace) => {
|
||||
brace_depth += 1;
|
||||
self.bump();
|
||||
if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
|
||||
|
@ -1853,11 +1855,11 @@ impl<'a> Parser<'a> {
|
|||
in_block = true;
|
||||
}
|
||||
}
|
||||
token::OpenDelim(token::DelimToken::Bracket) => {
|
||||
token::OpenDelim(Delimiter::Bracket) => {
|
||||
bracket_depth += 1;
|
||||
self.bump();
|
||||
}
|
||||
token::CloseDelim(token::DelimToken::Brace) => {
|
||||
token::CloseDelim(Delimiter::Brace) => {
|
||||
if brace_depth == 0 {
|
||||
debug!("recover_stmt_ return - close delim {:?}", self.token);
|
||||
break;
|
||||
|
@ -1869,7 +1871,7 @@ impl<'a> Parser<'a> {
|
|||
break;
|
||||
}
|
||||
}
|
||||
token::CloseDelim(token::DelimToken::Bracket) => {
|
||||
token::CloseDelim(Delimiter::Bracket) => {
|
||||
bracket_depth -= 1;
|
||||
if bracket_depth < 0 {
|
||||
bracket_depth = 0;
|
||||
|
@ -1927,11 +1929,11 @@ impl<'a> Parser<'a> {
|
|||
.emit();
|
||||
self.bump();
|
||||
} else if self.token == token::Pound
|
||||
&& self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket))
|
||||
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
|
||||
{
|
||||
let lo = self.token.span;
|
||||
// Skip every token until next possible arg.
|
||||
while self.token != token::CloseDelim(token::Bracket) {
|
||||
while self.token != token::CloseDelim(Delimiter::Bracket) {
|
||||
self.bump();
|
||||
}
|
||||
let sp = lo.to(self.token.span);
|
||||
|
@ -1952,7 +1954,9 @@ impl<'a> Parser<'a> {
|
|||
// If we find a pattern followed by an identifier, it could be an (incorrect)
|
||||
// C-style parameter declaration.
|
||||
if self.check_ident()
|
||||
&& self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren))
|
||||
&& self.look_ahead(1, |t| {
|
||||
*t == token::Comma || *t == token::CloseDelim(Delimiter::Parenthesis)
|
||||
})
|
||||
{
|
||||
// `fn foo(String s) {}`
|
||||
let ident = self.parse_ident().unwrap();
|
||||
|
@ -1968,7 +1972,7 @@ impl<'a> Parser<'a> {
|
|||
} else if require_name
|
||||
&& (self.token == token::Comma
|
||||
|| self.token == token::Lt
|
||||
|| self.token == token::CloseDelim(token::Paren))
|
||||
|| self.token == token::CloseDelim(Delimiter::Parenthesis))
|
||||
{
|
||||
let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)";
|
||||
|
||||
|
@ -2086,11 +2090,7 @@ impl<'a> Parser<'a> {
|
|||
Ok(param)
|
||||
}
|
||||
|
||||
pub(super) fn consume_block(
|
||||
&mut self,
|
||||
delim: token::DelimToken,
|
||||
consume_close: ConsumeClosingDelim,
|
||||
) {
|
||||
pub(super) fn consume_block(&mut self, delim: Delimiter, consume_close: ConsumeClosingDelim) {
|
||||
let mut brace_depth = 0;
|
||||
loop {
|
||||
if self.eat(&token::OpenDelim(delim)) {
|
||||
|
@ -2109,7 +2109,8 @@ impl<'a> Parser<'a> {
|
|||
brace_depth -= 1;
|
||||
continue;
|
||||
}
|
||||
} else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
|
||||
} else if self.token == token::Eof || self.eat(&token::CloseDelim(Delimiter::Invisible))
|
||||
{
|
||||
return;
|
||||
} else {
|
||||
self.bump();
|
||||
|
@ -2555,7 +2556,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
crate fn maybe_recover_unexpected_block_label(&mut self) -> bool {
|
||||
let Some(label) = self.eat_label().filter(|_| {
|
||||
self.eat(&token::Colon) && self.token.kind == token::OpenDelim(token::Brace)
|
||||
self.eat(&token::Colon) && self.token.kind == token::OpenDelim(Delimiter::Brace)
|
||||
}) else {
|
||||
return false;
|
||||
};
|
||||
|
@ -2652,7 +2653,7 @@ impl<'a> Parser<'a> {
|
|||
/// Parse and throw away a parenthesized comma separated
|
||||
/// sequence of patterns until `)` is reached.
|
||||
fn skip_pat_list(&mut self) -> PResult<'a, ()> {
|
||||
while !self.check(&token::CloseDelim(token::Paren)) {
|
||||
while !self.check(&token::CloseDelim(Delimiter::Parenthesis)) {
|
||||
self.parse_pat_no_top_alt(None)?;
|
||||
if !self.eat(&token::Comma) {
|
||||
return Ok(());
|
||||
|
|
|
@ -7,9 +7,8 @@ use super::{
|
|||
};
|
||||
use crate::maybe_recover_from_interpolated_ty_qpath;
|
||||
|
||||
use ast::token::DelimToken;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::token::{self, Token, TokenKind};
|
||||
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
|
||||
use rustc_ast::tokenstream::Spacing;
|
||||
use rustc_ast::util::classify;
|
||||
use rustc_ast::util::literal::LitError;
|
||||
|
@ -495,7 +494,7 @@ impl<'a> Parser<'a> {
|
|||
fn is_at_start_of_range_notation_rhs(&self) -> bool {
|
||||
if self.token.can_begin_expr() {
|
||||
// Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
|
||||
if self.token == token::OpenDelim(token::Brace) {
|
||||
if self.token == token::OpenDelim(Delimiter::Brace) {
|
||||
return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
|
||||
}
|
||||
true
|
||||
|
@ -992,8 +991,8 @@ impl<'a> Parser<'a> {
|
|||
return Ok(e);
|
||||
}
|
||||
e = match self.token.kind {
|
||||
token::OpenDelim(token::Paren) => self.parse_fn_call_expr(lo, e),
|
||||
token::OpenDelim(token::Bracket) => self.parse_index_expr(lo, e)?,
|
||||
token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e),
|
||||
token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?,
|
||||
_ => return Ok(e),
|
||||
}
|
||||
}
|
||||
|
@ -1156,7 +1155,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
/// Parse a function call expression, `expr(...)`.
|
||||
fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
|
||||
let snapshot = if self.token.kind == token::OpenDelim(token::Paren)
|
||||
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
|
||||
&& self.look_ahead_type_ascription_as_field()
|
||||
{
|
||||
Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
|
||||
|
@ -1173,7 +1172,7 @@ impl<'a> Parser<'a> {
|
|||
{
|
||||
return expr;
|
||||
}
|
||||
self.recover_seq_parse_error(token::Paren, lo, seq)
|
||||
self.recover_seq_parse_error(Delimiter::Parenthesis, lo, seq)
|
||||
}
|
||||
|
||||
/// If we encounter a parser state that looks like the user has written a `struct` literal with
|
||||
|
@ -1190,8 +1189,10 @@ impl<'a> Parser<'a> {
|
|||
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
|
||||
let name = pprust::path_to_string(&path);
|
||||
snapshot.bump(); // `(`
|
||||
match snapshot.parse_struct_fields(path, false, token::Paren) {
|
||||
Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(token::Paren)) => {
|
||||
match snapshot.parse_struct_fields(path, false, Delimiter::Parenthesis) {
|
||||
Ok((fields, ..))
|
||||
if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
|
||||
{
|
||||
// We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
|
||||
// `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
|
||||
self.restore_snapshot(snapshot);
|
||||
|
@ -1241,7 +1242,7 @@ impl<'a> Parser<'a> {
|
|||
fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
|
||||
self.bump(); // `[`
|
||||
let index = self.parse_expr()?;
|
||||
self.expect(&token::CloseDelim(token::Bracket))?;
|
||||
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
|
||||
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index), AttrVec::new()))
|
||||
}
|
||||
|
||||
|
@ -1253,10 +1254,10 @@ impl<'a> Parser<'a> {
|
|||
|
||||
let fn_span_lo = self.token.span;
|
||||
let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
|
||||
self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(token::Paren)]);
|
||||
self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]);
|
||||
self.check_turbofish_missing_angle_brackets(&mut segment);
|
||||
|
||||
if self.check(&token::OpenDelim(token::Paren)) {
|
||||
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
|
||||
// Method call `expr.f()`
|
||||
let mut args = self.parse_paren_expr_seq()?;
|
||||
args.insert(0, self_arg);
|
||||
|
@ -1302,9 +1303,9 @@ impl<'a> Parser<'a> {
|
|||
// could be removed without changing functionality, but it's faster
|
||||
// to have it here, especially for programs with large constants.
|
||||
self.parse_lit_expr(attrs)
|
||||
} else if self.check(&token::OpenDelim(token::Paren)) {
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
|
||||
self.parse_tuple_parens_expr(attrs)
|
||||
} else if self.check(&token::OpenDelim(token::Brace)) {
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||
self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs)
|
||||
} else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) {
|
||||
self.parse_closure_expr(attrs).map_err(|mut err| {
|
||||
|
@ -1315,8 +1316,8 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
err
|
||||
})
|
||||
} else if self.check(&token::OpenDelim(token::Bracket)) {
|
||||
self.parse_array_or_repeat_expr(attrs, token::Bracket)
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
|
||||
self.parse_array_or_repeat_expr(attrs, Delimiter::Bracket)
|
||||
} else if self.check_path() {
|
||||
self.parse_path_start_expr(attrs)
|
||||
} else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
|
||||
|
@ -1422,14 +1423,16 @@ impl<'a> Parser<'a> {
|
|||
|
||||
fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
||||
let lo = self.token.span;
|
||||
self.expect(&token::OpenDelim(token::Paren))?;
|
||||
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
|
||||
let (es, trailing_comma) = match self.parse_seq_to_end(
|
||||
&token::CloseDelim(token::Paren),
|
||||
&token::CloseDelim(Delimiter::Parenthesis),
|
||||
SeqSep::trailing_allowed(token::Comma),
|
||||
|p| p.parse_expr_catch_underscore(),
|
||||
) {
|
||||
Ok(x) => x,
|
||||
Err(err) => return Ok(self.recover_seq_parse_error(token::Paren, lo, Err(err))),
|
||||
Err(err) => {
|
||||
return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, Err(err)));
|
||||
}
|
||||
};
|
||||
let kind = if es.len() == 1 && !trailing_comma {
|
||||
// `(e)` is parenthesized `e`.
|
||||
|
@ -1445,7 +1448,7 @@ impl<'a> Parser<'a> {
|
|||
fn parse_array_or_repeat_expr(
|
||||
&mut self,
|
||||
attrs: AttrVec,
|
||||
close_delim: token::DelimToken,
|
||||
close_delim: Delimiter,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
let lo = self.token.span;
|
||||
self.bump(); // `[` or other open delim
|
||||
|
@ -1500,7 +1503,7 @@ impl<'a> Parser<'a> {
|
|||
prior_type_ascription: self.last_type_ascription,
|
||||
};
|
||||
(self.prev_token.span, ExprKind::MacCall(mac))
|
||||
} else if self.check(&token::OpenDelim(token::Brace)) {
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||
if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) {
|
||||
if qself.is_some() {
|
||||
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
|
||||
|
@ -1533,7 +1536,7 @@ impl<'a> Parser<'a> {
|
|||
self.parse_for_expr(label, lo, attrs)
|
||||
} else if self.eat_keyword(kw::Loop) {
|
||||
self.parse_loop_expr(label, lo, attrs)
|
||||
} else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() {
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
|
||||
self.parse_block_expr(label, lo, BlockCheckMode::Default, attrs)
|
||||
} else if !ate_colon && (self.check(&TokenKind::Comma) || self.check(&TokenKind::Gt)) {
|
||||
// We're probably inside of a `Path<'a>` that needs a turbofish
|
||||
|
@ -1631,7 +1634,7 @@ impl<'a> Parser<'a> {
|
|||
)
|
||||
.emit();
|
||||
Some(lexpr)
|
||||
} else if self.token != token::OpenDelim(token::Brace)
|
||||
} else if self.token != token::OpenDelim(Delimiter::Brace)
|
||||
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
|
||||
{
|
||||
let expr = self.parse_expr_opt()?;
|
||||
|
@ -1940,7 +1943,7 @@ impl<'a> Parser<'a> {
|
|||
attrs: AttrVec,
|
||||
) -> Option<P<Expr>> {
|
||||
let mut snapshot = self.create_snapshot_for_diagnostic();
|
||||
match snapshot.parse_array_or_repeat_expr(attrs, token::Brace) {
|
||||
match snapshot.parse_array_or_repeat_expr(attrs, Delimiter::Brace) {
|
||||
Ok(arr) => {
|
||||
let hi = snapshot.prev_token.span;
|
||||
self.struct_span_err(arr.span, "this is a block expression, not an array")
|
||||
|
@ -2044,7 +2047,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
|
||||
if self.token.kind == TokenKind::Semi
|
||||
&& matches!(self.token_cursor.frame.delim_sp, Some((DelimToken::Paren, _)))
|
||||
&& matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
|
||||
{
|
||||
// It is likely that the closure body is a block but where the
|
||||
// braces have been removed. We will recover and eat the next
|
||||
|
@ -2158,7 +2161,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
} else {
|
||||
let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
|
||||
let not_block = self.token != token::OpenDelim(token::Brace);
|
||||
let not_block = self.token != token::OpenDelim(Delimiter::Brace);
|
||||
let block = self.parse_block().map_err(|err| {
|
||||
if not_block {
|
||||
self.error_missing_if_then_block(lo, Some(err), missing_then_block_binop_span())
|
||||
|
@ -2283,7 +2286,7 @@ impl<'a> Parser<'a> {
|
|||
// This is used below for recovery in case of `for ( $stuff ) $block`
|
||||
// in which case we will suggest `for $stuff $block`.
|
||||
let begin_paren = match self.token.kind {
|
||||
token::OpenDelim(token::Paren) => Some(self.token.span),
|
||||
token::OpenDelim(Delimiter::Parenthesis) => Some(self.token.span),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
|
@ -2372,7 +2375,7 @@ impl<'a> Parser<'a> {
|
|||
let match_span = self.prev_token.span;
|
||||
let lo = self.prev_token.span;
|
||||
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
|
||||
if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
|
||||
if let Err(mut e) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
|
||||
if self.token == token::Semi {
|
||||
e.span_suggestion_short(
|
||||
match_span,
|
||||
|
@ -2391,7 +2394,7 @@ impl<'a> Parser<'a> {
|
|||
attrs.extend(self.parse_inner_attributes()?);
|
||||
|
||||
let mut arms: Vec<Arm> = Vec::new();
|
||||
while self.token != token::CloseDelim(token::Brace) {
|
||||
while self.token != token::CloseDelim(Delimiter::Brace) {
|
||||
match self.parse_arm() {
|
||||
Ok(arm) => arms.push(arm),
|
||||
Err(mut e) => {
|
||||
|
@ -2399,7 +2402,7 @@ impl<'a> Parser<'a> {
|
|||
e.emit();
|
||||
self.recover_stmt();
|
||||
let span = lo.to(self.token.span);
|
||||
if self.token == token::CloseDelim(token::Brace) {
|
||||
if self.token == token::CloseDelim(Delimiter::Brace) {
|
||||
self.bump();
|
||||
}
|
||||
return Ok(self.mk_expr(span, ExprKind::Match(scrutinee, arms), attrs));
|
||||
|
@ -2463,7 +2466,7 @@ impl<'a> Parser<'a> {
|
|||
// We might have either a `,` -> `;` typo, or a block without braces. We need
|
||||
// a more subtle parsing strategy.
|
||||
loop {
|
||||
if self.token.kind == token::CloseDelim(token::Brace) {
|
||||
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
|
||||
// We have reached the closing brace of the `match` expression.
|
||||
return Some(err(self, stmts));
|
||||
}
|
||||
|
@ -2571,7 +2574,7 @@ impl<'a> Parser<'a> {
|
|||
})?;
|
||||
|
||||
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
|
||||
&& this.token != token::CloseDelim(token::Brace);
|
||||
&& this.token != token::CloseDelim(Delimiter::Brace);
|
||||
|
||||
let hi = this.prev_token.span;
|
||||
|
||||
|
@ -2592,8 +2595,8 @@ impl<'a> Parser<'a> {
|
|||
TrailingToken::None,
|
||||
));
|
||||
}
|
||||
this.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err(
|
||||
|mut err| {
|
||||
this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
|
||||
.map_err(|mut err| {
|
||||
match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) {
|
||||
(Ok(ref expr_lines), Ok(ref arm_start_lines))
|
||||
if arm_start_lines.lines[0].end_col
|
||||
|
@ -2627,8 +2630,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
}
|
||||
err
|
||||
},
|
||||
)?;
|
||||
})?;
|
||||
} else {
|
||||
this.eat(&token::Comma);
|
||||
}
|
||||
|
@ -2670,13 +2672,13 @@ impl<'a> Parser<'a> {
|
|||
fn is_do_catch_block(&self) -> bool {
|
||||
self.token.is_keyword(kw::Do)
|
||||
&& self.is_keyword_ahead(1, &[kw::Catch])
|
||||
&& self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
|
||||
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
|
||||
&& !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
|
||||
}
|
||||
|
||||
fn is_try_block(&self) -> bool {
|
||||
self.token.is_keyword(kw::Try)
|
||||
&& self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
|
||||
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
|
||||
&& self.token.uninterpolated_span().rust_2018()
|
||||
}
|
||||
|
||||
|
@ -2696,10 +2698,10 @@ impl<'a> Parser<'a> {
|
|||
&& ((
|
||||
// `async move {`
|
||||
self.is_keyword_ahead(1, &[kw::Move])
|
||||
&& self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
|
||||
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
|
||||
) || (
|
||||
// `async {`
|
||||
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
|
||||
self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -2726,7 +2728,7 @@ impl<'a> Parser<'a> {
|
|||
) -> Option<PResult<'a, P<Expr>>> {
|
||||
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
|
||||
if struct_allowed || self.is_certainly_not_a_block() {
|
||||
if let Err(err) = self.expect(&token::OpenDelim(token::Brace)) {
|
||||
if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
|
||||
return Some(Err(err));
|
||||
}
|
||||
let expr = self.parse_struct_expr(qself.cloned(), path.clone(), attrs.clone(), true);
|
||||
|
@ -2753,7 +2755,7 @@ impl<'a> Parser<'a> {
|
|||
&mut self,
|
||||
pth: ast::Path,
|
||||
recover: bool,
|
||||
close_delim: token::DelimToken,
|
||||
close_delim: Delimiter,
|
||||
) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> {
|
||||
let mut fields = Vec::new();
|
||||
let mut base = ast::StructRest::None;
|
||||
|
@ -2853,9 +2855,9 @@ impl<'a> Parser<'a> {
|
|||
) -> PResult<'a, P<Expr>> {
|
||||
let lo = pth.span;
|
||||
let (fields, base, recover_async) =
|
||||
self.parse_struct_fields(pth.clone(), recover, token::Brace)?;
|
||||
self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?;
|
||||
let span = lo.to(self.token.span);
|
||||
self.expect(&token::CloseDelim(token::Brace))?;
|
||||
self.expect(&token::CloseDelim(Delimiter::Brace))?;
|
||||
let expr = if recover_async {
|
||||
ExprKind::Err
|
||||
} else {
|
||||
|
|
|
@ -4,7 +4,7 @@ use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Traili
|
|||
|
||||
use rustc_ast::ast::*;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::token::{self, TokenKind};
|
||||
use rustc_ast::token::{self, Delimiter, TokenKind};
|
||||
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
|
||||
use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID};
|
||||
use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind};
|
||||
|
@ -39,9 +39,9 @@ impl<'a> Parser<'a> {
|
|||
let mod_kind = if self.eat(&token::Semi) {
|
||||
ModKind::Unloaded
|
||||
} else {
|
||||
self.expect(&token::OpenDelim(token::Brace))?;
|
||||
self.expect(&token::OpenDelim(Delimiter::Brace))?;
|
||||
let (mut inner_attrs, items, inner_span) =
|
||||
self.parse_mod(&token::CloseDelim(token::Brace))?;
|
||||
self.parse_mod(&token::CloseDelim(Delimiter::Brace))?;
|
||||
attrs.append(&mut inner_attrs);
|
||||
ModKind::Loaded(items, Inline::Yes, inner_span)
|
||||
};
|
||||
|
@ -324,7 +324,7 @@ impl<'a> Parser<'a> {
|
|||
let sp = self.prev_token.span.between(self.token.span);
|
||||
let full_sp = self.prev_token.span.to(self.token.span);
|
||||
let ident_sp = self.token.span;
|
||||
if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) {
|
||||
if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) {
|
||||
// possible public struct definition where `struct` was forgotten
|
||||
let ident = self.parse_ident().unwrap();
|
||||
let msg = format!("add `struct` here to parse `{ident}` as a public struct");
|
||||
|
@ -336,16 +336,16 @@ impl<'a> Parser<'a> {
|
|||
Applicability::MaybeIncorrect, // speculative
|
||||
);
|
||||
Err(err)
|
||||
} else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
|
||||
} else if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)) {
|
||||
let ident = self.parse_ident().unwrap();
|
||||
self.bump(); // `(`
|
||||
let kw_name = self.recover_first_param();
|
||||
self.consume_block(token::Paren, ConsumeClosingDelim::Yes);
|
||||
self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
|
||||
let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
|
||||
self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]);
|
||||
self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
|
||||
self.bump(); // `{`
|
||||
("fn", kw_name, false)
|
||||
} else if self.check(&token::OpenDelim(token::Brace)) {
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||
self.bump(); // `{`
|
||||
("fn", kw_name, false)
|
||||
} else if self.check(&token::Colon) {
|
||||
|
@ -358,7 +358,7 @@ impl<'a> Parser<'a> {
|
|||
let msg = format!("missing `{kw}` for {kw_name} definition");
|
||||
let mut err = self.struct_span_err(sp, &msg);
|
||||
if !ambiguous {
|
||||
self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
|
||||
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
|
||||
let suggestion =
|
||||
format!("add `{kw}` here to parse `{ident}` as a public {kw_name}");
|
||||
err.span_suggestion_short(
|
||||
|
@ -386,9 +386,9 @@ impl<'a> Parser<'a> {
|
|||
let ident = self.parse_ident().unwrap();
|
||||
self.eat_to_tokens(&[&token::Gt]);
|
||||
self.bump(); // `>`
|
||||
let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
|
||||
let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(Delimiter::Parenthesis)) {
|
||||
("fn", self.recover_first_param(), false)
|
||||
} else if self.check(&token::OpenDelim(token::Brace)) {
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||
("struct", "struct", false)
|
||||
} else {
|
||||
("fn` or `struct", "function or struct", true)
|
||||
|
@ -630,11 +630,11 @@ impl<'a> Parser<'a> {
|
|||
mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
|
||||
) -> PResult<'a, Vec<T>> {
|
||||
let open_brace_span = self.token.span;
|
||||
self.expect(&token::OpenDelim(token::Brace))?;
|
||||
self.expect(&token::OpenDelim(Delimiter::Brace))?;
|
||||
attrs.append(&mut self.parse_inner_attributes()?);
|
||||
|
||||
let mut items = Vec::new();
|
||||
while !self.eat(&token::CloseDelim(token::Brace)) {
|
||||
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
|
||||
if self.recover_doc_comment_before_brace() {
|
||||
continue;
|
||||
}
|
||||
|
@ -642,7 +642,7 @@ impl<'a> Parser<'a> {
|
|||
Ok(None) => {
|
||||
// We have to bail or we'll potentially never make progress.
|
||||
let non_item_span = self.token.span;
|
||||
self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
|
||||
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
|
||||
self.struct_span_err(non_item_span, "non-item in item list")
|
||||
.span_label(open_brace_span, "item list starts here")
|
||||
.span_label(non_item_span, "non-item starts here")
|
||||
|
@ -652,7 +652,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
Ok(Some(item)) => items.extend(item),
|
||||
Err(mut err) => {
|
||||
self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
|
||||
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
|
||||
err.span_label(open_brace_span, "while parsing this item list starting here")
|
||||
.span_label(self.prev_token.span, "the item list ends here")
|
||||
.emit();
|
||||
|
@ -666,7 +666,7 @@ impl<'a> Parser<'a> {
|
|||
/// Recover on a doc comment before `}`.
|
||||
fn recover_doc_comment_before_brace(&mut self) -> bool {
|
||||
if let token::DocComment(..) = self.token.kind {
|
||||
if self.look_ahead(1, |tok| tok == &token::CloseDelim(token::Brace)) {
|
||||
if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
|
||||
struct_span_err!(
|
||||
self.diagnostic(),
|
||||
self.token.span,
|
||||
|
@ -866,7 +866,7 @@ impl<'a> Parser<'a> {
|
|||
let lo = self.token.span;
|
||||
|
||||
let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None };
|
||||
let kind = if self.check(&token::OpenDelim(token::Brace))
|
||||
let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
|
||||
|| self.check(&token::BinOp(token::Star))
|
||||
|| self.is_import_coupler()
|
||||
{
|
||||
|
@ -908,7 +908,7 @@ impl<'a> Parser<'a> {
|
|||
/// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
|
||||
/// ```
|
||||
fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
|
||||
self.parse_delim_comma_seq(token::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID)))
|
||||
self.parse_delim_comma_seq(Delimiter::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID)))
|
||||
.map(|(r, _)| r)
|
||||
}
|
||||
|
||||
|
@ -1077,7 +1077,7 @@ impl<'a> Parser<'a> {
|
|||
&& self.is_keyword_ahead(1, &[kw::Extern])
|
||||
&& self.look_ahead(
|
||||
2 + self.look_ahead(2, |t| t.can_begin_literal_maybe_minus() as usize),
|
||||
|t| t.kind == token::OpenDelim(token::Brace),
|
||||
|t| t.kind == token::OpenDelim(Delimiter::Brace),
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -1204,8 +1204,9 @@ impl<'a> Parser<'a> {
|
|||
let mut generics = self.parse_generics()?;
|
||||
generics.where_clause = self.parse_where_clause()?;
|
||||
|
||||
let (variants, _) =
|
||||
self.parse_delim_comma_seq(token::Brace, |p| p.parse_enum_variant()).map_err(|e| {
|
||||
let (variants, _) = self
|
||||
.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant())
|
||||
.map_err(|e| {
|
||||
self.recover_stmt();
|
||||
e
|
||||
})?;
|
||||
|
@ -1228,11 +1229,11 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
let ident = this.parse_field_ident("enum", vlo)?;
|
||||
|
||||
let struct_def = if this.check(&token::OpenDelim(token::Brace)) {
|
||||
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||
// Parse a struct variant.
|
||||
let (fields, recovered) = this.parse_record_struct_body("struct", false)?;
|
||||
VariantData::Struct(fields, recovered)
|
||||
} else if this.check(&token::OpenDelim(token::Paren)) {
|
||||
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
|
||||
VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
|
||||
} else {
|
||||
VariantData::Unit(DUMMY_NODE_ID)
|
||||
|
@ -1292,12 +1293,12 @@ impl<'a> Parser<'a> {
|
|||
} else if self.eat(&token::Semi) {
|
||||
VariantData::Unit(DUMMY_NODE_ID)
|
||||
// Record-style struct definition
|
||||
} else if self.token == token::OpenDelim(token::Brace) {
|
||||
} else if self.token == token::OpenDelim(Delimiter::Brace) {
|
||||
let (fields, recovered) =
|
||||
self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?;
|
||||
VariantData::Struct(fields, recovered)
|
||||
// Tuple-style struct definition with optional where-clause.
|
||||
} else if self.token == token::OpenDelim(token::Paren) {
|
||||
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
|
||||
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
|
||||
generics.where_clause = self.parse_where_clause()?;
|
||||
self.expect_semi()?;
|
||||
|
@ -1326,7 +1327,7 @@ impl<'a> Parser<'a> {
|
|||
let (fields, recovered) =
|
||||
self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
|
||||
VariantData::Struct(fields, recovered)
|
||||
} else if self.token == token::OpenDelim(token::Brace) {
|
||||
} else if self.token == token::OpenDelim(Delimiter::Brace) {
|
||||
let (fields, recovered) =
|
||||
self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
|
||||
VariantData::Struct(fields, recovered)
|
||||
|
@ -1348,10 +1349,10 @@ impl<'a> Parser<'a> {
|
|||
) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> {
|
||||
let mut fields = Vec::new();
|
||||
let mut recovered = false;
|
||||
if self.eat(&token::OpenDelim(token::Brace)) {
|
||||
while self.token != token::CloseDelim(token::Brace) {
|
||||
if self.eat(&token::OpenDelim(Delimiter::Brace)) {
|
||||
while self.token != token::CloseDelim(Delimiter::Brace) {
|
||||
let field = self.parse_field_def(adt_ty).map_err(|e| {
|
||||
self.consume_block(token::Brace, ConsumeClosingDelim::No);
|
||||
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
|
||||
recovered = true;
|
||||
e
|
||||
});
|
||||
|
@ -1363,7 +1364,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
}
|
||||
}
|
||||
self.eat(&token::CloseDelim(token::Brace));
|
||||
self.eat(&token::CloseDelim(Delimiter::Brace));
|
||||
} else {
|
||||
let token_str = super::token_descr(&self.token);
|
||||
let msg = &format!(
|
||||
|
@ -1439,7 +1440,7 @@ impl<'a> Parser<'a> {
|
|||
token::Comma => {
|
||||
self.bump();
|
||||
}
|
||||
token::CloseDelim(token::Brace) => {}
|
||||
token::CloseDelim(Delimiter::Brace) => {}
|
||||
token::DocComment(..) => {
|
||||
let previous_span = self.prev_token.span;
|
||||
let mut err = self.span_err(self.token.span, Error::UselessDocComment);
|
||||
|
@ -1450,7 +1451,7 @@ impl<'a> Parser<'a> {
|
|||
if !seen_comma && comma_after_doc_seen {
|
||||
seen_comma = true;
|
||||
}
|
||||
if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) {
|
||||
if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
|
||||
err.emit();
|
||||
} else {
|
||||
if !seen_comma {
|
||||
|
@ -1478,7 +1479,7 @@ impl<'a> Parser<'a> {
|
|||
if let Some(last_segment) = segments.last() {
|
||||
recovered = self.check_trailing_angle_brackets(
|
||||
last_segment,
|
||||
&[&token::Comma, &token::CloseDelim(token::Brace)],
|
||||
&[&token::Comma, &token::CloseDelim(Delimiter::Brace)],
|
||||
);
|
||||
if recovered {
|
||||
// Handle a case like `Vec<u8>>,` where we can continue parsing fields
|
||||
|
@ -1636,12 +1637,12 @@ impl<'a> Parser<'a> {
|
|||
/// ```
|
||||
fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
|
||||
let ident = self.parse_ident()?;
|
||||
let body = if self.check(&token::OpenDelim(token::Brace)) {
|
||||
let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||
self.parse_mac_args()? // `MacBody`
|
||||
} else if self.check(&token::OpenDelim(token::Paren)) {
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
|
||||
let params = self.parse_token_tree(); // `MacParams`
|
||||
let pspan = params.span();
|
||||
if !self.check(&token::OpenDelim(token::Brace)) {
|
||||
if !self.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||
return self.unexpected();
|
||||
}
|
||||
let body = self.parse_token_tree(); // `MacBody`
|
||||
|
@ -1924,7 +1925,7 @@ impl<'a> Parser<'a> {
|
|||
self.expect_semi()?;
|
||||
*sig_hi = self.prev_token.span;
|
||||
(Vec::new(), None)
|
||||
} else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() {
|
||||
} else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
|
||||
self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))?
|
||||
} else if self.token.kind == token::Eq {
|
||||
// Recover `fn foo() = $expr;`.
|
||||
|
@ -1943,12 +1944,12 @@ impl<'a> Parser<'a> {
|
|||
(Vec::new(), Some(self.mk_block_err(span)))
|
||||
} else {
|
||||
let expected = if req_body {
|
||||
&[token::OpenDelim(token::Brace)][..]
|
||||
&[token::OpenDelim(Delimiter::Brace)][..]
|
||||
} else {
|
||||
&[token::Semi, token::OpenDelim(token::Brace)]
|
||||
&[token::Semi, token::OpenDelim(Delimiter::Brace)]
|
||||
};
|
||||
if let Err(mut err) = self.expected_one_of_not_found(&[], &expected) {
|
||||
if self.token.kind == token::CloseDelim(token::Brace) {
|
||||
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
|
||||
// The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
|
||||
// the AST for typechecking.
|
||||
err.span_label(ident.span, "while parsing this `fn`");
|
||||
|
@ -2164,7 +2165,7 @@ impl<'a> Parser<'a> {
|
|||
e.emit();
|
||||
let lo = p.prev_token.span;
|
||||
// Skip every token until next possible arg or end.
|
||||
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
|
||||
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
|
||||
// Create a placeholder argument for proper arg count (issue #34264).
|
||||
Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span))))
|
||||
});
|
||||
|
@ -2220,7 +2221,7 @@ impl<'a> Parser<'a> {
|
|||
let mut ty = this.parse_ty_for_param();
|
||||
if ty.is_ok()
|
||||
&& this.token != token::Comma
|
||||
&& this.token != token::CloseDelim(token::Paren)
|
||||
&& this.token != token::CloseDelim(Delimiter::Parenthesis)
|
||||
{
|
||||
// This wasn't actually a type, but a pattern looking like a type,
|
||||
// so we are going to rollback and re-parse for recovery.
|
||||
|
|
|
@@ -19,7 +19,7 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 pub use path::PathStyle;

 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
 use rustc_ast::tokenstream::AttributesData;
 use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree};
@@ -244,12 +244,12 @@ struct TokenCursor {

 #[derive(Clone)]
 struct TokenCursorFrame {
-    delim_sp: Option<(DelimToken, DelimSpan)>,
+    delim_sp: Option<(Delimiter, DelimSpan)>,
     tree_cursor: tokenstream::Cursor,
 }

 impl TokenCursorFrame {
-    fn new(delim_sp: Option<(DelimToken, DelimSpan)>, tts: TokenStream) -> Self {
+    fn new(delim_sp: Option<(Delimiter, DelimSpan)>, tts: TokenStream) -> Self {
         TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
     }
 }
@@ -263,8 +263,8 @@ impl TokenCursor {
     #[inline(always)]
     fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
         loop {
-            // FIXME: we currently don't return `NoDelim` open/close delims. To fix #67062 we will
-            // need to, whereupon the `delim != DelimToken::NoDelim` conditions below can be
+            // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
+            // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
             // removed.
             if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() {
                 match tree {
@@ -278,14 +278,14 @@ impl TokenCursor {
                         // Set `open_delim` to true here because we deal with it immediately.
                         let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
                         self.stack.push(mem::replace(&mut self.frame, frame));
-                        if delim != DelimToken::NoDelim {
+                        if delim != Delimiter::Invisible {
                             return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
                         }
                         // No open delimeter to return; continue on to the next iteration.
                     }
                 };
             } else if let Some(frame) = self.stack.pop() {
-                if let Some((delim, span)) = self.frame.delim_sp && delim != DelimToken::NoDelim {
+                if let Some((delim, span)) = self.frame.delim_sp && delim != Delimiter::Invisible {
                     self.frame = frame;
                     return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
                 }
@@ -314,7 +314,7 @@ impl TokenCursor {
         let delim_span = DelimSpan::from_single(span);
         let body = TokenTree::Delimited(
             delim_span,
-            token::Bracket,
+            Delimiter::Bracket,
             [
                 TokenTree::token(token::Ident(sym::doc, false), span),
                 TokenTree::token(token::Eq, span),
@@ -626,7 +626,7 @@ impl<'a> Parser<'a> {
         self.is_keyword_ahead(dist, &[kw::Const])
             && self.look_ahead(dist + 1, |t| match t.kind {
                 token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
-                token::OpenDelim(DelimToken::Brace) => true,
+                token::OpenDelim(Delimiter::Brace) => true,
                 _ => false,
             })
     }
@@ -954,7 +954,7 @@ impl<'a> Parser<'a> {

     fn parse_delim_comma_seq<T>(
         &mut self,
-        delim: DelimToken,
+        delim: Delimiter,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (Vec<T>, bool)> {
         self.parse_unspanned_seq(
@@ -969,7 +969,7 @@ impl<'a> Parser<'a> {
         &mut self,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (Vec<T>, bool)> {
-        self.parse_delim_comma_seq(token::Paren, f)
+        self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
     }

     /// Advance the parser by one token using provided token as the next one.
@@ -1005,7 +1005,7 @@ impl<'a> Parser<'a> {
         }
         debug_assert!(!matches!(
             next.0.kind,
-            token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
+            token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
         ));
         self.inlined_bump_with(next)
     }
@@ -1018,10 +1018,10 @@ impl<'a> Parser<'a> {
         }

         let frame = &self.token_cursor.frame;
-        if let Some((delim, span)) = frame.delim_sp && delim != DelimToken::NoDelim {
+        if let Some((delim, span)) = frame.delim_sp && delim != Delimiter::Invisible {
             let all_normal = (0..dist).all(|i| {
                 let token = frame.tree_cursor.look_ahead(i);
-                !matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _)))
+                !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
             });
             if all_normal {
                 return match frame.tree_cursor.look_ahead(dist - 1) {
@@ -1043,7 +1043,7 @@ impl<'a> Parser<'a> {
             token = cursor.next(/* desugar_doc_comments */ false).0;
             if matches!(
                 token.kind,
-                token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
+                token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
             ) {
                 continue;
             }
@@ -1079,7 +1079,7 @@ impl<'a> Parser<'a> {
     /// Parses constness: `const` or nothing.
     fn parse_constness(&mut self) -> Const {
         // Avoid const blocks to be parsed as const items
-        if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace))
+        if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
             && self.eat_keyword(kw::Const)
         {
             Const::Yes(self.prev_token.uninterpolated_span())
@@ -1142,9 +1142,9 @@ impl<'a> Parser<'a> {

     fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
         Ok(
-            if self.check(&token::OpenDelim(DelimToken::Paren))
-                || self.check(&token::OpenDelim(DelimToken::Bracket))
-                || self.check(&token::OpenDelim(DelimToken::Brace))
+            if self.check(&token::OpenDelim(Delimiter::Parenthesis))
+                || self.check(&token::OpenDelim(Delimiter::Bracket))
+                || self.check(&token::OpenDelim(Delimiter::Brace))
             {
                 match self.parse_token_tree() {
                     TokenTree::Delimited(dspan, delim, tokens) =>
@@ -1288,7 +1288,7 @@ impl<'a> Parser<'a> {
         }
         let lo = self.prev_token.span;

-        if self.check(&token::OpenDelim(token::Paren)) {
+        if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             // We don't `self.bump()` the `(` yet because this might be a struct definition where
             // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
             // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
@@ -1299,7 +1299,7 @@ impl<'a> Parser<'a> {
             // Parse `pub(crate)`.
             self.bump(); // `(`
             self.bump(); // `crate`
-            self.expect(&token::CloseDelim(token::Paren))?; // `)`
+            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
             let vis = VisibilityKind::Crate(CrateSugar::PubCrate);
             return Ok(Visibility {
                 span: lo.to(self.prev_token.span),
@@ -1311,20 +1311,20 @@ impl<'a> Parser<'a> {
             self.bump(); // `(`
             self.bump(); // `in`
             let path = self.parse_path(PathStyle::Mod)?; // `path`
-            self.expect(&token::CloseDelim(token::Paren))?; // `)`
+            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
             let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
             return Ok(Visibility {
                 span: lo.to(self.prev_token.span),
                 kind: vis,
                 tokens: None,
             });
-        } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren))
+        } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
             && self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
         {
             // Parse `pub(self)` or `pub(super)`.
             self.bump(); // `(`
             let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
-            self.expect(&token::CloseDelim(token::Paren))?; // `)`
+            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
             let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
             return Ok(Visibility {
                 span: lo.to(self.prev_token.span),
@@ -1346,7 +1346,7 @@ impl<'a> Parser<'a> {
     fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
         self.bump(); // `(`
         let path = self.parse_path(PathStyle::Mod)?;
-        self.expect(&token::CloseDelim(token::Paren))?; // `)`
+        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`

         let msg = "incorrect visibility restriction";
         let suggestion = r##"some possible visibility restrictions are:
@@ -1413,7 +1413,7 @@ impl<'a> Parser<'a> {
     fn is_import_coupler(&mut self) -> bool {
         self.check(&token::ModSep)
             && self.look_ahead(1, |t| {
-                *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star)
+                *t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star)
             })
     }

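The `TokenCursor` hunks above preserve the existing behaviour around invisible delimiters: an explicit `OpenDelim`/`CloseDelim` token is produced only when the delimiter is not `Delimiter::Invisible`, otherwise the delimited contents are spliced through directly. The following is a much-simplified standalone sketch of that filtering, using toy types rather than the compiler's real `TokenCursor` and `TokenTree`:

```rust
// Toy token-tree flattener mirroring the `delim != Delimiter::Invisible`
// checks above: visible delimiters emit open/close tokens, invisible ones
// only contribute their contents. All types here are illustrative.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Delimiter {
    Parenthesis,
    Bracket,
    Brace,
    Invisible,
}

#[derive(Debug)]
enum TokenTree {
    Token(char),
    Delimited(Delimiter, Vec<TokenTree>),
}

#[derive(Debug, PartialEq)]
enum Token {
    Char(char),
    OpenDelim(Delimiter),
    CloseDelim(Delimiter),
}

fn flatten(trees: &[TokenTree], out: &mut Vec<Token>) {
    for tree in trees {
        match tree {
            TokenTree::Token(c) => out.push(Token::Char(*c)),
            TokenTree::Delimited(delim, inner) => {
                if *delim != Delimiter::Invisible {
                    out.push(Token::OpenDelim(*delim));
                }
                flatten(inner, out);
                if *delim != Delimiter::Invisible {
                    out.push(Token::CloseDelim(*delim));
                }
            }
        }
    }
}

fn main() {
    let trees =
        vec![TokenTree::Delimited(Delimiter::Invisible, vec![TokenTree::Token('x')])];
    let mut out = Vec::new();
    flatten(&trees, &mut out);
    // The invisible delimiter contributes no open/close tokens.
    assert_eq!(out, vec![Token::Char('x')]);
}
```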
@@ -1,5 +1,5 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, NonterminalKind, Token};
+use rustc_ast::token::{self, Delimiter, NonterminalKind, Token};
 use rustc_ast::AstLike;
 use rustc_ast_pretty::pprust;
 use rustc_errors::PResult;
@@ -43,7 +43,7 @@ impl<'a> Parser<'a> {
             _ => token.can_begin_type(),
         },
         NonterminalKind::Block => match token.kind {
-            token::OpenDelim(token::Brace) => true,
+            token::OpenDelim(Delimiter::Brace) => true,
             token::Interpolated(ref nt) => !matches!(
                 **nt,
                 token::NtItem(_)
@@ -67,8 +67,8 @@ impl<'a> Parser<'a> {
         NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
             match token.kind {
                 token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
-                token::OpenDelim(token::Paren) | // tuple pattern
-                token::OpenDelim(token::Bracket) | // slice pattern
+                token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern
+                token::OpenDelim(Delimiter::Bracket) | // slice pattern
                 token::BinOp(token::And) | // reference
                 token::BinOp(token::Minus) | // negative literal
                 token::AndAnd | // double reference
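The hunks above only rename the delimiters inside the "can this token begin this nonterminal?" checks: a block must start with a brace, while a pattern may start with a parenthesis (tuple pattern) or a bracket (slice pattern), among other tokens. A minimal standalone sketch of that kind of predicate, with toy enums rather than the parser's real `NonterminalKind` machinery:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Delimiter {
    Parenthesis,
    Bracket,
    Brace,
    Invisible,
}

#[derive(Clone, Copy, Debug)]
enum TokenKind {
    OpenDelim(Delimiter),
    Ident,
    // ...other kinds elided for brevity
}

/// Can a block fragment start at this token? (Mirrors the brace check above.)
fn may_begin_block(t: TokenKind) -> bool {
    matches!(t, TokenKind::OpenDelim(Delimiter::Brace))
}

/// Can a pattern fragment start at this token? (Parenthesis = tuple pattern,
/// Bracket = slice pattern, Ident = binding; the real list is longer.)
fn may_begin_pattern(t: TokenKind) -> bool {
    matches!(
        t,
        TokenKind::Ident
            | TokenKind::OpenDelim(Delimiter::Parenthesis)
            | TokenKind::OpenDelim(Delimiter::Bracket)
    )
}

fn main() {
    assert!(may_begin_block(TokenKind::OpenDelim(Delimiter::Brace)));
    assert!(may_begin_pattern(TokenKind::OpenDelim(Delimiter::Bracket)));
    assert!(!may_begin_pattern(TokenKind::OpenDelim(Delimiter::Brace)));
}
```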
@@ -2,7 +2,7 @@ use super::{ForceCollect, Parser, PathStyle, TrailingToken};
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
 use rustc_ast::ptr::P;
-use rustc_ast::token;
+use rustc_ast::token::{self, Delimiter};
 use rustc_ast::{
     self as ast, AttrVec, Attribute, BindingMode, Expr, ExprKind, MacCall, Mutability, Pat,
     PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
@@ -260,9 +260,9 @@ impl<'a> Parser<'a> {
                 | token::Semi // e.g. `let a |;`.
                 | token::Colon // e.g. `let a | :`.
                 | token::Comma // e.g. `let (a |,)`.
-                | token::CloseDelim(token::Bracket) // e.g. `let [a | ]`.
-                | token::CloseDelim(token::Paren) // e.g. `let (a | )`.
-                | token::CloseDelim(token::Brace) // e.g. `let A { f: a | }`.
+                | token::CloseDelim(Delimiter::Bracket) // e.g. `let [a | ]`.
+                | token::CloseDelim(Delimiter::Parenthesis) // e.g. `let (a | )`.
+                | token::CloseDelim(Delimiter::Brace) // e.g. `let A { f: a | }`.
             )
         });
         match (is_end_ahead, &self.token.kind) {
@@ -323,11 +323,11 @@ impl<'a> Parser<'a> {

         let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd {
             self.parse_pat_deref(expected)?
-        } else if self.check(&token::OpenDelim(token::Paren)) {
+        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             self.parse_pat_tuple_or_parens()?
-        } else if self.check(&token::OpenDelim(token::Bracket)) {
+        } else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
             // Parse `[pat, pat,...]` as a slice pattern.
-            let (pats, _) = self.parse_delim_comma_seq(token::Bracket, |p| {
+            let (pats, _) = self.parse_delim_comma_seq(Delimiter::Bracket, |p| {
                 p.parse_pat_allow_top_alt(
                     None,
                     RecoverComma::No,
@@ -389,9 +389,9 @@ impl<'a> Parser<'a> {
         } else if let Some(form) = self.parse_range_end() {
             let begin = self.mk_expr(span, ExprKind::Path(qself, path), AttrVec::new());
             self.parse_pat_range_begin_with(begin, form)?
-        } else if self.check(&token::OpenDelim(token::Brace)) {
+        } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
             self.parse_pat_struct(qself, path)?
-        } else if self.check(&token::OpenDelim(token::Paren)) {
+        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             self.parse_pat_tuple_struct(qself, path)?
         } else {
             PatKind::Path(qself, path)
@@ -845,8 +845,8 @@ impl<'a> Parser<'a> {
         // Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`.
         && !self.token.is_keyword(kw::In)
         // Try to do something more complex?
-        && self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(token::Paren) // A tuple struct pattern.
-            | token::OpenDelim(token::Brace) // A struct pattern.
+        && self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern.
+            | token::OpenDelim(Delimiter::Brace) // A struct pattern.
             | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
             | token::ModSep // A tuple / struct variant pattern.
             | token::Not)) // A macro expanding to a pattern.
@@ -868,7 +868,7 @@ impl<'a> Parser<'a> {
         // This shortly leads to a parse error. Note that if there is no explicit
         // binding mode then we do not end up here, because the lookahead
         // will direct us over to `parse_enum_variant()`.
-        if self.token == token::OpenDelim(token::Paren) {
+        if self.token == token::OpenDelim(Delimiter::Parenthesis) {
             return Err(self
                 .struct_span_err(self.prev_token.span, "expected identifier, found enum pattern"));
         }
@@ -917,7 +917,7 @@ impl<'a> Parser<'a> {
         let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None;
         let mut etc_span = None;

-        while self.token != token::CloseDelim(token::Brace) {
+        while self.token != token::CloseDelim(Delimiter::Brace) {
             let attrs = match self.parse_outer_attributes() {
                 Ok(attrs) => attrs,
                 Err(err) => {
@@ -946,7 +946,7 @@ impl<'a> Parser<'a> {
                 self.recover_one_fewer_dotdot();
                 self.bump(); // `..` || `...`

-                if self.token == token::CloseDelim(token::Brace) {
+                if self.token == token::CloseDelim(Delimiter::Brace) {
                     etc_span = Some(etc_sp);
                     break;
                 }
@@ -970,7 +970,7 @@ impl<'a> Parser<'a> {
                 }

                 etc_span = Some(etc_sp.until(self.token.span));
-                if self.token == token::CloseDelim(token::Brace) {
+                if self.token == token::CloseDelim(Delimiter::Brace) {
                     // If the struct looks otherwise well formed, recover and continue.
                     if let Some(sp) = comma_sp {
                         err.span_suggestion_short(
@@ -2,7 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{Parser, Restrictions, TokenType};
 use crate::maybe_whole;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Token};
+use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::{
     self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocConstraint,
     AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
@@ -236,14 +236,14 @@ impl<'a> Parser<'a> {
                 token.kind,
                 token::Lt
                     | token::BinOp(token::Shl)
-                    | token::OpenDelim(token::Paren)
+                    | token::OpenDelim(Delimiter::Parenthesis)
                     | token::LArrow
             )
         };
         let check_args_start = |this: &mut Self| {
             this.expected_tokens.extend_from_slice(&[
                 TokenType::Token(token::Lt),
-                TokenType::Token(token::OpenDelim(token::Paren)),
+                TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)),
             ]);
             is_args_start(&this.token)
         };
@@ -639,7 +639,7 @@ impl<'a> Parser<'a> {
     /// the caller.
     pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> {
         // Parse const argument.
-        let value = if let token::OpenDelim(token::Brace) = self.token.kind {
+        let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind {
             self.parse_block_expr(
                 None,
                 self.token.span,
@@ -667,7 +667,8 @@ impl<'a> Parser<'a> {
             GenericArg::Const(self.parse_const_arg()?)
         } else if self.check_type() {
             // Parse type argument.
-            let is_const_fn = self.look_ahead(1, |t| t.kind == token::OpenDelim(token::Paren));
+            let is_const_fn =
+                self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis));
             let mut snapshot = self.create_snapshot_for_diagnostic();
             match self.parse_ty() {
                 Ok(ty) => GenericArg::Type(ty),
@@ -11,7 +11,7 @@ use crate::maybe_whole;

 use rustc_ast as ast;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, TokenKind};
+use rustc_ast::token::{self, Delimiter, TokenKind};
 use rustc_ast::util::classify;
 use rustc_ast::{
     AstLike, AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle,
@@ -92,7 +92,7 @@ impl<'a> Parser<'a> {
             // Do not attempt to parse an expression if we're done here.
             self.error_outer_attrs(&attrs.take_for_recovery());
             self.mk_stmt(lo, StmtKind::Empty)
-        } else if self.token != token::CloseDelim(token::Brace) {
+        } else if self.token != token::CloseDelim(Delimiter::Brace) {
             // Remainder are line-expr stmts.
             let e = if force_collect == ForceCollect::Yes {
                 self.collect_tokens_no_attrs(|this| {
@@ -131,7 +131,7 @@ impl<'a> Parser<'a> {
                 }
             }

-            let expr = if this.eat(&token::OpenDelim(token::Brace)) {
+            let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
                 this.parse_struct_expr(None, path, AttrVec::new(), true)?
             } else {
                 let hi = this.prev_token.span;
@@ -165,7 +165,7 @@ impl<'a> Parser<'a> {
         let hi = self.prev_token.span;

         let style = match delim {
-            Some(token::Brace) => MacStmtStyle::Braces,
+            Some(Delimiter::Brace) => MacStmtStyle::Braces,
             Some(_) => MacStmtStyle::NoBraces,
             None => unreachable!(),
         };
@@ -434,7 +434,7 @@ impl<'a> Parser<'a> {
             // If the next token is an open brace (e.g., `if a b {`), the place-
             // inside-a-block suggestion would be more likely wrong than right.
             Ok(Some(_))
-                if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
+                if self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace))
                     || do_not_suggest_help => {}
             // Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836).
             Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
@@ -488,7 +488,7 @@ impl<'a> Parser<'a> {
         maybe_whole!(self, NtBlock, |x| (Vec::new(), x));

         self.maybe_recover_unexpected_block_label();
-        if !self.eat(&token::OpenDelim(token::Brace)) {
+        if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
             return self.error_block_no_opening_brace();
         }

@@ -509,7 +509,7 @@ impl<'a> Parser<'a> {
         recover: AttemptLocalParseRecovery,
     ) -> PResult<'a, P<Block>> {
         let mut stmts = vec![];
-        while !self.eat(&token::CloseDelim(token::Brace)) {
+        while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
             if self.token == token::Eof {
                 break;
             }
@@ -553,7 +553,7 @@ impl<'a> Parser<'a> {
         {
             // Just check for errors and recover; do not eat semicolon yet.
             if let Err(mut e) =
-                self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
+                self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)])
             {
                 if let TokenKind::DocComment(..) = self.token.kind {
                     if let Ok(snippet) = self.span_to_snippet(self.token.span) {
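One small detail visible in the statement-parsing hunks above: the style of a macro call in statement position is derived purely from its delimiter, with `Some(Delimiter::Brace)` mapping to `MacStmtStyle::Braces` and any other delimiter to `MacStmtStyle::NoBraces`. The helper below is a hypothetical standalone sketch of that mapping, with toy enums in place of the real `rustc_ast` types:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Delimiter {
    Parenthesis,
    Bracket,
    Brace,
    Invisible,
}

#[derive(Debug, PartialEq)]
enum MacStmtStyle {
    /// A brace-delimited call such as `mac! { ... }` in statement position.
    Braces,
    /// A call such as `mac!(...)` or `mac![...]` in statement position.
    NoBraces,
}

/// Hypothetical helper mirroring the `match delim` in the hunk above.
fn mac_stmt_style(delim: Delimiter) -> MacStmtStyle {
    match delim {
        Delimiter::Brace => MacStmtStyle::Braces,
        _ => MacStmtStyle::NoBraces,
    }
}

fn main() {
    assert_eq!(mac_stmt_style(Delimiter::Brace), MacStmtStyle::Braces);
    assert_eq!(mac_stmt_style(Delimiter::Parenthesis), MacStmtStyle::NoBraces);
}
```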
@@ -3,7 +3,7 @@ use super::{Parser, PathStyle, TokenType};
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};

 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::{
     self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime,
     MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind,
@@ -249,14 +249,14 @@ impl<'a> Parser<'a> {

         let lo = self.token.span;
         let mut impl_dyn_multi = false;
-        let kind = if self.check(&token::OpenDelim(token::Paren)) {
+        let kind = if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             self.parse_ty_tuple_or_parens(lo, allow_plus)?
         } else if self.eat(&token::Not) {
             // Never type `!`
             TyKind::Never
         } else if self.eat(&token::BinOp(token::Star)) {
             self.parse_ty_ptr()?
-        } else if self.eat(&token::OpenDelim(token::Bracket)) {
+        } else if self.eat(&token::OpenDelim(Delimiter::Bracket)) {
             self.parse_array_or_slice_ty()?
         } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
             // Reference
@@ -409,7 +409,7 @@ impl<'a> Parser<'a> {
         let elt_ty = match self.parse_ty() {
             Ok(ty) => ty,
             Err(mut err)
-                if self.look_ahead(1, |t| t.kind == token::CloseDelim(token::Bracket))
+                if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket))
                     | self.look_ahead(1, |t| t.kind == token::Semi) =>
             {
                 // Recover from `[LIT; EXPR]` and `[LIT]`
@@ -422,14 +422,14 @@ impl<'a> Parser<'a> {

         let ty = if self.eat(&token::Semi) {
             let mut length = self.parse_anon_const_expr()?;
-            if let Err(e) = self.expect(&token::CloseDelim(token::Bracket)) {
+            if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
                 // Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
                 self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
-                self.expect(&token::CloseDelim(token::Bracket))?;
+                self.expect(&token::CloseDelim(Delimiter::Bracket))?;
             }
             TyKind::Array(elt_ty, length)
         } else {
-            self.expect(&token::CloseDelim(token::Bracket))?;
+            self.expect(&token::CloseDelim(Delimiter::Bracket))?;
             TyKind::Slice(elt_ty)
         };

@@ -492,9 +492,9 @@ impl<'a> Parser<'a> {
     // Parses the `typeof(EXPR)`.
     // To avoid ambiguity, the type is surrounded by parentheses.
     fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
-        self.expect(&token::OpenDelim(token::Paren))?;
+        self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
         let expr = self.parse_anon_const_expr()?;
-        self.expect(&token::CloseDelim(token::Paren))?;
+        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
         Ok(TyKind::Typeof(expr))
     }

@@ -672,7 +672,7 @@ impl<'a> Parser<'a> {
             || self.check(&token::Question)
             || self.check(&token::Tilde)
             || self.check_keyword(kw::For)
-            || self.check(&token::OpenDelim(token::Paren))
+            || self.check(&token::OpenDelim(Delimiter::Parenthesis))
     }

     fn error_negative_bounds(
@@ -713,7 +713,7 @@ impl<'a> Parser<'a> {
     fn parse_generic_bound(&mut self) -> PResult<'a, Result<GenericBound, Span>> {
         let anchor_lo = self.prev_token.span;
         let lo = self.token.span;
-        let has_parens = self.eat(&token::OpenDelim(token::Paren));
+        let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
         let inner_lo = self.token.span;
         let is_negative = self.eat(&token::Not);

@@ -766,7 +766,7 @@ impl<'a> Parser<'a> {
     /// Recover on `('lifetime)` with `(` already eaten.
     fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> {
         let inner_span = inner_lo.to(self.prev_token.span);
-        self.expect(&token::CloseDelim(token::Paren))?;
+        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
         let mut err = self.struct_span_err(
             lo.to(self.prev_token.span),
             "parenthesized lifetime bounds are not supported",
@@ -829,7 +829,7 @@ impl<'a> Parser<'a> {
             // suggestion is given.
             let bounds = vec![];
             self.parse_remaining_bounds(bounds, true)?;
-            self.expect(&token::CloseDelim(token::Paren))?;
+            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
             let sp = vec![lo, self.prev_token.span];
             let sugg: Vec<_> = sp.iter().map(|sp| (*sp, String::new())).collect();
             self.struct_span_err(sp, "incorrect braces around trait bounds")
@@ -840,7 +840,7 @@ impl<'a> Parser<'a> {
             )
             .emit();
         } else {
-            self.expect(&token::CloseDelim(token::Paren))?;
+            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
         }
     }
