use super::ty::AllowPlus;
use super::TokenType;
use super::{BlockMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep, TokenExpectType};

use rustc_ast::ptr::P;
use rustc_ast::token::{self, Lit, LitKind, TokenKind};
use rustc_ast::util::parser::AssocOp;
use rustc_ast::{
    self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode,
    Block, BlockCheckMode, Expr, ExprKind, GenericArg, Item, ItemKind, Mutability, Param, Pat,
    PatKind, Path, PathSegment, QSelf, Ty, TyKind,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{pluralize, struct_span_err};
use rustc_errors::{Applicability, DiagnosticBuilder, Handler, PResult};
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, Ident};
use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};

use tracing::{debug, trace};

const TURBOFISH_SUGGESTION_STR: &str =
    "use `::<...>` instead of `<...>` to specify type or const arguments";

/// Creates a placeholder argument.
pub(super) fn dummy_arg(ident: Ident) -> Param {
    let pat = P(Pat {
        id: ast::DUMMY_NODE_ID,
        kind: PatKind::Ident(BindingMode::ByValue(Mutability::Not), ident, None),
        span: ident.span,
        tokens: None,
    });
    let ty = Ty { kind: TyKind::Err, span: ident.span, id: ast::DUMMY_NODE_ID, tokens: None };
    Param {
        attrs: AttrVec::default(),
        id: ast::DUMMY_NODE_ID,
        pat,
        span: ident.span,
        ty: P(ty),
        is_placeholder: false,
    }
}

pub enum Error {
    UselessDocComment,
}

impl Error {
    fn span_err(self, sp: impl Into<MultiSpan>, handler: &Handler) -> DiagnosticBuilder<'_> {
        match self {
            Error::UselessDocComment => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0585,
                    "found a documentation comment that doesn't document anything",
                );
                err.help(
                    "doc comments must come before what they document, maybe a comment was \
                     intended with `//`?",
                );
                err
            }
        }
    }
}

pub(super) trait RecoverQPath: Sized + 'static {
    const PATH_STYLE: PathStyle = PathStyle::Expr;
    fn to_ty(&self) -> Option<P<Ty>>;
    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
}

impl RecoverQPath for Ty {
    const PATH_STYLE: PathStyle = PathStyle::Type;
    fn to_ty(&self) -> Option<P<Ty>> {
        Some(P(self.clone()))
    }
    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
        Self {
            span: path.span,
            kind: TyKind::Path(qself, path),
            id: ast::DUMMY_NODE_ID,
            tokens: None,
        }
    }
}

impl RecoverQPath for Pat {
    fn to_ty(&self) -> Option<P<Ty>> {
        self.to_ty()
    }
    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
        Self {
            span: path.span,
            kind: PatKind::Path(qself, path),
            id: ast::DUMMY_NODE_ID,
            tokens: None,
        }
    }
}

impl RecoverQPath for Expr {
    fn to_ty(&self) -> Option<P<Ty>> {
        self.to_ty()
    }
    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
        Self {
            span: path.span,
            kind: ExprKind::Path(qself, path),
            attrs: AttrVec::new(),
            id: ast::DUMMY_NODE_ID,
            tokens: None,
        }
    }
}

/// Control whether the closing delimiter should be consumed when calling `Parser::consume_block`.
crate enum ConsumeClosingDelim {
    Yes,
    No,
}

#[derive(Clone, Copy)]
pub enum AttemptLocalParseRecovery {
    Yes,
    No,
}

impl AttemptLocalParseRecovery {
    pub fn yes(&self) -> bool {
        match self {
            AttemptLocalParseRecovery::Yes => true,
            AttemptLocalParseRecovery::No => false,
        }
    }

    pub fn no(&self) -> bool {
        match self {
            AttemptLocalParseRecovery::Yes => false,
            AttemptLocalParseRecovery::No => true,
        }
    }
}

impl<'a> Parser<'a> {
    pub(super) fn span_fatal_err<S: Into<MultiSpan>>(
        &self,
        sp: S,
        err: Error,
    ) -> DiagnosticBuilder<'a> {
        err.span_err(sp, self.diagnostic())
    }

    pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_err(sp, m)
    }

    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
        self.sess.span_diagnostic.span_bug(sp, m)
    }

    pub(super) fn diagnostic(&self) -> &'a Handler {
        &self.sess.span_diagnostic
    }

    pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
        self.sess.source_map().span_to_snippet(span)
    }

    pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
        let mut err = self.struct_span_err(
            self.token.span,
            &format!("expected identifier, found {}", super::token_descr(&self.token)),
        );
        let valid_follow = &[
            TokenKind::Eq,
            TokenKind::Colon,
            TokenKind::Comma,
            TokenKind::Semi,
            TokenKind::ModSep,
            TokenKind::OpenDelim(token::DelimToken::Brace),
            TokenKind::OpenDelim(token::DelimToken::Paren),
            TokenKind::CloseDelim(token::DelimToken::Brace),
            TokenKind::CloseDelim(token::DelimToken::Paren),
        ];
        match self.token.ident() {
            Some((ident, false))
                if ident.is_raw_guess()
                    && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) =>
            {
                err.span_suggestion(
                    ident.span,
                    "you can escape reserved keywords to use them as identifiers",
                    format!("r#{}", ident.name),
                    Applicability::MaybeIncorrect,
                );
            }
            _ => {}
        }
        if let Some(token_descr) = super::token_descr_opt(&self.token) {
            err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
        } else {
            err.span_label(self.token.span, "expected identifier");
            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
                err.span_suggestion(
                    self.token.span,
                    "remove this comma",
                    String::new(),
                    Applicability::MachineApplicable,
                );
            }
        }
        err
    }

    pub(super) fn expected_one_of_not_found(
        &mut self,
        edible: &[TokenKind],
        inedible: &[TokenKind],
    ) -> PResult<'a, bool /* recovered */> {
        fn tokens_to_string(tokens: &[TokenType]) -> String {
            let mut i = tokens.iter();
            // This might be a sign we need a connect method on `Iterator`.
            let b = i.next().map_or(String::new(), |t| t.to_string());
            i.enumerate().fold(b, |mut b, (i, a)| {
                if tokens.len() > 2 && i == tokens.len() - 2 {
                    b.push_str(", or ");
                } else if tokens.len() == 2 && i == tokens.len() - 2 {
                    b.push_str(" or ");
                } else {
                    b.push_str(", ");
                }
                b.push_str(&a.to_string());
                b
            })
        }

        let mut expected = edible
            .iter()
            .map(|x| TokenType::Token(x.clone()))
            .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
            .chain(self.expected_tokens.iter().cloned())
            .collect::<Vec<_>>();
        expected.sort_by_cached_key(|x| x.to_string());
        expected.dedup();

        let expect = tokens_to_string(&expected[..]);
        let actual = super::token_descr(&self.token);
        let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
            let short_expect = if expected.len() > 6 {
                format!("{} possible tokens", expected.len())
            } else {
                expect.clone()
            };
            (
                format!("expected one of {}, found {}", expect, actual),
                (self.prev_token.span.shrink_to_hi(), format!("expected one of {}", short_expect)),
            )
        } else if expected.is_empty() {
            (
                format!("unexpected token: {}", actual),
                (self.prev_token.span, "unexpected token after this".to_string()),
            )
        } else {
            (
                format!("expected {}, found {}", expect, actual),
                (self.prev_token.span.shrink_to_hi(), format!("expected {}", expect)),
            )
        };
        self.last_unexpected_token_span = Some(self.token.span);
        let mut err = self.struct_span_err(self.token.span, &msg_exp);
        let sp = if self.token == token::Eof {
            // This is EOF; don't want to point at the following char, but rather the last token.
            self.prev_token.span
        } else {
            label_sp
        };
        match self.recover_closing_delimiter(
            &expected
                .iter()
                .filter_map(|tt| match tt {
                    TokenType::Token(t) => Some(t.clone()),
                    _ => None,
                })
                .collect::<Vec<_>>(),
            err,
        ) {
            Err(e) => err = e,
            Ok(recovered) => {
                return Ok(recovered);
            }
        }

        if self.check_too_many_raw_str_terminators(&mut err) {
            return Err(err);
        }

        let sm = self.sess.source_map();
        if self.prev_token.span == DUMMY_SP {
            // Account for macro context where the previous span might not be
            // available to avoid incorrect output (#54841).
            err.span_label(self.token.span, label_exp);
        } else if !sm.is_multiline(self.token.span.shrink_to_hi().until(sp.shrink_to_lo())) {
            // When the spans are in the same line, it means that the only content between
            // them is whitespace, point at the found token in that case:
            //
            // X |     () => { syntax error };
            //   |                    ^^^^^ expected one of 8 possible tokens here
            //
            // instead of having:
            //
            // X |     () => { syntax error };
            //   |                   -^^^^^ unexpected token
            //   |                   |
            //   |                   expected one of 8 possible tokens here
            err.span_label(self.token.span, label_exp);
        } else {
            err.span_label(sp, label_exp);
            err.span_label(self.token.span, "unexpected token");
        }
        self.maybe_annotate_with_ascription(&mut err, false);
        Err(err)
    }

    fn check_too_many_raw_str_terminators(&mut self, err: &mut DiagnosticBuilder<'_>) -> bool {
        match (&self.prev_token.kind, &self.token.kind) {
            (
                TokenKind::Literal(Lit {
                    kind: LitKind::StrRaw(n_hashes) | LitKind::ByteStrRaw(n_hashes),
                    ..
                }),
                TokenKind::Pound,
            ) => {
                err.set_primary_message("too many `#` when terminating raw string");
                err.span_suggestion(
                    self.token.span,
                    "remove the extra `#`",
                    String::new(),
                    Applicability::MachineApplicable,
                );
                err.note(&format!("the raw string started with {} `#`s", n_hashes));
                true
            }
            _ => false,
        }
    }

    pub fn maybe_suggest_struct_literal(
        &mut self,
        lo: Span,
        s: BlockCheckMode,
    ) -> Option<PResult<'a, P<Block>>> {
        if self.token.is_ident() && self.look_ahead(1, |t| t == &token::Colon) {
            // We might be having a struct literal where people forgot to include the path:
            // fn foo() -> Foo {
            //     field: value,
            // }
            let mut snapshot = self.clone();
            let path =
                Path { segments: vec![], span: self.prev_token.span.shrink_to_lo(), tokens: None };
            let struct_expr = snapshot.parse_struct_expr(path, AttrVec::new(), false);
            let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
            return Some(match (struct_expr, block_tail) {
                (Ok(expr), Err(mut err)) => {
                    // We have encountered the following:
                    // fn foo() -> Foo {
                    //     field: value,
                    // }
                    // Suggest:
                    // fn foo() -> Foo { Path {
                    //     field: value,
                    // } }
                    err.delay_as_bug();
                    self.struct_span_err(expr.span, "struct literal body without path")
                        .multipart_suggestion(
                            "you might have forgotten to add the struct literal inside the block",
                            vec![
                                (expr.span.shrink_to_lo(), "{ SomeStruct ".to_string()),
                                (expr.span.shrink_to_hi(), " }".to_string()),
                            ],
                            Applicability::MaybeIncorrect,
                        )
                        .emit();
                    *self = snapshot;
                    Ok(self.mk_block(
                        vec![self.mk_stmt_err(expr.span)],
                        s,
                        lo.to(self.prev_token.span),
                    ))
                }
                (Err(mut err), Ok(tail)) => {
                    // We have a block tail that contains a somehow valid type ascription expr.
                    err.cancel();
                    Ok(tail)
                }
                (Err(mut snapshot_err), Err(err)) => {
                    // We don't know what went wrong, emit the normal error.
                    snapshot_err.cancel();
                    self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
                    Err(err)
                }
                (Ok(_), Ok(tail)) => Ok(tail),
            });
        }
        None
    }
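
    /// A hedged illustration of the typo this annotation targets (the names `Foo` and `Bar` are
    /// made up; the "maybe write a path separator here" help and the `::` replacement are the
    /// ones emitted below):
    ///
    /// ```ignore (diagnostic)
    /// let x = Foo:Bar;
    /// //         ^ help: maybe write a path separator here: `::`
    /// ```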
    pub fn maybe_annotate_with_ascription(
        &mut self,
        err: &mut DiagnosticBuilder<'_>,
        maybe_expected_semicolon: bool,
    ) {
        if let Some((sp, likely_path)) = self.last_type_ascription.take() {
            let sm = self.sess.source_map();
            let next_pos = sm.lookup_char_pos(self.token.span.lo());
            let op_pos = sm.lookup_char_pos(sp.hi());

            let allow_unstable = self.sess.unstable_features.is_nightly_build();

            if likely_path {
                err.span_suggestion(
                    sp,
                    "maybe write a path separator here",
                    "::".to_string(),
                    if allow_unstable {
                        Applicability::MaybeIncorrect
                    } else {
                        Applicability::MachineApplicable
                    },
                );
                self.sess.type_ascription_path_suggestions.borrow_mut().insert(sp);
            } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
                err.span_suggestion(
                    sp,
                    "try using a semicolon",
                    ";".to_string(),
                    Applicability::MaybeIncorrect,
                );
            } else if allow_unstable {
                err.span_label(sp, "tried to parse a type due to this type ascription");
            } else {
                err.span_label(sp, "tried to parse a type due to this");
            }
            if allow_unstable {
                // Give extra information about type ascription only if it's a nightly compiler.
                err.note(
                    "`#![feature(type_ascription)]` lets you annotate an expression with a type: \
                     `<expr>: <type>`",
                );
                if !likely_path {
                    // Avoid giving too much info when it was likely an unrelated typo.
                    err.note(
                        "see issue #23416 <https://github.com/rust-lang/rust/issues/23416> \
                         for more information",
                    );
                }
            }
        }
    }

    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
    /// passes through any errors encountered. Used for error recovery.
    pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
        if let Err(ref mut err) =
            self.parse_seq_to_before_tokens(kets, SeqSep::none(), TokenExpectType::Expect, |p| {
                Ok(p.parse_token_tree())
            })
        {
            err.cancel();
        }
    }

    /// This function checks if there are trailing angle brackets and produces
    /// a diagnostic to suggest removing them.
    ///
    /// ```ignore (diagnostic)
    /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
    ///                                                        ^^ help: remove extra angle brackets
    /// ```
    ///
    /// If `true` is returned, then trailing brackets were recovered, tokens were consumed
    /// up until one of the tokens in `end` was encountered, and an error was emitted.
    pub(super) fn check_trailing_angle_brackets(
        &mut self,
        segment: &PathSegment,
        end: &[&TokenKind],
    ) -> bool {
        // This function is intended to be invoked after parsing a path segment where there are two
        // cases:
        //
        // 1. A specific token is expected after the path segment.
        //    eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
        //        `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
        // 2. No specific token is expected after the path segment.
        //    eg. `x.foo` (field access)
        //
        // This function is called after parsing `.foo` and before parsing the token `end` (if
        // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
        // `Foo::<Bar>`.

        // We only care about trailing angle brackets if we previously parsed angle bracket
        // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
        // removed in this case:
        //
        //     `x.foo >> (3)` (where `x.foo` is a `u32` for example)
        //
        // This case is particularly tricky as we won't notice it just looking at the tokens -
        // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
        // have already been parsed):
        //
        //     `x.foo::<u32>>>(3)`
        let parsed_angle_bracket_args =
            segment.args.as_ref().map(|args| args.is_angle_bracketed()).unwrap_or(false);

        debug!(
            "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
            parsed_angle_bracket_args,
        );
        if !parsed_angle_bracket_args {
            return false;
        }

        // Keep the span at the start so we can highlight the sequence of `>` characters to be
        // removed.
        let lo = self.token.span;

        // We need to look-ahead to see if we have `>` characters without moving the cursor forward
        // (since we might have the field access case and the characters we're eating are
        // actual operators and not trailing characters - ie `x.foo >> 3`).
        let mut position = 0;

        // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
        // many of each (so we can correctly pluralize our error messages) and continue to
        // advance.
        let mut number_of_shr = 0;
        let mut number_of_gt = 0;
        while self.look_ahead(position, |t| {
            trace!("check_trailing_angle_brackets: t={:?}", t);
            if *t == token::BinOp(token::BinOpToken::Shr) {
                number_of_shr += 1;
                true
            } else if *t == token::Gt {
                number_of_gt += 1;
                true
            } else {
                false
            }
        }) {
            position += 1;
        }

        // If we didn't find any trailing `>` characters, then we have nothing to error about.
        debug!(
            "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
            number_of_gt, number_of_shr,
        );
        if number_of_gt < 1 && number_of_shr < 1 {
            return false;
        }

        // Finally, double check that we have our end token as otherwise this is the
        // second case.
        if self.look_ahead(position, |t| {
            trace!("check_trailing_angle_brackets: t={:?}", t);
            end.contains(&&t.kind)
        }) {
            // Eat from where we started until the end token so that parsing can continue
            // as if we didn't have those extra angle brackets.
            self.eat_to_tokens(end);
            let span = lo.until(self.token.span);

            let total_num_of_gt = number_of_gt + number_of_shr * 2;
            self.struct_span_err(
                span,
                &format!("unmatched angle bracket{}", pluralize!(total_num_of_gt)),
            )
            .span_suggestion(
                span,
                &format!("remove extra angle bracket{}", pluralize!(total_num_of_gt)),
                String::new(),
                Applicability::MachineApplicable,
            )
            .emit();
            return true;
        }
        false
    }

    /// Check if a method call with an intended turbofish has been written without surrounding
    /// angle brackets.
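    ///
    /// A hedged, illustrative input (the receiver and type here are made up; the
    /// "surround the type parameters with angle brackets" help is the one emitted below):
    ///
    /// ```ignore (diagnostic)
    /// let _ = [1, 2, 3].iter().collect::Vec<i32>();
    /// //                               ^^^^^^^^^ help: surround the type parameters
    /// //                                         with angle brackets: `::<Vec<i32>>`
    /// ```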
    pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut PathSegment) {
        if token::ModSep == self.token.kind && segment.args.is_none() {
            let snapshot = self.clone();
            self.bump();
            let lo = self.token.span;
            match self.parse_angle_args() {
                Ok(args) => {
                    let span = lo.to(self.prev_token.span);
                    // Detect trailing `>` like in `x.collect::Vec<_>>()`.
                    let mut trailing_span = self.prev_token.span.shrink_to_hi();
                    while self.token.kind == token::BinOp(token::Shr)
                        || self.token.kind == token::Gt
                    {
                        trailing_span = trailing_span.to(self.token.span);
                        self.bump();
                    }
                    if self.token.kind == token::OpenDelim(token::Paren) {
                        // Recover from bad turbofish: `foo.collect::Vec<_>()`.
                        let args = AngleBracketedArgs { args, span }.into();
                        segment.args = args;

                        self.struct_span_err(
                            span,
                            "generic parameters without surrounding angle brackets",
                        )
                        .multipart_suggestion(
                            "surround the type parameters with angle brackets",
                            vec![
                                (span.shrink_to_lo(), "<".to_string()),
                                (trailing_span, ">".to_string()),
                            ],
                            Applicability::MachineApplicable,
                        )
                        .emit();
                    } else {
                        // This doesn't look like an invalid turbofish, can't recover parse state.
                        *self = snapshot;
                    }
                }
                Err(mut err) => {
                    // We couldn't parse generic parameters, unlikely to be a turbofish. Rely on
                    // the generic parse error instead.
                    err.cancel();
                    *self = snapshot;
                }
            }
        }
    }

    /// When writing a turbofish with multiple type parameters missing the leading `::`, we will
    /// encounter a parse error when encountering the first `,`.
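    ///
    /// A hedged example of such input (`foo` and its arguments are hypothetical; the help text is
    /// the `::<...>` one emitted below):
    ///
    /// ```ignore (diagnostic)
    /// let _ = foo<u32, String>(a, b);
    /// //         ^ help: use `::<...>` instead of `<...>` to specify type or const arguments
    /// ```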
    pub(super) fn check_mistyped_turbofish_with_multiple_type_params(
        &mut self,
        mut e: DiagnosticBuilder<'a>,
        expr: &mut P<Expr>,
    ) -> PResult<'a, ()> {
        if let ExprKind::Binary(binop, _, _) = &expr.kind {
            if let ast::BinOpKind::Lt = binop.node {
                if self.eat(&token::Comma) {
                    let x = self.parse_seq_to_before_end(
                        &token::Gt,
                        SeqSep::trailing_allowed(token::Comma),
                        |p| p.parse_ty(),
                    );
                    match x {
                        Ok((_, _, false)) => {
                            self.bump(); // `>`
                            match self.parse_expr() {
                                Ok(_) => {
                                    e.span_suggestion_verbose(
                                        binop.span.shrink_to_lo(),
                                        TURBOFISH_SUGGESTION_STR,
                                        "::".to_string(),
                                        Applicability::MaybeIncorrect,
                                    );
                                    e.emit();
                                    *expr = self.mk_expr_err(expr.span.to(self.prev_token.span));
                                    return Ok(());
                                }
                                Err(mut err) => {
                                    err.cancel();
                                }
                            }
                        }
                        Err(mut err) => {
                            err.cancel();
                        }
                        _ => {}
                    }
                }
            }
        }
        Err(e)
    }

    /// Check to see if a pair of chained operators looks like an attempt at chained comparison,
    /// e.g. `1 < x <= 3`. If so, suggest either splitting the comparison into two, or
    /// parenthesising the leftmost comparison.
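    ///
    /// Two hedged, illustrative cases (the variable names are made up; the help messages are the
    /// ones this method emits):
    ///
    /// ```ignore (diagnostic)
    /// if 1 < x <= 3 {}   // help: split the comparison into two: `1 < x && x <= 3`
    /// if a > b == c {}   // help: parenthesize the comparison: `(a > b) == c`
    /// ```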
    fn attempt_chained_comparison_suggestion(
        &mut self,
        err: &mut DiagnosticBuilder<'_>,
        inner_op: &Expr,
        outer_op: &Spanned<AssocOp>,
    ) -> bool /* advanced the cursor */ {
        if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
            if let ExprKind::Field(_, ident) = l1.kind {
                if ident.as_str().parse::<i32>().is_err() && !matches!(r1.kind, ExprKind::Lit(_)) {
                    // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
                    // suggestion being the only one to apply is high.
                    return false;
                }
            }
            let mut enclose = |left: Span, right: Span| {
                err.multipart_suggestion(
                    "parenthesize the comparison",
                    vec![
                        (left.shrink_to_lo(), "(".to_string()),
                        (right.shrink_to_hi(), ")".to_string()),
                    ],
                    Applicability::MaybeIncorrect,
                );
            };
            return match (op.node, &outer_op.node) {
                // `x == y == z`
                (BinOpKind::Eq, AssocOp::Equal) |
                // `x < y < z` and friends.
                (BinOpKind::Lt, AssocOp::Less | AssocOp::LessEqual) |
                (BinOpKind::Le, AssocOp::LessEqual | AssocOp::Less) |
                // `x > y > z` and friends.
                (BinOpKind::Gt, AssocOp::Greater | AssocOp::GreaterEqual) |
                (BinOpKind::Ge, AssocOp::GreaterEqual | AssocOp::Greater) => {
                    let expr_to_str = |e: &Expr| {
                        self.span_to_snippet(e.span)
                            .unwrap_or_else(|_| pprust::expr_to_string(&e))
                    };
                    err.span_suggestion_verbose(
                        inner_op.span.shrink_to_hi(),
                        "split the comparison into two",
                        format!(" && {}", expr_to_str(&r1)),
                        Applicability::MaybeIncorrect,
                    );
                    false // Keep the current parse behavior, where the AST is `(x < y) < z`.
                }
                // `x == y < z`
                (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
                    // Consume `z`/outer-op-rhs.
                    let snapshot = self.clone();
                    match self.parse_expr() {
                        Ok(r2) => {
                            // We are sure that outer-op-rhs could be consumed, the suggestion is
                            // likely correct.
                            enclose(r1.span, r2.span);
                            true
                        }
                        Err(mut expr_err) => {
                            expr_err.cancel();
                            *self = snapshot;
                            false
                        }
                    }
                }
                // `x > y == z`
                (BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge, AssocOp::Equal) => {
                    let snapshot = self.clone();
                    // At this point it is always valid to enclose the lhs in parentheses, no
                    // further checks are necessary.
                    match self.parse_expr() {
                        Ok(_) => {
                            enclose(l1.span, r1.span);
                            true
                        }
                        Err(mut expr_err) => {
                            expr_err.cancel();
                            *self = snapshot;
                            false
                        }
                    }
                }
                _ => false,
            };
        }
        false
    }

    /// Produces an error if comparison operators are chained (RFC #558).
    /// We only need to check the LHS, not the RHS, because all comparison ops have same
    /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`).
    ///
    /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
    /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the
    /// case.
    ///
    /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left
    /// associative we can infer that we have:
    ///
    /// ```text
    ///           outer_op
    ///           /   \
    ///     inner_op   r2
    ///        /  \
    ///      l1    r1
    /// ```
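    ///
    /// A hedged example of the turbofish misuse this recovers from (`foo` and `bar` are
    /// hypothetical names; the help text is `TURBOFISH_SUGGESTION_STR`):
    ///
    /// ```ignore (diagnostic)
    /// let _ = foo<bar>();
    /// //         ^ help: use `::<...>` instead of `<...>` to specify type or const arguments
    /// ```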
    pub(super) fn check_no_chained_comparison(
        &mut self,
        inner_op: &Expr,
        outer_op: &Spanned<AssocOp>,
    ) -> PResult<'a, Option<P<Expr>>> {
        debug_assert!(
            outer_op.node.is_comparison(),
            "check_no_chained_comparison: {:?} is not comparison",
            outer_op.node,
        );

        let mk_err_expr =
            |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err, AttrVec::new())));

        match inner_op.kind {
            ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
                let mut err = self.struct_span_err(
                    vec![op.span, self.prev_token.span],
                    "comparison operators cannot be chained",
                );

                let suggest = |err: &mut DiagnosticBuilder<'_>| {
                    err.span_suggestion_verbose(
                        op.span.shrink_to_lo(),
                        TURBOFISH_SUGGESTION_STR,
                        "::".to_string(),
                        Applicability::MaybeIncorrect,
                    );
                };

                // Include `<` to provide this recommendation even in a case like
                // `Foo<Bar<Baz<Qux, ()>>>`
                if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Less
                    || outer_op.node == AssocOp::Greater
                {
                    if outer_op.node == AssocOp::Less {
                        let snapshot = self.clone();
                        self.bump();
                        // So far we have parsed `foo<bar<`, consume the rest of the type args.
                        let modifiers =
                            [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
                        self.consume_tts(1, &modifiers[..]);

                        if !&[token::OpenDelim(token::Paren), token::ModSep]
                            .contains(&self.token.kind)
                        {
                            // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
                            // parser and bail out.
                            *self = snapshot.clone();
                        }
                    }
                    return if token::ModSep == self.token.kind {
                        // We have some certainty that this was a bad turbofish at this point.
                        // `foo< bar >::`
                        suggest(&mut err);

                        let snapshot = self.clone();
                        self.bump(); // `::`

                        // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
                        match self.parse_expr() {
                            Ok(_) => {
                                // 99% certain that the suggestion is correct, continue parsing.
                                err.emit();
                                // FIXME: actually check that the two expressions in the binop are
                                // paths and resynthesize new fn call expression instead of using
                                // `ExprKind::Err` placeholder.
                                mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                            }
                            Err(mut expr_err) => {
                                expr_err.cancel();
                                // Not entirely sure now, but we bubble the error up with the
                                // suggestion.
                                *self = snapshot;
                                Err(err)
                            }
                        }
                    } else if token::OpenDelim(token::Paren) == self.token.kind {
                        // We have high certainty that this was a bad turbofish at this point.
                        // `foo< bar >(`
                        suggest(&mut err);
                        // Consume the fn call arguments.
                        match self.consume_fn_args() {
                            Err(()) => Err(err),
                            Ok(()) => {
                                err.emit();
                                // FIXME: actually check that the two expressions in the binop are
                                // paths and resynthesize new fn call expression instead of using
                                // `ExprKind::Err` placeholder.
                                mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                            }
                        }
                    } else {
                        if !matches!(l1.kind, ExprKind::Lit(_))
                            && !matches!(r1.kind, ExprKind::Lit(_))
                        {
                            // All we know is that this is `foo < bar >` and *nothing* else. Try to
                            // be helpful, but don't attempt to recover.
                            err.help(TURBOFISH_SUGGESTION_STR);
                            err.help("or use `(...)` if you meant to specify fn arguments");
                        }

                        // If it looks like a genuine attempt to chain operators (as opposed to a
                        // misformatted turbofish, for instance), suggest a correct form.
                        if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
                        {
                            err.emit();
                            mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                        } else {
                            // These cases cause too many knock-down errors, bail out (#61329).
                            Err(err)
                        }
                    };
                }
                let recover =
                    self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
                err.emit();
                if recover {
                    return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
                }
            }
            _ => {}
        }
        Ok(None)
    }

    fn consume_fn_args(&mut self) -> Result<(), ()> {
        let snapshot = self.clone();
        self.bump(); // `(`

        // Consume the fn call arguments.
        let modifiers =
            [(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)];
        self.consume_tts(1, &modifiers[..]);

        if self.token.kind == token::Eof {
            // Not entirely sure that what we consumed were fn arguments, rollback.
            *self = snapshot;
            Err(())
        } else {
            // 99% certain that the suggestion is correct, continue parsing.
            Ok(())
        }
    }
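
    /// A hedged, illustrative example of the "ambiguous `+` in a type" report emitted below (the
    /// traits in the example are made up):
    ///
    /// ```ignore (diagnostic)
    /// fn f(arg: &impl Debug + Clone) {}
    /// //         ^^^^^^^^^^^^^^^^^^ help: use parentheses to disambiguate: `(impl Debug + Clone)`
    /// ```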
    pub(super) fn maybe_report_ambiguous_plus(
        &mut self,
        allow_plus: AllowPlus,
        impl_dyn_multi: bool,
        ty: &Ty,
    ) {
        if matches!(allow_plus, AllowPlus::No) && impl_dyn_multi {
            let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
            self.struct_span_err(ty.span, "ambiguous `+` in a type")
                .span_suggestion(
                    ty.span,
                    "use parentheses to disambiguate",
                    sum_with_parens,
                    Applicability::MachineApplicable,
                )
                .emit();
        }
    }

    pub(super) fn maybe_recover_from_bad_type_plus(
        &mut self,
        allow_plus: AllowPlus,
        ty: &Ty,
    ) -> PResult<'a, ()> {
        // Do not add `+` to expected tokens.
        if matches!(allow_plus, AllowPlus::No) || !self.token.is_like_plus() {
            return Ok(());
        }

        self.bump(); // `+`
        let bounds = self.parse_generic_bounds(None)?;
        let sum_span = ty.span.to(self.prev_token.span);

        let mut err = struct_span_err!(
            self.sess.span_diagnostic,
            sum_span,
            E0178,
            "expected a path on the left-hand side of `+`, not `{}`",
            pprust::ty_to_string(ty)
        );

        match ty.kind {
            TyKind::Rptr(ref lifetime, ref mut_ty) => {
                let sum_with_parens = pprust::to_string(|s| {
                    s.s.word("&");
                    s.print_opt_lifetime(lifetime);
                    s.print_mutability(mut_ty.mutbl, false);
                    s.popen();
                    s.print_type(&mut_ty.ty);
                    s.print_type_bounds(" +", &bounds);
                    s.pclose()
                });
                err.span_suggestion(
                    sum_span,
                    "try adding parentheses",
                    sum_with_parens,
                    Applicability::MachineApplicable,
                );
            }
            TyKind::Ptr(..) | TyKind::BareFn(..) => {
                err.span_label(sum_span, "perhaps you forgot parentheses?");
            }
            _ => {
                err.span_label(sum_span, "expected a path");
            }
        }
        err.emit();
        Ok(())
    }

    /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
    /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
    /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
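    ///
    /// A hedged, illustrative example (the names are hypothetical; the help text is the
    /// "missing angle brackets in associated item path" suggestion emitted in stage 2 below):
    ///
    /// ```ignore (diagnostic)
    /// let _ = [u8]::clone(&x);
    /// //      ^^^^^^^^^^^ help: try: `<[u8]>::clone`
    /// ```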
    pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
        &mut self,
        base: P<T>,
        allow_recovery: bool,
    ) -> PResult<'a, P<T>> {
        // Do not add `::` to expected tokens.
        if allow_recovery && self.token == token::ModSep {
            if let Some(ty) = base.to_ty() {
                return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
            }
        }
        Ok(base)
    }

    /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
    /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
    pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
        &mut self,
        ty_span: Span,
        ty: P<Ty>,
    ) -> PResult<'a, P<T>> {
        self.expect(&token::ModSep)?;

        let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP, tokens: None };
        self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
        path.span = ty_span.to(self.prev_token.span);

        let ty_str = self.span_to_snippet(ty_span).unwrap_or_else(|_| pprust::ty_to_string(&ty));
        self.struct_span_err(path.span, "missing angle brackets in associated item path")
            .span_suggestion(
                // This is a best-effort recovery.
                path.span,
                "try",
                format!("<{}>::{}", ty_str, pprust::path_to_string(&path)),
                Applicability::MaybeIncorrect,
            )
            .emit();

        let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
        Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
    }

    pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
        if self.eat(&token::Semi) {
            let mut err = self.struct_span_err(self.prev_token.span, "expected item, found `;`");
            err.span_suggestion_short(
                self.prev_token.span,
                "remove this semicolon",
                String::new(),
                Applicability::MachineApplicable,
            );
            if !items.is_empty() {
                let previous_item = &items[items.len() - 1];
                let previous_item_kind_name = match previous_item.kind {
                    // Say "braced struct" because tuple-structs and
                    // braceless-empty-struct declarations do take a semicolon.
                    ItemKind::Struct(..) => Some("braced struct"),
                    ItemKind::Enum(..) => Some("enum"),
                    ItemKind::Trait(..) => Some("trait"),
                    ItemKind::Union(..) => Some("union"),
                    _ => None,
                };
                if let Some(name) = previous_item_kind_name {
                    err.help(&format!("{} declarations are not followed by a semicolon", name));
                }
            }
            err.emit();
            true
        } else {
            false
        }
    }

    /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
    /// closing delimiter.
    pub(super) fn unexpected_try_recover(
        &mut self,
        t: &TokenKind,
    ) -> PResult<'a, bool /* recovered */> {
        let token_str = pprust::token_kind_to_string(t);
        let this_token_str = super::token_descr(&self.token);
        let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
            // Point at the end of the macro call when reaching end of macro arguments.
            (token::Eof, Some(_)) => {
                let sp = self.sess.source_map().next_point(self.token.span);
                (sp, sp)
            }
            // We don't want to point at the following span after DUMMY_SP.
            // This happens when the parser finds an empty TokenStream.
            _ if self.prev_token.span == DUMMY_SP => (self.token.span, self.token.span),
            // EOF, don't want to point at the following char, but rather the last token.
            (token::Eof, None) => (self.prev_token.span, self.token.span),
            _ => (self.prev_token.span.shrink_to_hi(), self.token.span),
        };
        let msg = format!(
            "expected `{}`, found {}",
            token_str,
            match (&self.token.kind, self.subparser_name) {
                (token::Eof, Some(origin)) => format!("end of {}", origin),
                _ => this_token_str,
            },
        );
        let mut err = self.struct_span_err(sp, &msg);
        let label_exp = format!("expected `{}`", token_str);
        match self.recover_closing_delimiter(&[t.clone()], err) {
            Err(e) => err = e,
            Ok(recovered) => {
                return Ok(recovered);
            }
        }
        let sm = self.sess.source_map();
        if !sm.is_multiline(prev_sp.until(sp)) {
            // When the spans are in the same line, it means that the only content
            // between them is whitespace, point only at the found token.
            err.span_label(sp, label_exp);
        } else {
            err.span_label(prev_sp, label_exp);
            err.span_label(sp, "unexpected token");
        }
        Err(err)
    }

    pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
        if self.eat(&token::Semi) {
            return Ok(());
        }
        let sm = self.sess.source_map();
        let msg = format!("expected `;`, found {}", super::token_descr(&self.token));
        let appl = Applicability::MachineApplicable;
        if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
            // Likely inside a macro, can't provide meaningful suggestions.
            return self.expect(&token::Semi).map(drop);
        } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
            // The current token is in the same line as the prior token, not recoverable.
        } else if [token::Comma, token::Colon].contains(&self.token.kind)
            && self.prev_token.kind == token::CloseDelim(token::Paren)
        {
            // Likely typo: The current token is on a new line and is expected to be
            // `.`, `;`, `?`, or an operator after a close delimiter token.
            //
            // let a = std::process::Command::new("echo")
            //         .arg("1")
            //         ,arg("2")
            //         ^
            // https://github.com/rust-lang/rust/issues/72253
            self.expect(&token::Semi)?;
            return Ok(());
        } else if self.look_ahead(1, |t| {
            t == &token::CloseDelim(token::Brace) || t.can_begin_expr() && t.kind != token::Colon
        }) && [token::Comma, token::Colon].contains(&self.token.kind)
        {
            // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
            // either `,` or `:`, and the next token could either start a new statement or is a
            // block close. For example:
            //
            //   let x = 32:
            //   let y = 42;
            self.bump();
            let sp = self.prev_token.span;
            self.struct_span_err(sp, &msg)
                .span_suggestion_short(sp, "change this to `;`", ";".to_string(), appl)
                .emit();
            return Ok(());
        } else if self.look_ahead(0, |t| {
            t == &token::CloseDelim(token::Brace)
                || (
                    t.can_begin_expr() && t != &token::Semi && t != &token::Pound
                    // Avoid triggering with too many trailing `#` in raw string.
                )
        }) {
            // Missing semicolon typo. This is triggered if the next token could either start a
            // new statement or is a block close. For example:
            //
            //   let x = 32
            //   let y = 42;
            let sp = self.prev_token.span.shrink_to_hi();
            self.struct_span_err(sp, &msg)
                .span_label(self.token.span, "unexpected token")
                .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl)
                .emit();
            return Ok(());
        }
        self.expect(&token::Semi).map(drop) // Error unconditionally
    }

    /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
    /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
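    ///
    /// A hedged illustration (`fut` is a hypothetical future; the help text is the postfix
    /// `.await` rewrite emitted by `error_on_incorrect_await`):
    ///
    /// ```ignore (diagnostic)
    /// await!(fut);   // help: `await` is a postfix operation: `fut.await`
    /// await? fut;    // help: `await` is a postfix operation: `fut.await?`
    /// ```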
    pub(super) fn recover_incorrect_await_syntax(
        &mut self,
        lo: Span,
        await_sp: Span,
        attrs: AttrVec,
    ) -> PResult<'a, P<Expr>> {
        let (hi, expr, is_question) = if self.token == token::Not {
            // Handle `await!(<expr>)`.
            self.recover_await_macro()?
        } else {
            self.recover_await_prefix(await_sp)?
        };
        let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
        let kind = match expr.kind {
            // Avoid knock-down errors as we don't know whether to interpret this as `foo().await?`
            // or `foo()?.await` (the very reason we went with postfix syntax 😅).
            ExprKind::Try(_) => ExprKind::Err,
            _ => ExprKind::Await(expr),
        };
        let expr = self.mk_expr(lo.to(sp), kind, attrs);
        self.maybe_recover_from_bad_qpath(expr, true)
    }

    fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
        self.expect(&token::Not)?;
        self.expect(&token::OpenDelim(token::Paren))?;
        let expr = self.parse_expr()?;
        self.expect(&token::CloseDelim(token::Paren))?;
        Ok((self.prev_token.span, expr, false))
    }

    fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
        let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
        let expr = if self.token == token::OpenDelim(token::Brace) {
            // Handle `await { <expr> }`.
            // This needs to be handled separately from the next arm to avoid
            // interpreting `await { <expr> }?` as `<expr>?.await`.
            self.parse_block_expr(None, self.token.span, BlockCheckMode::Default, AttrVec::new())
        } else {
            self.parse_expr()
        }
        .map_err(|mut err| {
            err.span_label(await_sp, "while parsing this incorrect await expression");
            err
        })?;
        Ok((expr.span, expr, is_question))
    }

    fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
        let expr_str =
            self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(&expr));
        let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
        let sp = lo.to(hi);
        let app = match expr.kind {
            ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
            _ => Applicability::MachineApplicable,
        };
        self.struct_span_err(sp, "incorrect use of `await`")
            .span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
            .emit();
        sp
    }

    /// If encountering `future.await()`, consumes and emits an error.
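    ///
    /// A hedged example (`fut` is a hypothetical future):
    ///
    /// ```ignore (diagnostic)
    /// fut.await();
    /// //       ^^ help: `await` is not a method call, remove the parentheses
    /// ```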
    pub(super) fn recover_from_await_method_call(&mut self) {
        if self.token == token::OpenDelim(token::Paren)
            && self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
        {
            // future.await()
            let lo = self.token.span;
            self.bump(); // (
            let sp = lo.to(self.token.span);
            self.bump(); // )

            self.struct_span_err(sp, "incorrect use of `await`")
                .span_suggestion(
                    sp,
                    "`await` is not a method call, remove the parentheses",
                    String::new(),
                    Applicability::MachineApplicable,
                )
                .emit();
        }
    }
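
    /// Recovers `try!(expr)` written in a 2018-edition crate, where `try` is a reserved keyword.
    /// A hedged, illustrative example (`fallible()` is a hypothetical call; the error and help
    /// texts are the ones emitted below):
    ///
    /// ```ignore (diagnostic)
    /// let x = try!(fallible());
    /// // error: use of deprecated `try` macro
    /// // help: you can use the `?` operator instead: `fallible()?`
    /// ```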
    pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
        let is_try = self.token.is_keyword(kw::Try);
        let is_questionmark = self.look_ahead(1, |t| t == &token::Not); // Check for `!`.
        let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(token::Paren)); // Check for `(`.

        if is_try && is_questionmark && is_open {
            let lo = self.token.span;
            self.bump(); // Remove `try`.
            self.bump(); // Remove `!`.
            let try_span = lo.to(self.token.span); // Take the `try!(` span.
            self.bump(); // Remove `(`.
            let is_empty = self.token == token::CloseDelim(token::Paren); // Check if the block is empty.
            self.consume_block(token::Paren, ConsumeClosingDelim::No); // Eat the block.
            let hi = self.token.span;
            self.bump(); // Remove `)`.
            let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro");
            err.note("in the 2018 edition `try` is a reserved keyword, and the `try!()` macro is deprecated");
            let prefix = if is_empty { "" } else { "alternatively, " };
            if !is_empty {
                err.multipart_suggestion(
                    "you can use the `?` operator instead",
                    vec![(try_span, "".to_owned()), (hi, "?".to_owned())],
                    Applicability::MachineApplicable,
                );
            }
            err.span_suggestion(
                lo.shrink_to_lo(),
                &format!(
                    "{}you can still access the deprecated `try!()` macro using the \"raw identifier\" syntax",
                    prefix
                ),
                "r#".to_string(),
                Applicability::MachineApplicable,
            );
            err.emit();
            Ok(self.mk_expr_err(lo.to(hi)))
        } else {
            Err(self.expected_expression_found()) // The user isn't trying to invoke the `try!` macro.
        }
    }

    /// Recovers a situation like `for ( $pat in $expr )`
    /// and suggest writing `for $pat in $expr` instead.
    ///
    /// This should be called before parsing the `$block`.
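    ///
    /// A hedged example (the variable names are made up; the help text is the one emitted below):
    ///
    /// ```ignore (diagnostic)
    /// for (x in 0..3) {}
    /// //  ^^^^^^^^^^^ help: remove parenthesis in `for` loop: `x in 0..3`
    /// ```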
    pub(super) fn recover_parens_around_for_head(
        &mut self,
        pat: P<Pat>,
        expr: &Expr,
        begin_paren: Option<Span>,
    ) -> P<Pat> {
        match (&self.token.kind, begin_paren) {
            (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
                self.bump();

                let pat_str = self
                    // Remove the `(` from the span of the pattern:
                    .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
                    .unwrap_or_else(|_| pprust::pat_to_string(&pat));

                self.struct_span_err(self.prev_token.span, "unexpected closing `)`")
                    .span_label(begin_par_sp, "opening `(`")
                    .span_suggestion(
                        begin_par_sp.to(self.prev_token.span),
                        "remove parenthesis in `for` loop",
                        format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
                        // With e.g. `for (x) in y)` this would replace `(x) in y)`
                        // with `x) in y)` which is syntactically invalid.
                        // However, this is prevented before we get here.
                        Applicability::MachineApplicable,
                    )
                    .emit();

                // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
                pat.and_then(|pat| match pat.kind {
                    PatKind::Paren(pat) => pat,
                    _ => P(pat),
                })
            }
            _ => pat,
        }
    }

    pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
        (self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish.
            self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()))
            || self.token.is_ident() &&
            matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) &&
            !self.token.is_reserved_ident() && // v `foo:bar(baz)`
            self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren))
            || self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) // `foo:bar {`
            || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
            self.look_ahead(2, |t| t == &token::Lt) &&
            self.look_ahead(3, |t| t.is_ident())
            || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
            self.look_ahead(2, |t| t.is_ident())
            || self.look_ahead(1, |t| t == &token::ModSep)
                && (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz`
            self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
    }

    pub(super) fn recover_seq_parse_error(
        &mut self,
        delim: token::DelimToken,
        lo: Span,
        result: PResult<'a, P<Expr>>,
    ) -> P<Expr> {
        match result {
            Ok(x) => x,
            Err(mut err) => {
                err.emit();
                // Recover from parse error, callers expect the closing delim to be consumed.
                self.consume_block(delim, ConsumeClosingDelim::Yes);
                self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err, AttrVec::new())
            }
        }
    }
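
    /// If one of the tokens in `tokens` is a closing delimiter that matches a delimiter we have
    /// recorded as unclosed, emits a "`<delim>` may belong here" suggestion and (usually) recovers
    /// as if the delimiter were present; otherwise returns the original error.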
    pub(super) fn recover_closing_delimiter(
        &mut self,
        tokens: &[TokenKind],
        mut err: DiagnosticBuilder<'a>,
    ) -> PResult<'a, bool> {
        let mut pos = None;
        // We want to use the last closing delim that would apply.
        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
                && Some(self.token.span) > unmatched.unclosed_span
            {
                pos = Some(i);
            }
        }
        match pos {
            Some(pos) => {
                // Recover and assume that the detected unclosed delimiter was meant for
                // this location. Emit the diagnostic and act as if the delimiter was
                // present for the parser's sake.

                // Don't attempt to recover from this unclosed delimiter more than once.
                let unmatched = self.unclosed_delims.remove(pos);
                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
                if unmatched.found_delim.is_none() {
                    // We encountered `Eof`, set this fact here to avoid complaining about missing
                    // `fn main()` when we found place to suggest the closing brace.
                    *self.sess.reached_eof.borrow_mut() = true;
                }

                // We want to suggest the inclusion of the closing delimiter where it makes
                // the most sense, which is immediately after the last token:
                //
                //  {foo(bar {}}
                //      -      ^
                //      |      |
                //      |      help: `)` may belong here
                //      |
                //      unclosed delimiter
                if let Some(sp) = unmatched.unclosed_span {
                    err.span_label(sp, "unclosed delimiter");
                }
                // Backticks should be removed to apply suggestions.
                let mut delim = delim.to_string();
                delim.retain(|c| c != '`');
                err.span_suggestion_short(
                    self.prev_token.span.shrink_to_hi(),
                    &format!("`{}` may belong here", delim),
                    delim,
                    Applicability::MaybeIncorrect,
                );
                if unmatched.found_delim.is_none() {
                    // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
                    // errors which would be emitted elsewhere in the parser and let other error
                    // recovery consume the rest of the file.
                    Err(err)
                } else {
                    err.emit();
                    self.expected_tokens.clear(); // Reduce the number of errors.
                    Ok(true)
                }
            }
            _ => Err(err),
        }
    }

    /// Eats tokens until we can be relatively sure we reached the end of the
    /// statement. This is something of a best-effort heuristic.
    ///
    /// We terminate when we find an unmatched `}` (without consuming it).
    pub(super) fn recover_stmt(&mut self) {
        self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
    }

    /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
    /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
    /// approximate -- it can mean we break too early due to macros, but that
    /// should only lead to sub-optimal recovery, not inaccurate parsing).
    ///
    /// If `break_on_block` is `Break`, then we will stop consuming tokens
    /// after finding (and consuming) a brace-delimited block.
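    ///
    /// If `break_on_semi` is `Comma`, a top-level `,` (outside of `{}` or `[]`) also stops the
    /// recovery, without consuming the comma; see the corresponding match arm below.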
    pub(super) fn recover_stmt_(
        &mut self,
        break_on_semi: SemiColonMode,
        break_on_block: BlockMode,
    ) {
        let mut brace_depth = 0;
        let mut bracket_depth = 0;
        let mut in_block = false;
        debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", break_on_semi, break_on_block);
        loop {
            debug!("recover_stmt_ loop {:?}", self.token);
            match self.token.kind {
                token::OpenDelim(token::DelimToken::Brace) => {
                    brace_depth += 1;
                    self.bump();
                    if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
                    {
                        in_block = true;
                    }
                }
                token::OpenDelim(token::DelimToken::Bracket) => {
                    bracket_depth += 1;
                    self.bump();
                }
                token::CloseDelim(token::DelimToken::Brace) => {
                    if brace_depth == 0 {
                        debug!("recover_stmt_ return - close delim {:?}", self.token);
                        break;
                    }
                    brace_depth -= 1;
                    self.bump();
                    if in_block && bracket_depth == 0 && brace_depth == 0 {
                        debug!("recover_stmt_ return - block end {:?}", self.token);
                        break;
                    }
                }
                token::CloseDelim(token::DelimToken::Bracket) => {
                    bracket_depth -= 1;
                    if bracket_depth < 0 {
                        bracket_depth = 0;
                    }
                    self.bump();
                }
                token::Eof => {
                    debug!("recover_stmt_ return - Eof");
                    break;
                }
                token::Semi => {
                    self.bump();
                    if break_on_semi == SemiColonMode::Break
                        && brace_depth == 0
                        && bracket_depth == 0
                    {
                        debug!("recover_stmt_ return - Semi");
                        break;
                    }
                }
                token::Comma
                    if break_on_semi == SemiColonMode::Comma
                        && brace_depth == 0
                        && bracket_depth == 0 =>
                {
                    debug!("recover_stmt_ return - Comma");
                    break;
                }
                _ => self.bump(),
            }
        }
    }

    pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
        if self.eat_keyword(kw::In) {
            // a common typo: `for _ in in bar {}`
            self.struct_span_err(self.prev_token.span, "expected iterable, found keyword `in`")
                .span_suggestion_short(
                    in_span.until(self.prev_token.span),
                    "remove the duplicated `in`",
                    String::new(),
                    Applicability::MachineApplicable,
                )
                .emit();
        }
    }
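
    /// Eats a doc comment or an attribute that appears where a function parameter's type was
    /// expected, e.g. (illustrative) `fn foo(x: /** bad */ u32) {}`, and emits an error for it.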
    pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
        if let token::DocComment(..) = self.token.kind {
            self.struct_span_err(
                self.token.span,
                "documentation comments cannot be applied to a function parameter's type",
            )
            .span_label(self.token.span, "doc comments are not allowed here")
            .emit();
            self.bump();
        } else if self.token == token::Pound
            && self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket))
        {
            let lo = self.token.span;
            // Skip every token until next possible arg.
            while self.token != token::CloseDelim(token::Bracket) {
                self.bump();
            }
            let sp = lo.to(self.token.span);
            self.bump();
            self.struct_span_err(sp, "attributes cannot be applied to a function parameter's type")
                .span_label(sp, "attributes are not allowed here")
                .emit();
        }
    }
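
    /// Emits parameter-oriented suggestions when a parameter is missing either its type or its
    /// name, e.g. anonymous parameters like `fn foo(u32)` (2015 edition) or C-style declarations
    /// like `fn foo(String s)`. Returns the identifier to use as the recovered parameter name,
    /// if any.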
    pub(super) fn parameter_without_type(
        &mut self,
        err: &mut DiagnosticBuilder<'_>,
        pat: P<ast::Pat>,
        require_name: bool,
        first_param: bool,
    ) -> Option<Ident> {
        // If we find a pattern followed by an identifier, it could be an (incorrect)
        // C-style parameter declaration.
        if self.check_ident()
            && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren))
        {
            // `fn foo(String s) {}`
            let ident = self.parse_ident().unwrap();
            let span = pat.span.with_hi(ident.span.hi());
            err.span_suggestion(
                span,
                "declare the type after the parameter binding",
                String::from("<identifier>: <type>"),
                Applicability::HasPlaceholders,
            );
            return Some(ident);
        } else if let PatKind::Ident(_, ident, _) = pat.kind {
            if require_name
                && (self.token == token::Comma
                    || self.token == token::Lt
                    || self.token == token::CloseDelim(token::Paren))
            {
                // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}`
                if first_param {
                    err.span_suggestion(
                        pat.span,
                        "if this is a `self` type, give it a parameter name",
                        format!("self: {}", ident),
                        Applicability::MaybeIncorrect,
                    );
                }
                // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to
                // `fn foo(HashMap: TypeName<u32>)`.
                if self.token != token::Lt {
                    err.span_suggestion(
                        pat.span,
                        "if this is a parameter name, give it a type",
                        format!("{}: TypeName", ident),
                        Applicability::HasPlaceholders,
                    );
                }
                err.span_suggestion(
                    pat.span,
                    "if this is a type, explicitly ignore the parameter name",
                    format!("_: {}", ident),
                    Applicability::MachineApplicable,
                );
                err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");

                // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name.
                return if self.token == token::Lt { None } else { Some(ident) };
            }
        }
        None
    }

    pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
        let pat = self.parse_pat(Some("argument name"))?;
        self.expect(&token::Colon)?;
        let ty = self.parse_ty()?;

        struct_span_err!(
            self.diagnostic(),
            pat.span,
            E0642,
            "patterns aren't allowed in methods without bodies",
        )
        .span_suggestion_short(
            pat.span,
            "give this argument a name or use an underscore to ignore it",
            "_".to_owned(),
            Applicability::MachineApplicable,
        )
        .emit();

        // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
        let pat =
            P(Pat { kind: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID, tokens: None });
        Ok((pat, ty))
    }

    pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
        let sp = param.pat.span;
        param.ty.kind = TyKind::Err;
        self.struct_span_err(sp, "unexpected `self` parameter in function")
            .span_label(sp, "must be the first parameter of an associated function")
            .emit();
        Ok(param)
    }
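
    /// Consumes tokens until the matching closing delimiter of a `delim`-delimited block is
    /// reached (handling nested delimiters and EOF), consuming the closing delimiter itself only
    /// when `consume_close` is `ConsumeClosingDelim::Yes`.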
    pub(super) fn consume_block(
        &mut self,
        delim: token::DelimToken,
        consume_close: ConsumeClosingDelim,
    ) {
        let mut brace_depth = 0;
        loop {
            if self.eat(&token::OpenDelim(delim)) {
                brace_depth += 1;
            } else if self.check(&token::CloseDelim(delim)) {
                if brace_depth == 0 {
                    if let ConsumeClosingDelim::Yes = consume_close {
                        // Some of the callers of this method expect to be able to parse the
                        // closing delimiter themselves, so we leave it alone. Otherwise we advance
                        // the parser.
                        self.bump();
                    }
                    return;
                } else {
                    self.bump();
                    brace_depth -= 1;
                    continue;
                }
            } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
                return;
            } else {
                self.bump();
            }
        }
    }

    pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
        let (span, msg) = match (&self.token.kind, self.subparser_name) {
            (&token::Eof, Some(origin)) => {
                let sp = self.sess.source_map().next_point(self.token.span);
                (sp, format!("expected expression, found end of {}", origin))
            }
            _ => (
                self.token.span,
                format!("expected expression, found {}", super::token_descr(&self.token),),
            ),
        };
        let mut err = self.struct_span_err(span, &msg);
        let sp = self.sess.source_map().start_point(self.token.span);
        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
            self.sess.expr_parentheses_needed(&mut err, *sp, None);
        }
        err.span_label(span, "expected expression");
        err
    }
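
    /// Consumes tokens while adjusting `acc` by the weight associated with each encountered token
    /// in `modifier`, stopping once `acc` drops to zero or we hit EOF. Intended for skipping over
    /// balanced token runs during recovery, e.g. (illustrative) weighting `<` as `+1` and `>` as
    /// `-1` to skip a suspected generic argument list.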
    fn consume_tts(
        &mut self,
        mut acc: i64, // `i64` because malformed code can have more closing delims than opening.
        // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`.
        modifier: &[(token::TokenKind, i64)],
    ) {
        while acc > 0 {
            if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
                acc += *val;
            }
            if self.token.kind == token::Eof {
                break;
            }
            self.bump();
        }
    }

    /// Replaces duplicated recovered parameters with a `_` pattern to avoid unnecessary errors.
    ///
    /// This is necessary because at this point we don't know whether we parsed a function with
    /// anonymous parameters or a function with names but no types. In order to minimize
    /// unnecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
    /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
    /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
    /// we deduplicate them to not complain about duplicated parameter names.
    pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
        let mut seen_inputs = FxHashSet::default();
        for input in fn_inputs.iter_mut() {
            let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
                (&input.pat.kind, &input.ty.kind)
            {
                Some(*ident)
            } else {
                None
            };
            if let Some(ident) = opt_ident {
                if seen_inputs.contains(&ident) {
                    input.pat.kind = PatKind::Wild;
                }
                seen_inputs.insert(ident);
            }
        }
    }

    /// Handle encountering a symbol in a generic argument list that is not a `,` or `>`. In this
    /// case, we emit an error and try to suggest enclosing a const argument in braces if it looks
    /// like the user has forgotten them.
    pub fn handle_ambiguous_unbraced_const_arg(
        &mut self,
        args: &mut Vec<AngleBracketedArg>,
    ) -> PResult<'a, bool> {
        // If we haven't encountered a closing `>`, then the argument is malformed.
        // It's likely that the user has written a const expression without enclosing it
        // in braces, so we try to recover here.
        let arg = args.pop().unwrap();
        // FIXME: for some reason using `unexpected` or `expected_one_of_not_found` has
        // adverse side-effects to subsequent errors and seems to advance the parser.
        // We are causing this error here exclusively in case that a `const` expression
        // could be recovered from the current parser state, even if followed by more
        // arguments after a comma.
        let mut err = self.struct_span_err(
            self.token.span,
            &format!("expected one of `,` or `>`, found {}", super::token_descr(&self.token)),
        );
        err.span_label(self.token.span, "expected one of `,` or `>`");
        match self.recover_const_arg(arg.span(), err) {
            Ok(arg) => {
                args.push(AngleBracketedArg::Arg(arg));
                if self.eat(&token::Comma) {
                    return Ok(true); // Continue
                }
            }
            Err(mut err) => {
                args.push(arg);
                // We will emit a more generic error later.
                err.delay_as_bug();
            }
        }
        return Ok(false); // Don't continue.
    }

    /// Handle a generic const argument that had not been enclosed in braces, and suggest enclosing
    /// it in braces. In this situation, unlike in `handle_ambiguous_unbraced_const_arg`, this is
    /// almost certainly a const argument, so we always offer a suggestion.
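    ///
    /// For example (illustrative), an argument written as `N + 1` gets the suggestion `{ N + 1 }`.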
    pub fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
        let start = self.token.span;
        let expr = self.parse_expr_res(Restrictions::CONST_EXPR, None).map_err(|mut err| {
            err.span_label(
                start.shrink_to_lo(),
                "while parsing a const generic argument starting here",
            );
            err
        })?;
        if !self.expr_is_valid_const_arg(&expr) {
            self.struct_span_err(
                expr.span,
                "expressions must be enclosed in braces to be used as const generic \
                 arguments",
            )
            .multipart_suggestion(
                "enclose the `const` expression in braces",
                vec![
                    (expr.span.shrink_to_lo(), "{ ".to_string()),
                    (expr.span.shrink_to_hi(), " }".to_string()),
                ],
                Applicability::MachineApplicable,
            )
            .emit();
        }
        Ok(expr)
    }

    /// Try to recover from a possible generic const argument without `{` and `}`.
    ///
    /// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
    /// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
    /// if we think that the resulting expression would be well formed.
    pub fn recover_const_arg(
        &mut self,
        start: Span,
        mut err: DiagnosticBuilder<'a>,
    ) -> PResult<'a, GenericArg> {
        let is_op = AssocOp::from_token(&self.token)
            .and_then(|op| {
                if let AssocOp::Greater
                | AssocOp::Less
                | AssocOp::ShiftRight
                | AssocOp::GreaterEqual
                // Don't recover from `foo::<bar = baz>`, because this could be an attempt to
                // assign a value to a defaulted generic parameter.
                | AssocOp::Assign
                | AssocOp::AssignOp(_) = op
                {
                    None
                } else {
                    Some(op)
                }
            })
            .is_some();
        // This will be true when a trait object type `Foo +` or a path which was a `const fn` with
        // type params has been parsed.
        let was_op =
            matches!(self.prev_token.kind, token::BinOp(token::Plus | token::Shr) | token::Gt);
        if !is_op && !was_op {
            // We perform these checks and early return to avoid taking a snapshot unnecessarily.
            return Err(err);
        }
        let snapshot = self.clone();
        if is_op {
            self.bump();
        }
        match self.parse_expr_res(Restrictions::CONST_EXPR, None) {
            Ok(expr) => {
                if token::Comma == self.token.kind || self.token.kind.should_end_const_arg() {
                    // Avoid the following output by checking that we consumed a full const arg:
                    // help: expressions must be enclosed in braces to be used as const generic
                    //       arguments
                    //    |
                    // LL |     let sr: Vec<{ (u32, _, _) = vec![] };
                    //    |                 ^                      ^
                    err.multipart_suggestion(
                        "expressions must be enclosed in braces to be used as const generic \
                         arguments",
                        vec![
                            (start.shrink_to_lo(), "{ ".to_string()),
                            (expr.span.shrink_to_hi(), " }".to_string()),
                        ],
                        Applicability::MaybeIncorrect,
                    );
                    let value = self.mk_expr_err(start.to(expr.span));
                    err.emit();
                    return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
                }
            }
            Err(mut err) => {
                err.cancel();
            }
        }
        *self = snapshot;
        Err(err)
    }
}