2019-12-22 17:42:04 -05:00
|
|
|
use super::{BlockMode, Parser, PathStyle, SemiColonMode, SeqSep, TokenExpectType, TokenType};
|
2019-10-15 22:48:13 +02:00
|
|
|
|
2019-12-05 06:38:06 +01:00
|
|
|
use rustc_data_structures::fx::FxHashSet;
|
2019-12-31 21:25:16 +01:00
|
|
|
use rustc_errors::{pluralize, struct_span_err};
|
|
|
|
use rustc_errors::{Applicability, DiagnosticBuilder, Handler, PResult};
|
2020-01-11 00:19:09 +00:00
|
|
|
use rustc_span::source_map::Spanned;
|
2019-12-31 20:15:40 +03:00
|
|
|
use rustc_span::symbol::kw;
|
|
|
|
use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};
|
2019-12-22 17:42:04 -05:00
|
|
|
use syntax::ast::{
|
|
|
|
self, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, Param,
|
|
|
|
};
|
|
|
|
use syntax::ast::{AttrVec, ItemKind, Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind};
|
2019-10-15 22:48:13 +02:00
|
|
|
use syntax::print::pprust;
|
|
|
|
use syntax::ptr::P;
|
2019-12-22 17:42:04 -05:00
|
|
|
use syntax::token::{self, token_can_begin_expr, TokenKind};
|
|
|
|
use syntax::util::parser::AssocOp;
|
2019-12-05 06:38:06 +01:00
|
|
|
|
2019-05-23 12:55:26 -07:00
|
|
|
use log::{debug, trace};
|
2019-07-13 21:15:21 -07:00
|
|
|
use std::mem;
|
2019-05-23 12:55:26 -07:00
|
|
|
|
2019-10-03 13:22:18 -07:00
|
|
|
/// Help message suggesting the turbofish (`::<...>`) syntax when generic
/// arguments appear without it in expression position.
// `'static` is implied for a `&str` const and was redundant here
// (clippy::redundant_static_lifetimes).
const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments";
|
2019-10-08 09:35:34 +02:00
|
|
|
|
2019-05-30 18:19:48 -07:00
|
|
|
/// Creates a placeholder argument.
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn dummy_arg(ident: Ident) -> Param {
|
2019-05-30 18:19:48 -07:00
|
|
|
let pat = P(Pat {
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
2019-12-16 17:28:40 +01:00
|
|
|
kind: PatKind::Ident(BindingMode::ByValue(Mutability::Not), ident, None),
|
2019-05-30 18:19:48 -07:00
|
|
|
span: ident.span,
|
|
|
|
});
|
2019-12-22 17:42:04 -05:00
|
|
|
let ty = Ty { kind: TyKind::Err, span: ident.span, id: ast::DUMMY_NODE_ID };
|
2019-09-09 09:26:25 -03:00
|
|
|
Param {
|
2019-12-03 16:38:34 +01:00
|
|
|
attrs: AttrVec::default(),
|
2019-09-09 09:26:25 -03:00
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
pat,
|
|
|
|
span: ident.span,
|
|
|
|
ty: P(ty),
|
|
|
|
is_placeholder: false,
|
|
|
|
}
|
2019-05-30 18:19:48 -07:00
|
|
|
}
|
|
|
|
|
2019-05-23 12:55:26 -07:00
|
|
|
/// Parser errors that carry enough structure to be rendered as
/// specific diagnostics later (see `Error::span_err`).
pub enum Error {
    /// Neither candidate file for an out-of-line `mod` was found (E0583).
    FileNotFoundForModule {
        mod_name: String,
        default_path: String,
        secondary_path: String,
        dir_path: String,
    },
    /// Both candidate files for an out-of-line `mod` exist (E0584).
    DuplicatePaths {
        mod_name: String,
        default_path: String,
        secondary_path: String,
    },
    /// A doc comment appeared where it documents nothing (E0585).
    UselessDocComment,
}
|
|
|
|
|
|
|
|
impl Error {
    /// Renders this error as a diagnostic at `sp`, using error codes
    /// E0583/E0584/E0585 depending on the variant. The caller emits it.
    fn span_err(self, sp: impl Into<MultiSpan>, handler: &Handler) -> DiagnosticBuilder<'_> {
        match self {
            Error::FileNotFoundForModule {
                ref mod_name,
                ref default_path,
                ref secondary_path,
                ref dir_path,
            } => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0583,
                    "file not found for module `{}`",
                    mod_name,
                );
                // Tell the user both file locations that would have satisfied the `mod`.
                err.help(&format!(
                    "name the file either {} or {} inside the directory \"{}\"",
                    default_path, secondary_path, dir_path,
                ));
                err
            }
            Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0584,
                    "file for module `{}` found at both {} and {}",
                    mod_name,
                    default_path,
                    secondary_path,
                );
                err.help("delete or rename one of them to remove the ambiguity");
                err
            }
            Error::UselessDocComment => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0585,
                    "found a documentation comment that doesn't document anything",
                );
                err.help(
                    "doc comments must come before what they document, maybe a comment was \
                    intended with `//`?",
                );
                err
            }
        }
    }
}
|
2019-04-28 13:28:07 +08:00
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
/// AST nodes (implemented below for `Ty`, `Pat`, and `Expr`) that can be
/// rebuilt from a recovered qualified path (`<Ty>::AssocItem`).
pub(super) trait RecoverQPath: Sized + 'static {
    /// Path style used when reparsing the recovered path; expression style by default.
    const PATH_STYLE: PathStyle = PathStyle::Expr;
    /// Attempts to view this node as a type, if such a conversion exists.
    fn to_ty(&self) -> Option<P<Ty>>;
    /// Rebuilds a node of this kind from a (possibly qualified) path.
    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
}
|
|
|
|
|
|
|
|
impl RecoverQPath for Ty {
|
|
|
|
const PATH_STYLE: PathStyle = PathStyle::Type;
|
|
|
|
fn to_ty(&self) -> Option<P<Ty>> {
|
|
|
|
Some(P(self.clone()))
|
|
|
|
}
|
|
|
|
fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
|
2019-12-22 17:42:04 -05:00
|
|
|
Self { span: path.span, kind: TyKind::Path(qself, path), id: ast::DUMMY_NODE_ID }
|
2019-04-28 13:28:07 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl RecoverQPath for Pat {
|
|
|
|
fn to_ty(&self) -> Option<P<Ty>> {
|
|
|
|
self.to_ty()
|
|
|
|
}
|
|
|
|
fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
|
2019-12-22 17:42:04 -05:00
|
|
|
Self { span: path.span, kind: PatKind::Path(qself, path), id: ast::DUMMY_NODE_ID }
|
2019-04-28 13:28:07 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl RecoverQPath for Expr {
|
|
|
|
fn to_ty(&self) -> Option<P<Ty>> {
|
|
|
|
self.to_ty()
|
|
|
|
}
|
|
|
|
fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
|
|
|
|
Self {
|
|
|
|
span: path.span,
|
2019-09-26 14:39:48 +01:00
|
|
|
kind: ExprKind::Path(qself, path),
|
2019-12-03 16:38:34 +01:00
|
|
|
attrs: AttrVec::new(),
|
2019-04-28 13:28:07 +08:00
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-25 18:30:02 -07:00
|
|
|
/// Control whether the closing delimiter should be consumed when calling `Parser::consume_block`.
crate enum ConsumeClosingDelim {
    /// Consume the closing delimiter as part of the block.
    Yes,
    /// Leave the closing delimiter for the caller to handle.
    No,
}
|
|
|
|
|
2019-04-28 13:28:07 +08:00
|
|
|
impl<'a> Parser<'a> {
|
2019-10-12 06:12:00 +02:00
|
|
|
    /// Converts `err` into a diagnostic at `sp`, attached to this parser's handler.
    pub(super) fn span_fatal_err<S: Into<MultiSpan>>(
        &self,
        sp: S,
        err: Error,
    ) -> DiagnosticBuilder<'a> {
        err.span_err(sp, self.diagnostic())
    }
|
|
|
|
|
2019-10-16 10:59:30 +02:00
|
|
|
    /// Creates (without emitting) an error diagnostic with message `m` at span `sp`.
    pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_err(sp, m)
    }
|
|
|
|
|
2019-10-16 10:59:30 +02:00
|
|
|
    /// Reports an internal compiler error at `sp`; never returns.
    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
        self.sess.span_diagnostic.span_bug(sp, m)
    }
|
|
|
|
|
2019-12-05 06:38:06 +01:00
|
|
|
    /// Returns the session's diagnostic handler.
    pub(super) fn diagnostic(&self) -> &'a Handler {
        &self.sess.span_diagnostic
    }
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
    /// Fetches the source text covered by `span`, if the source map has it.
    pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
        self.sess.source_map().span_to_snippet(span)
    }
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
    /// Builds an "expected identifier, found ..." diagnostic for the current token,
    /// suggesting `r#`-escaping when the token looks like a keyword used as a name,
    /// and removal when the token is a stray comma before an identifier.
    pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
        let mut err = self.struct_span_err(
            self.token.span,
            &format!("expected identifier, found {}", super::token_descr(&self.token)),
        );
        // Tokens that may legitimately follow an identifier; the `r#` escape is
        // only suggested when the keyword is followed by one of these.
        let valid_follow = &[
            TokenKind::Eq,
            TokenKind::Colon,
            TokenKind::Comma,
            TokenKind::Semi,
            TokenKind::ModSep,
            TokenKind::OpenDelim(token::DelimToken::Brace),
            TokenKind::OpenDelim(token::DelimToken::Paren),
            TokenKind::CloseDelim(token::DelimToken::Brace),
            TokenKind::CloseDelim(token::DelimToken::Paren),
        ];
        if let token::Ident(name, false) = self.token.kind {
            // `is_raw_guess` heuristically decides whether `r#name` would be a
            // plausible identifier spelling here.
            if Ident::new(name, self.token.span).is_raw_guess()
                && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
            {
                err.span_suggestion(
                    self.token.span,
                    "you can escape reserved keywords to use them as identifiers",
                    format!("r#{}", name),
                    Applicability::MaybeIncorrect,
                );
            }
        }
        if let Some(token_descr) = super::token_descr_opt(&self.token) {
            err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
        } else {
            err.span_label(self.token.span, "expected identifier");
            // A comma directly before an identifier is likely a stray; suggest deleting it.
            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
                err.span_suggestion(
                    self.token.span,
                    "remove this comma",
                    String::new(),
                    Applicability::MachineApplicable,
                );
            }
        }
        err
    }
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
    /// Emits the "expected one of ..., found ..." error when the current token
    /// matches neither `edible` nor `inedible` nor `self.expected_tokens`.
    /// Tries closing-delimiter recovery first; on success returns that result,
    /// otherwise returns `Err` with the built diagnostic.
    pub(super) fn expected_one_of_not_found(
        &mut self,
        edible: &[TokenKind],
        inedible: &[TokenKind],
    ) -> PResult<'a, bool /* recovered */> {
        // Renders `[a, b, c]` as "a, b, or c" (or "a or b" for exactly two).
        fn tokens_to_string(tokens: &[TokenType]) -> String {
            let mut i = tokens.iter();
            // This might be a sign we need a connect method on `Iterator`.
            let b = i.next().map_or(String::new(), |t| t.to_string());
            i.enumerate().fold(b, |mut b, (i, a)| {
                if tokens.len() > 2 && i == tokens.len() - 2 {
                    b.push_str(", or ");
                } else if tokens.len() == 2 && i == tokens.len() - 2 {
                    b.push_str(" or ");
                } else {
                    b.push_str(", ");
                }
                b.push_str(&a.to_string());
                b
            })
        }

        // Merge all candidate tokens, then sort + dedup for a stable message.
        let mut expected = edible
            .iter()
            .map(|x| TokenType::Token(x.clone()))
            .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
            .chain(self.expected_tokens.iter().cloned())
            .collect::<Vec<_>>();
        expected.sort_by_cached_key(|x| x.to_string());
        expected.dedup();
        let expect = tokens_to_string(&expected[..]);
        let actual = super::token_descr(&self.token);
        let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
            // Long candidate lists are summarized by their count in the label.
            let short_expect = if expected.len() > 6 {
                format!("{} possible tokens", expected.len())
            } else {
                expect.clone()
            };
            (
                format!("expected one of {}, found {}", expect, actual),
                (self.prev_span.shrink_to_hi(), format!("expected one of {}", short_expect)),
            )
        } else if expected.is_empty() {
            (
                format!("unexpected token: {}", actual),
                (self.prev_span, "unexpected token after this".to_string()),
            )
        } else {
            (
                format!("expected {}, found {}", expect, actual),
                (self.prev_span.shrink_to_hi(), format!("expected {}", expect)),
            )
        };
        self.last_unexpected_token_span = Some(self.token.span);
        let mut err = self.struct_span_err(self.token.span, &msg_exp);
        let sp = if self.token == token::Eof {
            // This is EOF; don't want to point at the following char, but rather the last token.
            self.prev_span
        } else {
            label_sp
        };
        // First attempt mismatched-delimiter recovery with the concrete token candidates.
        match self.recover_closing_delimiter(
            &expected
                .iter()
                .filter_map(|tt| match tt {
                    TokenType::Token(t) => Some(t.clone()),
                    _ => None,
                })
                .collect::<Vec<_>>(),
            err,
        ) {
            Err(e) => err = e,
            Ok(recovered) => {
                return Ok(recovered);
            }
        }

        let sm = self.sess.source_map();
        if self.prev_span == DUMMY_SP {
            // Account for macro context where the previous span might not be
            // available to avoid incorrect output (#54841).
            err.span_label(self.token.span, label_exp);
        } else if !sm.is_multiline(self.token.span.shrink_to_hi().until(sp.shrink_to_lo())) {
            // When the spans are in the same line, it means that the only content between
            // them is whitespace, point at the found token in that case:
            //
            // X |     () => { syntax error };
            //   |                    ^^^^^ expected one of 8 possible tokens here
            //
            // instead of having:
            //
            // X |     () => { syntax error };
            //   |                   -^^^^^ unexpected token
            //   |                   |
            //   |                   expected one of 8 possible tokens here
            err.span_label(self.token.span, label_exp);
        } else {
            err.span_label(sp, label_exp);
            err.span_label(self.token.span, "unexpected token");
        }
        self.maybe_annotate_with_ascription(&mut err, false);
        Err(err)
    }
|
|
|
|
|
2019-07-17 11:40:36 -07:00
|
|
|
    /// If a type ascription (`<expr>: <type>`) was recently recorded, annotates
    /// `err` with suggestions (`::` path separator, or a `;` when
    /// `maybe_expected_semicolon` holds and the ascription ends its line) and
    /// nightly-only notes. Consumes the recorded ascription via `take()`.
    pub fn maybe_annotate_with_ascription(
        &mut self,
        err: &mut DiagnosticBuilder<'_>,
        maybe_expected_semicolon: bool,
    ) {
        if let Some((sp, likely_path)) = self.last_type_ascription.take() {
            let sm = self.sess.source_map();
            let next_pos = sm.lookup_char_pos(self.token.span.lo());
            let op_pos = sm.lookup_char_pos(sp.hi());

            let allow_unstable = self.sess.unstable_features.is_nightly_build();

            if likely_path {
                err.span_suggestion(
                    sp,
                    "maybe write a path separator here",
                    "::".to_string(),
                    // On nightly the `:` could be genuine type ascription, so the
                    // fix is only maybe-correct; on stable it is machine-applicable.
                    if allow_unstable {
                        Applicability::MaybeIncorrect
                    } else {
                        Applicability::MachineApplicable
                    },
                );
            } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
                // The `:` ends its line; a missing `;` is the likelier intent.
                err.span_suggestion(
                    sp,
                    "try using a semicolon",
                    ";".to_string(),
                    Applicability::MaybeIncorrect,
                );
            } else if allow_unstable {
                err.span_label(sp, "tried to parse a type due to this type ascription");
            } else {
                err.span_label(sp, "tried to parse a type due to this");
            }
            if allow_unstable {
                // Give extra information about type ascription only if it's a nightly compiler.
                err.note(
                    "`#![feature(type_ascription)]` lets you annotate an expression with a \
                     type: `<expr>: <type>`",
                );
                err.note(
                    "for more information, see \
                     https://github.com/rust-lang/rust/issues/23416",
                );
            }
        }
    }
|
|
|
|
|
2019-05-23 12:55:26 -07:00
|
|
|
    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
    /// passes through any errors encountered. Used for error recovery.
    pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
        if let Err(ref mut err) =
            self.parse_seq_to_before_tokens(kets, SeqSep::none(), TokenExpectType::Expect, |p| {
                Ok(p.parse_token_tree())
            })
        {
            // Pure recovery: any error from this throwaway parse is irrelevant.
            err.cancel();
        }
    }
|
|
|
|
|
|
|
|
    /// This function checks if there are trailing angle brackets and produces
    /// a diagnostic to suggest removing them.
    ///
    /// ```ignore (diagnostic)
    /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
    ///                                                        ^^ help: remove extra angle brackets
    /// ```
    pub(super) fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
        // This function is intended to be invoked after parsing a path segment where there are two
        // cases:
        //
        // 1. A specific token is expected after the path segment.
        //    eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
        //        `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
        // 2. No specific token is expected after the path segment.
        //    eg. `x.foo` (field access)
        //
        // This function is called after parsing `.foo` and before parsing the token `end` (if
        // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
        // `Foo::<Bar>`.

        // We only care about trailing angle brackets if we previously parsed angle bracket
        // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
        // removed in this case:
        //
        // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
        //
        // This case is particularly tricky as we won't notice it just looking at the tokens -
        // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
        // have already been parsed):
        //
        // `x.foo::<u32>>>(3)`
        let parsed_angle_bracket_args =
            segment.args.as_ref().map(|args| args.is_angle_bracketed()).unwrap_or(false);

        debug!(
            "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
            parsed_angle_bracket_args,
        );
        if !parsed_angle_bracket_args {
            return;
        }

        // Keep the span at the start so we can highlight the sequence of `>` characters to be
        // removed.
        let lo = self.token.span;

        // We need to look-ahead to see if we have `>` characters without moving the cursor forward
        // (since we might have the field access case and the characters we're eating are
        // actual operators and not trailing characters - ie `x.foo >> 3`).
        let mut position = 0;

        // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
        // many of each (so we can correctly pluralize our error messages) and continue to
        // advance.
        let mut number_of_shr = 0;
        let mut number_of_gt = 0;
        while self.look_ahead(position, |t| {
            trace!("check_trailing_angle_brackets: t={:?}", t);
            if *t == token::BinOp(token::BinOpToken::Shr) {
                number_of_shr += 1;
                true
            } else if *t == token::Gt {
                number_of_gt += 1;
                true
            } else {
                false
            }
        }) {
            position += 1;
        }

        // If we didn't find any trailing `>` characters, then we have nothing to error about.
        debug!(
            "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
            number_of_gt, number_of_shr,
        );
        if number_of_gt < 1 && number_of_shr < 1 {
            return;
        }

        // Finally, double check that we have our end token as otherwise this is the
        // second case.
        if self.look_ahead(position, |t| {
            trace!("check_trailing_angle_brackets: t={:?}", t);
            *t == end
        }) {
            // Eat from where we started until the end token so that parsing can continue
            // as if we didn't have those extra angle brackets.
            self.eat_to_tokens(&[&end]);
            let span = lo.until(self.token.span);

            // Each `>>` token accounts for two closing angle brackets.
            let total_num_of_gt = number_of_gt + number_of_shr * 2;
            self.struct_span_err(
                span,
                &format!("unmatched angle bracket{}", pluralize!(total_num_of_gt)),
            )
            .span_suggestion(
                span,
                &format!("remove extra angle bracket{}", pluralize!(total_num_of_gt)),
                String::new(),
                Applicability::MachineApplicable,
            )
            .emit();
        }
    }
|
|
|
|
|
2020-01-11 00:19:09 +00:00
|
|
|
    /// Check to see if a pair of chained operators looks like an attempt at chained comparison,
    /// e.g. `1 < x <= 3`. If so, suggest either splitting the comparison into two, or
    /// parenthesising the leftmost comparison.
    fn attempt_chained_comparison_suggestion(
        &mut self,
        err: &mut DiagnosticBuilder<'_>,
        inner_op: &Expr,
        outer_op: &Spanned<AssocOp>,
    ) {
        if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
            // Only suggest when both operators point "the same way"; mixed
            // directions (e.g. `a < b > c`) look more like a botched turbofish.
            match (op.node, &outer_op.node) {
                // `x < y < z` and friends.
                (BinOpKind::Lt, AssocOp::Less) | (BinOpKind::Lt, AssocOp::LessEqual) |
                (BinOpKind::Le, AssocOp::LessEqual) | (BinOpKind::Le, AssocOp::Less) |
                // `x > y > z` and friends.
                (BinOpKind::Gt, AssocOp::Greater) | (BinOpKind::Gt, AssocOp::GreaterEqual) |
                (BinOpKind::Ge, AssocOp::GreaterEqual) | (BinOpKind::Ge, AssocOp::Greater) => {
                    // Prefer the user's original spelling; fall back to pretty-printing.
                    let expr_to_str = |e: &Expr| {
                        self.span_to_snippet(e.span)
                            .unwrap_or_else(|_| pprust::expr_to_string(&e))
                    };
                    // The replaced span ends at the outer operator, so `r1` appears
                    // twice on purpose: once closing the first comparison and once
                    // opening the second (`1 < 2 <` becomes `1 < 2 && 2 <`); the
                    // original `r2` text remains untouched after the span.
                    err.span_suggestion(
                        inner_op.span.to(outer_op.span),
                        "split the comparison into two...",
                        format!(
                            "{} {} {} && {} {}",
                            expr_to_str(&l1),
                            op.node.to_string(),
                            expr_to_str(&r1),
                            expr_to_str(&r1),
                            outer_op.node.to_ast_binop().unwrap().to_string(),
                        ),
                        Applicability::MaybeIncorrect,
                    );
                    err.span_suggestion(
                        inner_op.span.to(outer_op.span),
                        "...or parenthesize one of the comparisons",
                        format!(
                            "({} {} {}) {}",
                            expr_to_str(&l1),
                            op.node.to_string(),
                            expr_to_str(&r1),
                            outer_op.node.to_ast_binop().unwrap().to_string(),
                        ),
                        Applicability::MaybeIncorrect,
                    );
                }
                _ => {}
            }
        }
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// Produces an error if comparison operators are chained (RFC #558).
    /// We only need to check the LHS, not the RHS, because all comparison ops have same
    /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`).
    ///
    /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
    /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the
    /// case.
    ///
    /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left
    /// associative we can infer that we have:
    ///
    ///           outer_op
    ///           /   \
    ///     inner_op   r2
    ///        /  \
    ///      l1    r1
    pub(super) fn check_no_chained_comparison(
        &mut self,
        inner_op: &Expr,
        outer_op: &Spanned<AssocOp>,
    ) -> PResult<'a, Option<P<Expr>>> {
        debug_assert!(
            outer_op.node.is_comparison(),
            "check_no_chained_comparison: {:?} is not comparison",
            outer_op.node,
        );

        // Helper producing the `ExprKind::Err` placeholder used after recovery.
        let mk_err_expr =
            |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err, AttrVec::new())));

        match inner_op.kind {
            ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
                // Respan to include both operators.
                let op_span = op.span.to(self.prev_span);
                let mut err =
                    self.struct_span_err(op_span, "comparison operators cannot be chained");

                // If it looks like a genuine attempt to chain operators (as opposed to a
                // misformatted turbofish, for instance), suggest a correct form.
                self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);

                // Adds the `::<...>` turbofish suggestion right before the inner `<`.
                let suggest = |err: &mut DiagnosticBuilder<'_>| {
                    err.span_suggestion_verbose(
                        op_span.shrink_to_lo(),
                        TURBOFISH,
                        "::".to_string(),
                        Applicability::MaybeIncorrect,
                    );
                };

                if op.node == BinOpKind::Lt &&
                    outer_op.node == AssocOp::Less ||  // Include `<` to provide this recommendation
                    outer_op.node == AssocOp::Greater
                // even in a case like the following:
                {
                    //     Foo<Bar<Baz<Qux, ()>>>
                    if outer_op.node == AssocOp::Less {
                        // Snapshot the parser so we can roll back speculative parsing.
                        let snapshot = self.clone();
                        self.bump();
                        // So far we have parsed `foo<bar<`, consume the rest of the type args.
                        let modifiers =
                            [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
                        self.consume_tts(1, &modifiers[..]);

                        if !&[token::OpenDelim(token::Paren), token::ModSep]
                            .contains(&self.token.kind)
                        {
                            // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
                            // parser and bail out.
                            // NOTE(review): the `mem::replace` return value is discarded;
                            // plain assignment `*self = snapshot.clone()` is equivalent.
                            mem::replace(self, snapshot.clone());
                        }
                    }
                    return if token::ModSep == self.token.kind {
                        // We have some certainty that this was a bad turbofish at this point.
                        // `foo< bar >::`
                        suggest(&mut err);

                        let snapshot = self.clone();
                        self.bump(); // `::`

                        // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
                        match self.parse_expr() {
                            Ok(_) => {
                                // 99% certain that the suggestion is correct, continue parsing.
                                err.emit();
                                // FIXME: actually check that the two expressions in the binop are
                                // paths and resynthesize new fn call expression instead of using
                                // `ExprKind::Err` placeholder.
                                mk_err_expr(self, inner_op.span.to(self.prev_span))
                            }
                            Err(mut expr_err) => {
                                expr_err.cancel();
                                // Not entirely sure now, but we bubble the error up with the
                                // suggestion.
                                mem::replace(self, snapshot);
                                Err(err)
                            }
                        }
                    } else if token::OpenDelim(token::Paren) == self.token.kind {
                        // We have high certainty that this was a bad turbofish at this point.
                        // `foo< bar >(`
                        suggest(&mut err);
                        // Consume the fn call arguments.
                        match self.consume_fn_args() {
                            Err(()) => Err(err),
                            Ok(()) => {
                                err.emit();
                                // FIXME: actually check that the two expressions in the binop are
                                // paths and resynthesize new fn call expression instead of using
                                // `ExprKind::Err` placeholder.
                                mk_err_expr(self, inner_op.span.to(self.prev_span))
                            }
                        }
                    } else {
                        // All we know is that this is `foo < bar >` and *nothing* else. Try to
                        // be helpful, but don't attempt to recover.
                        err.help(TURBOFISH);
                        err.help("or use `(...)` if you meant to specify fn arguments");
                        // These cases cause too many knock-down errors, bail out (#61329).
                        Err(err)
                    };
                }
                err.emit();
            }
            _ => {}
        }
        Ok(None)
    }
|
|
|
|
|
2019-10-03 13:22:18 -07:00
|
|
|
fn consume_fn_args(&mut self) -> Result<(), ()> {
|
|
|
|
let snapshot = self.clone();
|
|
|
|
self.bump(); // `(`
|
|
|
|
|
|
|
|
// Consume the fn call arguments.
|
2019-12-22 17:42:04 -05:00
|
|
|
let modifiers =
|
|
|
|
[(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)];
|
2019-10-03 13:22:18 -07:00
|
|
|
self.consume_tts(1, &modifiers[..]);
|
|
|
|
|
|
|
|
if self.token.kind == token::Eof {
|
|
|
|
// Not entirely sure that what we consumed were fn arguments, rollback.
|
|
|
|
mem::replace(self, snapshot);
|
|
|
|
Err(())
|
|
|
|
} else {
|
|
|
|
// 99% certain that the suggestion is correct, continue parsing.
|
|
|
|
Ok(())
|
|
|
|
}
|
2019-05-23 12:55:26 -07:00
|
|
|
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn maybe_report_ambiguous_plus(
|
2019-04-28 13:28:07 +08:00
|
|
|
&mut self,
|
|
|
|
allow_plus: bool,
|
|
|
|
impl_dyn_multi: bool,
|
|
|
|
ty: &Ty,
|
|
|
|
) {
|
|
|
|
if !allow_plus && impl_dyn_multi {
|
|
|
|
let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
|
|
|
|
self.struct_span_err(ty.span, "ambiguous `+` in a type")
|
|
|
|
.span_suggestion(
|
|
|
|
ty.span,
|
|
|
|
"use parentheses to disambiguate",
|
|
|
|
sum_with_parens,
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
)
|
|
|
|
.emit();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-08 10:27:32 +02:00
|
|
|
    /// Recovers from a misplaced `+` after a non-path type (e.g. `&Foo + Bar`),
    /// emitting E0178 with a parenthesization suggestion where possible.
    /// No-op unless `allow_plus` holds and the current token is `+`-like.
    pub(super) fn maybe_recover_from_bad_type_plus(
        &mut self,
        allow_plus: bool,
        ty: &Ty,
    ) -> PResult<'a, ()> {
        // Do not add `+` to expected tokens.
        if !allow_plus || !self.token.is_like_plus() {
            return Ok(());
        }

        self.bump(); // `+`
        // Parse the trailing bounds so the whole bad sum type is spanned.
        let bounds = self.parse_generic_bounds(None)?;
        let sum_span = ty.span.to(self.prev_span);

        let mut err = struct_span_err!(
            self.sess.span_diagnostic,
            sum_span,
            E0178,
            "expected a path on the left-hand side of `+`, not `{}`",
            pprust::ty_to_string(ty)
        );

        match ty.kind {
            TyKind::Rptr(ref lifetime, ref mut_ty) => {
                // Pretty-print `&'a mut (Ty + Bounds)` as the suggested fix.
                let sum_with_parens = pprust::to_string(|s| {
                    s.s.word("&");
                    s.print_opt_lifetime(lifetime);
                    s.print_mutability(mut_ty.mutbl, false);
                    s.popen();
                    s.print_type(&mut_ty.ty);
                    s.print_type_bounds(" +", &bounds);
                    s.pclose()
                });
                err.span_suggestion(
                    sum_span,
                    "try adding parentheses",
                    sum_with_parens,
                    Applicability::MachineApplicable,
                );
            }
            TyKind::Ptr(..) | TyKind::BareFn(..) => {
                err.span_label(sum_span, "perhaps you forgot parentheses?");
            }
            _ => {
                err.span_label(sum_span, "expected a path");
            }
        }
        err.emit();
        Ok(())
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
|
|
|
|
/// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
|
|
|
|
/// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
|
2019-04-28 13:28:07 +08:00
|
|
|
&mut self,
|
|
|
|
base: P<T>,
|
|
|
|
allow_recovery: bool,
|
|
|
|
) -> PResult<'a, P<T>> {
|
|
|
|
// Do not add `::` to expected tokens.
|
|
|
|
if allow_recovery && self.token == token::ModSep {
|
|
|
|
if let Some(ty) = base.to_ty() {
|
|
|
|
return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(base)
|
|
|
|
}
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// Given an already parsed `Ty`, parses the `::AssocItem` tail and
/// combines them into a `<Ty>::AssocItem` expression/pattern/type.
pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
    &mut self,
    ty_span: Span,
    ty: P<Ty>,
) -> PResult<'a, P<T>> {
    self.expect(&token::ModSep)?;

    // Parse the `AssocItem` path segments; the span is patched up afterwards.
    let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP };
    self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
    path.span = ty_span.to(self.prev_span);

    // Prefer the user's original source text; fall back to pretty-printing the AST.
    let ty_str = self.span_to_snippet(ty_span).unwrap_or_else(|_| pprust::ty_to_string(&ty));
    self.struct_span_err(path.span, "missing angle brackets in associated item path")
        .span_suggestion(
            // This is a best-effort recovery.
            path.span,
            "try",
            format!("<{}>::{}", ty_str, pprust::path_to_string(&path)),
            Applicability::MaybeIncorrect,
        )
        .emit();

    let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
    Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
/// Eats a stray `;` where an item was expected, emitting an error with a removal
/// suggestion. When the previous item is of a kind that commonly attracts this
/// typo (struct/enum/trait/union), an explanatory help note is added.
/// Returns `true` iff a semicolon was consumed.
pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
    if self.eat(&token::Semi) {
        let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`");
        err.span_suggestion_short(
            self.prev_span,
            "remove this semicolon",
            String::new(),
            Applicability::MachineApplicable,
        );
        if !items.is_empty() {
            let previous_item = &items[items.len() - 1];
            let previous_item_kind_name = match previous_item.kind {
                // Say "braced struct" because tuple-structs and
                // braceless-empty-struct declarations do take a semicolon.
                ItemKind::Struct(..) => Some("braced struct"),
                ItemKind::Enum(..) => Some("enum"),
                ItemKind::Trait(..) => Some("trait"),
                ItemKind::Union(..) => Some("union"),
                _ => None,
            };
            if let Some(name) = previous_item_kind_name {
                err.help(&format!("{} declarations are not followed by a semicolon", name));
            }
        }
        err.emit();
        true
    } else {
        false
    }
}
|
2019-05-16 13:33:26 -07:00
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
/// closing delimiter.
pub(super) fn unexpected_try_recover(
    &mut self,
    t: &TokenKind,
) -> PResult<'a, bool /* recovered */> {
    let token_str = pprust::token_kind_to_string(t);
    let this_token_str = super::token_descr(&self.token);
    // Pick the pair of spans to label, depending on where in the input we stopped.
    let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
        // Point at the end of the macro call when reaching end of macro arguments.
        (token::Eof, Some(_)) => {
            let sp = self.sess.source_map().next_point(self.token.span);
            (sp, sp)
        }
        // We don't want to point at the following span after DUMMY_SP.
        // This happens when the parser finds an empty TokenStream.
        _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span),
        // EOF, don't want to point at the following char, but rather the last token.
        (token::Eof, None) => (self.prev_span, self.token.span),
        _ => (self.prev_span.shrink_to_hi(), self.token.span),
    };
    let msg = format!(
        "expected `{}`, found {}",
        token_str,
        match (&self.token.kind, self.subparser_name) {
            // At the end of a subparser's input, name the macro origin instead of "EOF".
            (token::Eof, Some(origin)) => format!("end of {}", origin),
            _ => this_token_str,
        },
    );
    let mut err = self.struct_span_err(sp, &msg);
    let label_exp = format!("expected `{}`", token_str);
    // If the unexpected token is explainable by an unclosed delimiter, recover
    // there instead of emitting this error.
    match self.recover_closing_delimiter(&[t.clone()], err) {
        Err(e) => err = e,
        Ok(recovered) => {
            return Ok(recovered);
        }
    }
    let sm = self.sess.source_map();
    if !sm.is_multiline(prev_sp.until(sp)) {
        // When the spans are in the same line, it means that the only content
        // between them is whitespace, point only at the found token.
        err.span_label(sp, label_exp);
    } else {
        err.span_label(prev_sp, label_exp);
        err.span_label(sp, "unexpected token");
    }
    Err(err)
}
|
|
|
|
|
|
|
|
/// Expects and consumes a `;`. On failure, attempts two heuristic recoveries:
/// a `,`/`:` typed where `;` was meant, or a `;` missing entirely before a token
/// that could begin a new statement. Otherwise it errors unconditionally.
pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
    if self.eat(&token::Semi) {
        return Ok(());
    }
    let sm = self.sess.source_map();
    let msg = format!("expected `;`, found `{}`", super::token_descr(&self.token));
    let appl = Applicability::MachineApplicable;
    if self.token.span == DUMMY_SP || self.prev_span == DUMMY_SP {
        // Likely inside a macro, can't provide meaningful suggestions.
        return self.expect(&token::Semi).map(drop);
    } else if !sm.is_multiline(self.prev_span.until(self.token.span)) {
        // The current token is in the same line as the prior token, not recoverable.
    } else if self.look_ahead(1, |t| {
        t == &token::CloseDelim(token::Brace)
            || token_can_begin_expr(t) && t.kind != token::Colon
    }) && [token::Comma, token::Colon].contains(&self.token.kind)
    {
        // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
        // either `,` or `:`, and the next token could either start a new statement or is a
        // block close. For example:
        //
        //   let x = 32:
        //   let y = 42;
        self.bump();
        let sp = self.prev_span;
        self.struct_span_err(sp, &msg)
            .span_suggestion(sp, "change this to `;`", ";".to_string(), appl)
            .emit();
        return Ok(());
    } else if self.look_ahead(0, |t| {
        t == &token::CloseDelim(token::Brace)
            || (
                token_can_begin_expr(t) && t != &token::Semi && t != &token::Pound
                // Avoid triggering with too many trailing `#` in raw string.
            )
    }) {
        // Missing semicolon typo. This is triggered if the next token could either start a
        // new statement or is a block close. For example:
        //
        //   let x = 32
        //   let y = 42;
        let sp = self.prev_span.shrink_to_hi();
        self.struct_span_err(sp, &msg)
            .span_label(self.token.span, "unexpected token")
            .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl)
            .emit();
        return Ok(());
    }
    self.expect(&token::Semi).map(drop) // Error unconditionally
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn parse_semi_or_incorrect_foreign_fn_body(
|
2019-07-13 21:15:21 -07:00
|
|
|
&mut self,
|
|
|
|
ident: &Ident,
|
|
|
|
extern_sp: Span,
|
|
|
|
) -> PResult<'a, ()> {
|
|
|
|
if self.token != token::Semi {
|
2019-09-06 03:56:45 +01:00
|
|
|
// This might be an incorrect fn definition (#62109).
|
2019-07-13 21:15:21 -07:00
|
|
|
let parser_snapshot = self.clone();
|
|
|
|
match self.parse_inner_attrs_and_block() {
|
|
|
|
Ok((_, body)) => {
|
|
|
|
self.struct_span_err(ident.span, "incorrect `fn` inside `extern` block")
|
|
|
|
.span_label(ident.span, "can't have a body")
|
|
|
|
.span_label(body.span, "this body is invalid here")
|
|
|
|
.span_label(
|
|
|
|
extern_sp,
|
|
|
|
"`extern` blocks define existing foreign functions and `fn`s \
|
2019-12-22 17:42:04 -05:00
|
|
|
inside of them cannot have a body",
|
|
|
|
)
|
|
|
|
.help(
|
|
|
|
"you might have meant to write a function accessible through ffi, \
|
2019-07-13 21:15:21 -07:00
|
|
|
which can be done by writing `extern fn` outside of the \
|
2019-12-22 17:42:04 -05:00
|
|
|
`extern` block",
|
|
|
|
)
|
|
|
|
.note(
|
|
|
|
"for more information, visit \
|
|
|
|
https://doc.rust-lang.org/std/keyword.extern.html",
|
|
|
|
)
|
2019-07-13 21:15:21 -07:00
|
|
|
.emit();
|
|
|
|
}
|
|
|
|
Err(mut err) => {
|
|
|
|
err.cancel();
|
|
|
|
mem::replace(self, parser_snapshot);
|
2019-10-20 14:35:46 -07:00
|
|
|
self.expect_semi()?;
|
2019-07-13 21:15:21 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
self.bump();
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
/// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
pub(super) fn recover_incorrect_await_syntax(
    &mut self,
    lo: Span,
    await_sp: Span,
    attrs: AttrVec,
) -> PResult<'a, P<Expr>> {
    let (hi, expr, is_question) = if self.token == token::Not {
        // Handle `await!(<expr>)`.
        self.recover_await_macro()?
    } else {
        // Handle the prefix forms: `await <expr>`, `await? <expr>`, `await { .. }`.
        self.recover_await_prefix(await_sp)?
    };
    // Emit the error, then build a proper postfix `.await` expression to continue with.
    let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
    let expr = self.mk_expr(lo.to(sp), ExprKind::Await(expr), attrs);
    self.maybe_recover_from_bad_qpath(expr, true)
}
|
|
|
|
|
|
|
|
/// Consumes the `!(<expr>)` part of an `await!(<expr>)` call, returning the span
/// of the closing `)`, the inner expression, and `false` (no `?` form exists here).
fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
    // `!` then `(`.
    self.expect(&token::Not)?;
    self.expect(&token::OpenDelim(token::Paren))?;
    // The awaited expression itself.
    let inner = self.parse_expr()?;
    // Closing `)`; its span (`prev_span` after the expect) ends the whole form.
    self.expect(&token::CloseDelim(token::Paren))?;
    Ok((self.prev_span, inner, false))
}
|
2019-07-02 06:30:21 +02:00
|
|
|
|
2019-12-03 14:21:03 +01:00
|
|
|
/// Consumes the expression after a prefix `await` / `await?`, returning its span,
/// the expression, and whether the `?` variant was used.
fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
    let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
    let expr = if self.token == token::OpenDelim(token::Brace) {
        // Handle `await { <expr> }`.
        // This needs to be handled separately from the next arm to avoid
        // interpreting `await { <expr> }?` as `<expr>?.await`.
        self.parse_block_expr(None, self.token.span, BlockCheckMode::Default, AttrVec::new())
    } else {
        self.parse_expr()
    }
    .map_err(|mut err| {
        // Tie any parse failure of the operand back to the offending `await`.
        err.span_label(await_sp, "while parsing this incorrect await expression");
        err
    })?;
    Ok((expr.span, expr, is_question))
}
|
|
|
|
|
|
|
|
fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
|
2019-12-22 17:42:04 -05:00
|
|
|
let expr_str =
|
|
|
|
self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(&expr));
|
2019-05-16 13:33:26 -07:00
|
|
|
let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
|
2019-07-02 06:30:21 +02:00
|
|
|
let sp = lo.to(hi);
|
2019-09-26 14:39:48 +01:00
|
|
|
let app = match expr.kind {
|
2019-05-16 13:33:26 -07:00
|
|
|
ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
|
|
|
|
_ => Applicability::MachineApplicable,
|
|
|
|
};
|
|
|
|
self.struct_span_err(sp, "incorrect use of `await`")
|
|
|
|
.span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
|
|
|
|
.emit();
|
2019-07-02 06:30:21 +02:00
|
|
|
sp
|
2019-05-16 13:33:26 -07:00
|
|
|
}
|
2019-05-16 14:31:07 -07:00
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// If encountering `future.await()`, consumes and emits an error.
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn recover_from_await_method_call(&mut self) {
|
2019-12-22 17:42:04 -05:00
|
|
|
if self.token == token::OpenDelim(token::Paren)
|
|
|
|
&& self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
|
2019-05-16 14:31:07 -07:00
|
|
|
{
|
|
|
|
// future.await()
|
2019-06-07 13:31:13 +03:00
|
|
|
let lo = self.token.span;
|
2019-05-16 14:31:07 -07:00
|
|
|
self.bump(); // (
|
2019-06-07 13:31:13 +03:00
|
|
|
let sp = lo.to(self.token.span);
|
2019-05-16 14:31:07 -07:00
|
|
|
self.bump(); // )
|
2019-05-16 15:25:58 -07:00
|
|
|
self.struct_span_err(sp, "incorrect use of `await`")
|
|
|
|
.span_suggestion(
|
|
|
|
sp,
|
|
|
|
"`await` is not a method call, remove the parentheses",
|
|
|
|
String::new(),
|
|
|
|
Applicability::MachineApplicable,
|
2019-12-22 17:42:04 -05:00
|
|
|
)
|
|
|
|
.emit()
|
2019-05-16 14:31:07 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// Recovers a situation like `for ( $pat in $expr )`
/// and suggest writing `for $pat in $expr` instead.
///
/// This should be called before parsing the `$block`.
pub(super) fn recover_parens_around_for_head(
    &mut self,
    pat: P<Pat>,
    expr: &Expr,
    begin_paren: Option<Span>,
) -> P<Pat> {
    match (&self.token.kind, begin_paren) {
        // Only fires when the head was opened with `(` and we're now at the stray `)`.
        (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
            self.bump();

            let pat_str = self
                // Remove the `(` from the span of the pattern:
                .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
                .unwrap_or_else(|_| pprust::pat_to_string(&pat));

            self.struct_span_err(self.prev_span, "unexpected closing `)`")
                .span_label(begin_par_sp, "opening `(`")
                .span_suggestion(
                    begin_par_sp.to(self.prev_span),
                    "remove parenthesis in `for` loop",
                    format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
                    // With e.g. `for (x) in y)` this would replace `(x) in y)`
                    // with `x) in y)` which is syntactically invalid.
                    // However, this is prevented before we get here.
                    Applicability::MachineApplicable,
                )
                .emit();

            // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
            pat.and_then(|pat| match pat.kind {
                PatKind::Paren(pat) => pat,
                _ => P(pat),
            })
        }
        _ => pat,
    }
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
/// Heuristic used after a `:` type ascription: returns `true` when the tokens
/// look like the user actually wrote a path (e.g. a typoed `::` or turbofish),
/// so the caller can suggest the path form. The precedence of the `&&`/`||`
/// chain below is load-bearing; the inline comments show the shape each
/// disjunct matches.
pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
    (self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish.
        self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()))
        || self.token.is_ident() &&
        match node {
            // `foo::` → `foo:` or `foo.bar::` → `foo.bar:`
            ast::ExprKind::Path(..) | ast::ExprKind::Field(..) => true,
            _ => false,
        } &&
        !self.token.is_reserved_ident() && // v `foo:bar(baz)`
        self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren))
            || self.look_ahead(1, |t| t == &token::Lt) && // `foo:bar<baz`
            self.look_ahead(2, |t| t.is_ident())
            || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
            self.look_ahead(2, |t| t.is_ident())
            || self.look_ahead(1, |t| t == &token::ModSep)
                && (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz`
                self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn recover_seq_parse_error(
|
2019-05-16 14:31:07 -07:00
|
|
|
&mut self,
|
|
|
|
delim: token::DelimToken,
|
|
|
|
lo: Span,
|
|
|
|
result: PResult<'a, P<Expr>>,
|
|
|
|
) -> P<Expr> {
|
|
|
|
match result {
|
|
|
|
Ok(x) => x,
|
|
|
|
Err(mut err) => {
|
|
|
|
err.emit();
|
2019-10-25 18:30:02 -07:00
|
|
|
// Recover from parse error, callers expect the closing delim to be consumed.
|
|
|
|
self.consume_block(delim, ConsumeClosingDelim::Yes);
|
2019-12-03 16:38:34 +01:00
|
|
|
self.mk_expr(lo.to(self.prev_span), ExprKind::Err, AttrVec::new())
|
2019-05-16 14:31:07 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
/// If one of the expected closing delimiters in `tokens` matches a recorded
/// unclosed delimiter, recovers by pretending it was present: augments `err`
/// with labels/suggestions and either emits it (returning `Ok(true)`) or,
/// when EOF was hit while lexing, returns the error for the caller to handle.
pub(super) fn recover_closing_delimiter(
    &mut self,
    tokens: &[TokenKind],
    mut err: DiagnosticBuilder<'a>,
) -> PResult<'a, bool> {
    let mut pos = None;
    // We want to use the last closing delim that would apply.
    for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
        if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
            && Some(self.token.span) > unmatched.unclosed_span
        {
            pos = Some(i);
        }
    }
    match pos {
        Some(pos) => {
            // Recover and assume that the detected unclosed delimiter was meant for
            // this location. Emit the diagnostic and act as if the delimiter was
            // present for the parser's sake.

            // Don't attempt to recover from this unclosed delimiter more than once.
            let unmatched = self.unclosed_delims.remove(pos);
            let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
            if unmatched.found_delim.is_none() {
                // We encountered `Eof`, set this fact here to avoid complaining about missing
                // `fn main()` when we found place to suggest the closing brace.
                *self.sess.reached_eof.borrow_mut() = true;
            }

            // We want to suggest the inclusion of the closing delimiter where it makes
            // the most sense, which is immediately after the last token:
            //
            // {foo(bar {}}
            // -      ^
            // |      |
            // |      help: `)` may belong here
            // |
            // unclosed delimiter
            if let Some(sp) = unmatched.unclosed_span {
                err.span_label(sp, "unclosed delimiter");
            }
            err.span_suggestion_short(
                self.prev_span.shrink_to_hi(),
                &format!("{} may belong here", delim.to_string()),
                delim.to_string(),
                Applicability::MaybeIncorrect,
            );
            if unmatched.found_delim.is_none() {
                // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
                // errors which would be emitted elsewhere in the parser and let other error
                // recovery consume the rest of the file.
                Err(err)
            } else {
                err.emit();
                self.expected_tokens.clear(); // Reduce the number of errors.
                Ok(true)
            }
        }
        _ => Err(err),
    }
}
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// Eats tokens until we can be relatively sure we reached the end of the
/// statement. This is something of a best-effort heuristic.
///
/// We terminate when we find an unmatched `}` (without consuming it).
pub(super) fn recover_stmt(&mut self) {
    // Delegate with the most permissive modes: never break on `;` or on blocks.
    self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
}
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// If `break_on_semi` is `Break`, then we will stop consuming tokens after
/// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
/// approximate -- it can mean we break too early due to macros, but that
/// should only lead to sub-optimal recovery, not inaccurate parsing).
///
/// If `break_on_block` is `Break`, then we will stop consuming tokens
/// after finding (and consuming) a brace-delimited block.
pub(super) fn recover_stmt_(
    &mut self,
    break_on_semi: SemiColonMode,
    break_on_block: BlockMode,
) {
    // Track nesting so `;`/`,` only terminate at the top level.
    let mut brace_depth = 0;
    let mut bracket_depth = 0;
    let mut in_block = false;
    debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", break_on_semi, break_on_block);
    loop {
        debug!("recover_stmt_ loop {:?}", self.token);
        match self.token.kind {
            token::OpenDelim(token::DelimToken::Brace) => {
                brace_depth += 1;
                self.bump();
                if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
                {
                    // Entered the outermost block; its matching `}` ends recovery.
                    in_block = true;
                }
            }
            token::OpenDelim(token::DelimToken::Bracket) => {
                bracket_depth += 1;
                self.bump();
            }
            token::CloseDelim(token::DelimToken::Brace) => {
                if brace_depth == 0 {
                    // Unmatched `}`: stop without consuming it.
                    debug!("recover_stmt_ return - close delim {:?}", self.token);
                    break;
                }
                brace_depth -= 1;
                self.bump();
                if in_block && bracket_depth == 0 && brace_depth == 0 {
                    debug!("recover_stmt_ return - block end {:?}", self.token);
                    break;
                }
            }
            token::CloseDelim(token::DelimToken::Bracket) => {
                bracket_depth -= 1;
                // Clamp at zero so a stray `]` cannot push the depth negative.
                if bracket_depth < 0 {
                    bracket_depth = 0;
                }
                self.bump();
            }
            token::Eof => {
                debug!("recover_stmt_ return - Eof");
                break;
            }
            token::Semi => {
                self.bump();
                if break_on_semi == SemiColonMode::Break
                    && brace_depth == 0
                    && bracket_depth == 0
                {
                    debug!("recover_stmt_ return - Semi");
                    break;
                }
            }
            token::Comma
                if break_on_semi == SemiColonMode::Comma
                    && brace_depth == 0
                    && bracket_depth == 0 =>
            {
                debug!("recover_stmt_ return - Semi");
                break;
            }
            _ => self.bump(),
        }
    }
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
|
2019-05-23 12:55:26 -07:00
|
|
|
if self.eat_keyword(kw::In) {
|
|
|
|
// a common typo: `for _ in in bar {}`
|
2019-07-24 10:51:20 +02:00
|
|
|
self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`")
|
|
|
|
.span_suggestion_short(
|
|
|
|
in_span.until(self.prev_span),
|
|
|
|
"remove the duplicated `in`",
|
|
|
|
String::new(),
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
)
|
|
|
|
.emit();
|
2019-05-23 12:55:26 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
/// Builds (but does not emit) an "expected `;` or `{`" error at the current
/// token, returning it as `Err` for the caller to handle.
pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
    let descr = super::token_descr(&self.token);
    let mut err = self
        .struct_span_err(self.token.span, &format!("expected `;` or `{{`, found {}", descr));
    err.span_label(self.token.span, "expected `;` or `{`");
    Err(err)
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
/// Consumes a doc comment or an attribute that was incorrectly placed on a
/// function parameter's type, emitting an error for either case.
pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
    if let token::DocComment(_) = self.token.kind {
        self.struct_span_err(
            self.token.span,
            "documentation comments cannot be applied to a function parameter's type",
        )
        .span_label(self.token.span, "doc comments are not allowed here")
        .emit();
        self.bump();
    } else if self.token == token::Pound
        && self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket))
    {
        // A `#[...]` attribute: consume everything up to and including the `]`.
        let lo = self.token.span;
        // Skip every token until next possible arg.
        while self.token != token::CloseDelim(token::Bracket) {
            self.bump();
        }
        let sp = lo.to(self.token.span);
        self.bump();
        self.struct_span_err(sp, "attributes cannot be applied to a function parameter's type")
            .span_label(sp, "attributes are not allowed here")
            .emit();
    }
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
/// Augments `err` with suggestions for a parameter that is missing its type,
/// covering C-style declarations (`fn foo(String s)`) and 2015-edition
/// anonymous parameters (`fn foo(a, b)`). Returns the identifier to use as a
/// recovered parameter name, or `None` when no safe recovery exists.
pub(super) fn parameter_without_type(
    &mut self,
    err: &mut DiagnosticBuilder<'_>,
    pat: P<ast::Pat>,
    require_name: bool,
    is_self_semantic: bool,
    in_assoc_item: bool,
) -> Option<Ident> {
    // If we find a pattern followed by an identifier, it could be an (incorrect)
    // C-style parameter declaration.
    if self.check_ident()
        && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren))
    {
        // `fn foo(String s) {}`
        let ident = self.parse_ident().unwrap();
        let span = pat.span.with_hi(ident.span.hi());

        err.span_suggestion(
            span,
            "declare the type after the parameter binding",
            String::from("<identifier>: <type>"),
            Applicability::HasPlaceholders,
        );
        return Some(ident);
    } else if let PatKind::Ident(_, ident, _) = pat.kind {
        if require_name
            && (in_assoc_item
                || self.token == token::Comma
                || self.token == token::Lt
                || self.token == token::CloseDelim(token::Paren))
        {
            // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}`
            if is_self_semantic {
                err.span_suggestion(
                    pat.span,
                    "if this is a `self` type, give it a parameter name",
                    format!("self: {}", ident),
                    Applicability::MaybeIncorrect,
                );
            }
            // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to
            // `fn foo(HashMap: TypeName<u32>)`.
            if self.token != token::Lt {
                err.span_suggestion(
                    pat.span,
                    "if this was a parameter name, give it a type",
                    format!("{}: TypeName", ident),
                    Applicability::HasPlaceholders,
                );
            }
            err.span_suggestion(
                pat.span,
                "if this is a type, explicitly ignore the parameter name",
                format!("_: {}", ident),
                Applicability::MachineApplicable,
            );
            err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");

            // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name.
            return if self.token == token::Lt { None } else { Some(ident) };
        }
    }
    None
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
/// Parses `pat: ty` for a body-less method where patterns are not allowed
/// (E0642), emits the error with an `_` suggestion, and returns a wildcard
/// pattern in place of the parsed one.
pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
    let pat = self.parse_pat(Some("argument name"))?;
    self.expect(&token::Colon)?;
    let ty = self.parse_ty()?;

    struct_span_err!(
        self.diagnostic(),
        pat.span,
        E0642,
        "patterns aren't allowed in methods without bodies",
    )
    .span_suggestion_short(
        pat.span,
        "give this argument a name or use an underscore to ignore it",
        "_".to_owned(),
        Applicability::MachineApplicable,
    )
    .emit();

    // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
    let pat = P(Pat { kind: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID });
    Ok((pat, ty))
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn recover_bad_self_param(
|
2019-05-23 12:54:27 -07:00
|
|
|
&mut self,
|
2019-08-27 13:24:32 +02:00
|
|
|
mut param: ast::Param,
|
2020-01-29 01:30:01 +01:00
|
|
|
in_assoc_item: bool,
|
2019-08-27 13:24:32 +02:00
|
|
|
) -> PResult<'a, ast::Param> {
|
|
|
|
let sp = param.pat.span;
|
2019-09-26 17:25:31 +01:00
|
|
|
param.ty.kind = TyKind::Err;
|
2019-05-23 13:10:24 -07:00
|
|
|
let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function");
|
2020-01-29 01:30:01 +01:00
|
|
|
if in_assoc_item {
|
2019-05-23 13:10:24 -07:00
|
|
|
err.span_label(sp, "must be the first associated function parameter");
|
2019-05-23 12:54:27 -07:00
|
|
|
} else {
|
2019-05-23 13:10:24 -07:00
|
|
|
err.span_label(sp, "not valid as function parameter");
|
|
|
|
err.note("`self` is only valid as the first parameter of an associated function");
|
2019-05-23 12:54:27 -07:00
|
|
|
}
|
|
|
|
err.emit();
|
2019-08-27 13:24:32 +02:00
|
|
|
Ok(param)
|
2019-05-23 12:54:27 -07:00
|
|
|
}
|
|
|
|
|
2019-10-25 18:30:02 -07:00
|
|
|
pub(super) fn consume_block(
|
|
|
|
&mut self,
|
|
|
|
delim: token::DelimToken,
|
|
|
|
consume_close: ConsumeClosingDelim,
|
|
|
|
) {
|
2019-05-16 14:31:07 -07:00
|
|
|
let mut brace_depth = 0;
|
|
|
|
loop {
|
|
|
|
if self.eat(&token::OpenDelim(delim)) {
|
|
|
|
brace_depth += 1;
|
2019-10-25 18:30:02 -07:00
|
|
|
} else if self.check(&token::CloseDelim(delim)) {
|
2019-05-16 14:31:07 -07:00
|
|
|
if brace_depth == 0 {
|
2019-10-25 18:30:02 -07:00
|
|
|
if let ConsumeClosingDelim::Yes = consume_close {
|
|
|
|
// Some of the callers of this method expect to be able to parse the
|
|
|
|
// closing delimiter themselves, so we leave it alone. Otherwise we advance
|
|
|
|
// the parser.
|
|
|
|
self.bump();
|
|
|
|
}
|
2019-05-16 14:31:07 -07:00
|
|
|
return;
|
|
|
|
} else {
|
2019-10-25 18:30:02 -07:00
|
|
|
self.bump();
|
2019-05-16 14:31:07 -07:00
|
|
|
brace_depth -= 1;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
} else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
|
|
|
|
return;
|
|
|
|
} else {
|
|
|
|
self.bump();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
|
2019-06-05 01:17:07 +03:00
|
|
|
let (span, msg) = match (&self.token.kind, self.subparser_name) {
|
2019-05-24 02:04:56 +03:00
|
|
|
(&token::Eof, Some(origin)) => {
|
2019-06-07 13:31:13 +03:00
|
|
|
let sp = self.sess.source_map().next_point(self.token.span);
|
2019-05-24 15:17:32 -07:00
|
|
|
(sp, format!("expected expression, found end of {}", origin))
|
2019-05-21 23:16:46 -07:00
|
|
|
}
|
2019-12-22 17:42:04 -05:00
|
|
|
_ => (
|
|
|
|
self.token.span,
|
2019-12-07 03:07:35 +01:00
|
|
|
format!("expected expression, found {}", super::token_descr(&self.token),),
|
2019-12-22 17:42:04 -05:00
|
|
|
),
|
2019-05-21 23:16:46 -07:00
|
|
|
};
|
|
|
|
let mut err = self.struct_span_err(span, &msg);
|
2019-06-07 13:31:13 +03:00
|
|
|
let sp = self.sess.source_map().start_point(self.token.span);
|
2019-05-21 23:16:46 -07:00
|
|
|
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
|
|
|
|
self.sess.expr_parentheses_needed(&mut err, *sp, None);
|
|
|
|
}
|
|
|
|
err.span_label(span, "expected expression");
|
|
|
|
err
|
|
|
|
}
|
2019-05-30 18:19:48 -07:00
|
|
|
|
2019-09-30 12:19:22 -07:00
|
|
|
fn consume_tts(
|
|
|
|
&mut self,
|
2019-10-01 11:24:05 -07:00
|
|
|
mut acc: i64, // `i64` because malformed code can have more closing delims than opening.
|
2019-10-01 15:51:50 -07:00
|
|
|
// Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`.
|
|
|
|
modifier: &[(token::TokenKind, i64)],
|
2019-09-30 12:19:22 -07:00
|
|
|
) {
|
|
|
|
while acc > 0 {
|
2019-10-01 11:24:05 -07:00
|
|
|
if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
|
2019-09-30 12:19:22 -07:00
|
|
|
acc += *val;
|
|
|
|
}
|
2019-10-01 15:51:50 -07:00
|
|
|
if self.token.kind == token::Eof {
|
2019-09-30 12:19:22 -07:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
self.bump();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-26 22:19:54 -05:00
|
|
|
/// Replace duplicated recovered parameters with `_` pattern to avoid unnecessary errors.
|
2019-06-01 14:13:57 -07:00
|
|
|
///
|
|
|
|
/// This is necessary because at this point we don't know whether we parsed a function with
|
2019-08-27 13:24:32 +02:00
|
|
|
/// anonymous parameters or a function with names but no types. In order to minimize
|
2019-11-26 22:19:54 -05:00
|
|
|
/// unnecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
|
2019-08-27 13:24:32 +02:00
|
|
|
/// the parameters are *names* (so we don't emit errors about not being able to find `b` in
|
2019-06-01 14:13:57 -07:00
|
|
|
/// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
|
2019-08-27 13:24:32 +02:00
|
|
|
/// we deduplicate them to not complain about duplicated parameter names.
|
2019-10-08 09:35:34 +02:00
|
|
|
pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
|
2019-05-30 18:19:48 -07:00
|
|
|
let mut seen_inputs = FxHashSet::default();
|
|
|
|
for input in fn_inputs.iter_mut() {
|
2019-12-22 17:42:04 -05:00
|
|
|
let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
|
|
|
|
(&input.pat.kind, &input.ty.kind)
|
|
|
|
{
|
2019-05-30 18:19:48 -07:00
|
|
|
Some(*ident)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
if let Some(ident) = opt_ident {
|
|
|
|
if seen_inputs.contains(&ident) {
|
2019-09-26 16:18:31 +01:00
|
|
|
input.pat.kind = PatKind::Wild;
|
2019-05-30 18:19:48 -07:00
|
|
|
}
|
|
|
|
seen_inputs.insert(ident);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-04-28 13:28:07 +08:00
|
|
|
}
|