2019-05-22 16:56:51 -04:00
|
|
|
use crate::ast::{
|
2019-08-27 13:24:32 +02:00
|
|
|
self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
|
2019-05-30 18:19:48 -07:00
|
|
|
Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
|
2019-05-22 16:56:51 -04:00
|
|
|
};
|
2019-07-17 11:40:36 -07:00
|
|
|
use crate::feature_gate::{feature_err, UnstableFeatures};
|
2019-05-09 17:08:55 -04:00
|
|
|
use crate::parse::{SeqSep, PResult, Parser, ParseSess};
|
2019-05-23 12:55:26 -07:00
|
|
|
use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
|
2019-06-05 14:17:56 +03:00
|
|
|
use crate::parse::token::{self, TokenKind};
|
2019-04-28 13:28:07 +08:00
|
|
|
use crate::print::pprust;
|
|
|
|
use crate::ptr::P;
|
2019-05-23 15:31:43 +10:00
|
|
|
use crate::symbol::{kw, sym};
|
2019-04-28 13:28:07 +08:00
|
|
|
use crate::ThinVec;
|
2019-05-23 12:55:26 -07:00
|
|
|
use crate::util::parser::AssocOp;
|
2019-09-19 15:13:40 +08:00
|
|
|
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, pluralise};
|
2019-05-30 18:19:48 -07:00
|
|
|
use rustc_data_structures::fx::FxHashSet;
|
2019-07-24 11:01:30 +02:00
|
|
|
use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError};
|
2019-05-23 12:55:26 -07:00
|
|
|
use log::{debug, trace};
|
2019-07-13 21:15:21 -07:00
|
|
|
use std::mem;
|
2019-05-23 12:55:26 -07:00
|
|
|
|
2019-05-30 18:19:48 -07:00
|
|
|
/// Creates a placeholder argument.
|
2019-08-27 13:24:32 +02:00
|
|
|
crate fn dummy_arg(ident: Ident) -> Param {
|
2019-05-30 18:19:48 -07:00
|
|
|
let pat = P(Pat {
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
2019-09-26 16:18:31 +01:00
|
|
|
kind: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
|
2019-05-30 18:19:48 -07:00
|
|
|
span: ident.span,
|
|
|
|
});
|
|
|
|
let ty = Ty {
|
2019-09-26 17:25:31 +01:00
|
|
|
kind: TyKind::Err,
|
2019-05-30 18:19:48 -07:00
|
|
|
span: ident.span,
|
|
|
|
id: ast::DUMMY_NODE_ID
|
|
|
|
};
|
2019-09-09 09:26:25 -03:00
|
|
|
Param {
|
|
|
|
attrs: ThinVec::default(),
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
pat,
|
|
|
|
span: ident.span,
|
|
|
|
ty: P(ty),
|
|
|
|
is_placeholder: false,
|
|
|
|
}
|
2019-05-30 18:19:48 -07:00
|
|
|
}
|
|
|
|
|
2019-05-23 12:55:26 -07:00
|
|
|
/// Structured parse errors that carry a payload and map to a dedicated
/// error code; rendered into diagnostics by [`Error::span_err`].
pub enum Error {
    /// E0583: a `mod foo;` item whose source file could not be located.
    FileNotFoundForModule {
        mod_name: String,
        default_path: String,
        secondary_path: String,
        dir_path: String,
    },
    /// E0584: a module whose source file exists at both candidate paths.
    DuplicatePaths {
        mod_name: String,
        default_path: String,
        secondary_path: String,
    },
    /// E0585: a doc comment that is not followed by an item it could document.
    UselessDocComment,
    /// E0586: an inclusive range (`..=`) missing its upper bound.
    InclusiveRangeWithNoEnd,
}
|
|
|
|
|
|
|
|
impl Error {
    /// Renders this error as a `DiagnosticBuilder` at span `sp`, consuming
    /// `self`. The caller is responsible for emitting (or cancelling) the
    /// returned diagnostic.
    fn span_err<S: Into<MultiSpan>>(
        self,
        sp: S,
        handler: &errors::Handler,
    ) -> DiagnosticBuilder<'_> {
        match self {
            Error::FileNotFoundForModule {
                ref mod_name,
                ref default_path,
                ref secondary_path,
                ref dir_path,
            } => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0583,
                    "file not found for module `{}`",
                    mod_name,
                );
                err.help(&format!(
                    "name the file either {} or {} inside the directory \"{}\"",
                    default_path,
                    secondary_path,
                    dir_path,
                ));
                err
            }
            Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0584,
                    "file for module `{}` found at both {} and {}",
                    mod_name,
                    default_path,
                    secondary_path,
                );
                err.help("delete or rename one of them to remove the ambiguity");
                err
            }
            Error::UselessDocComment => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0585,
                    "found a documentation comment that doesn't document anything",
                );
                err.help("doc comments must come before what they document, maybe a comment was \
                          intended with `//`?");
                err
            }
            Error::InclusiveRangeWithNoEnd => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0586,
                    "inclusive range with no end",
                );
                err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)");
                err
            }
        }
    }
}
|
2019-04-28 13:28:07 +08:00
|
|
|
|
|
|
|
/// An AST node (type, pattern, or expression) that can be re-parsed as a
/// qualified path (`<Ty>::AssocItem`) during error recovery.
pub trait RecoverQPath: Sized + 'static {
    /// Path grammar to use when re-parsing; expressions are the default.
    const PATH_STYLE: PathStyle = PathStyle::Expr;
    /// Attempts to reinterpret this node as a type (the `<Ty>` part).
    fn to_ty(&self) -> Option<P<Ty>>;
    /// Rebuilds a node of this kind from the recovered qualified path.
    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
}
|
|
|
|
|
|
|
|
impl RecoverQPath for Ty {
|
|
|
|
const PATH_STYLE: PathStyle = PathStyle::Type;
|
|
|
|
fn to_ty(&self) -> Option<P<Ty>> {
|
|
|
|
Some(P(self.clone()))
|
|
|
|
}
|
|
|
|
fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
|
|
|
|
Self {
|
|
|
|
span: path.span,
|
2019-09-26 17:25:31 +01:00
|
|
|
kind: TyKind::Path(qself, path),
|
2019-04-28 13:28:07 +08:00
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl RecoverQPath for Pat {
|
|
|
|
fn to_ty(&self) -> Option<P<Ty>> {
|
|
|
|
self.to_ty()
|
|
|
|
}
|
|
|
|
fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
|
|
|
|
Self {
|
|
|
|
span: path.span,
|
2019-09-26 16:18:31 +01:00
|
|
|
kind: PatKind::Path(qself, path),
|
2019-04-28 13:28:07 +08:00
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl RecoverQPath for Expr {
|
|
|
|
fn to_ty(&self) -> Option<P<Ty>> {
|
|
|
|
self.to_ty()
|
|
|
|
}
|
|
|
|
fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
|
|
|
|
Self {
|
|
|
|
span: path.span,
|
2019-09-26 14:39:48 +01:00
|
|
|
kind: ExprKind::Path(qself, path),
|
2019-04-28 13:28:07 +08:00
|
|
|
attrs: ThinVec::new(),
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<'a> Parser<'a> {
|
2019-05-23 12:55:26 -07:00
|
|
|
    /// Creates (without emitting) a fatal diagnostic at the current token's span.
    pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
        self.span_fatal(self.token.span, m)
    }
|
|
|
|
|
|
|
|
    /// Creates (without emitting) a fatal diagnostic at the given span.
    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_fatal(sp, m)
    }
|
|
|
|
|
|
|
|
    /// Renders a structured [`Error`] as a diagnostic at the given span.
    pub fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
        err.span_err(sp, self.diagnostic())
    }
|
|
|
|
|
|
|
|
    /// Reports an internal compiler error at the current token's span and aborts.
    pub fn bug(&self, m: &str) -> ! {
        self.sess.span_diagnostic.span_bug(self.token.span, m)
    }
|
|
|
|
|
|
|
|
    /// Emits a (non-fatal) error at the given span.
    pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
        self.sess.span_diagnostic.span_err(sp, m)
    }
|
|
|
|
|
|
|
|
    /// Creates (without emitting) an error diagnostic at the given span.
    crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_err(sp, m)
    }
|
|
|
|
|
|
|
|
    /// Reports an internal compiler error at the given span and aborts.
    crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
        self.sess.span_diagnostic.span_bug(sp, m)
    }
|
|
|
|
|
|
|
|
    /// Returns the handler used to emit diagnostics for this parse session.
    crate fn diagnostic(&self) -> &'a errors::Handler {
        &self.sess.span_diagnostic
    }
|
|
|
|
|
2019-07-24 11:01:30 +02:00
|
|
|
    /// Returns the source text covered by `span`, if it is available.
    crate fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
        self.sess.source_map().span_to_snippet(span)
    }
|
|
|
|
|
2019-05-23 12:55:26 -07:00
|
|
|
    /// Builds (without emitting) an "expected identifier, found ..." error,
    /// adding suggestions for escapable keywords (`r#kw`) and stray commas
    /// where they apply.
    crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
        let mut err = self.struct_span_err(
            self.token.span,
            &format!("expected identifier, found {}", self.this_token_descr()),
        );
        if let token::Ident(name, false) = self.token.kind {
            // A keyword in identifier position can be escaped as a raw
            // identifier (`r#keyword`).
            if Ident::new(name, self.token.span).is_raw_guess() {
                err.span_suggestion(
                    self.token.span,
                    "you can escape reserved keywords to use them as identifiers",
                    format!("r#{}", name),
                    Applicability::MaybeIncorrect,
                );
            }
        }
        if let Some(token_descr) = self.token_descr() {
            err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
        } else {
            err.span_label(self.token.span, "expected identifier");
            // Typo like `foo(a,, b)`: a comma directly followed by an
            // identifier — suggest deleting the extra comma.
            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
                err.span_suggestion(
                    self.token.span,
                    "remove this comma",
                    String::new(),
                    Applicability::MachineApplicable,
                );
            }
        }
        err
    }
|
|
|
|
|
2019-05-23 13:10:24 -07:00
|
|
|
    /// Reports that the current token did not match any of the expected
    /// tokens, after attempting several recoveries (closing-delimiter
    /// recovery, `and`/`or` keyword suggestions, missing-semicolon
    /// detection). Returns `Ok(recovered)` when recovery succeeded, or the
    /// diagnostic as `Err` for the caller to emit.
    pub fn expected_one_of_not_found(
        &mut self,
        edible: &[TokenKind],
        inedible: &[TokenKind],
    ) -> PResult<'a, bool /* recovered */> {
        /// Joins token descriptions into `a, b, or c` / `a or b` prose.
        fn tokens_to_string(tokens: &[TokenType]) -> String {
            let mut i = tokens.iter();
            // This might be a sign we need a connect method on `Iterator`.
            let b = i.next()
                     .map_or(String::new(), |t| t.to_string());
            i.enumerate().fold(b, |mut b, (i, a)| {
                if tokens.len() > 2 && i == tokens.len() - 2 {
                    b.push_str(", or ");
                } else if tokens.len() == 2 && i == tokens.len() - 2 {
                    b.push_str(" or ");
                } else {
                    b.push_str(", ");
                }
                b.push_str(&a.to_string());
                b
            })
        }

        // Merge the caller-provided tokens with everything the parser had
        // registered as expected, deduplicated for stable messages.
        let mut expected = edible.iter()
            .map(|x| TokenType::Token(x.clone()))
            .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
            .chain(self.expected_tokens.iter().cloned())
            .collect::<Vec<_>>();
        expected.sort_by_cached_key(|x| x.to_string());
        expected.dedup();
        let expect = tokens_to_string(&expected[..]);
        let actual = self.this_token_to_string();
        let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
            // Too many alternatives would make the label unreadable.
            let short_expect = if expected.len() > 6 {
                format!("{} possible tokens", expected.len())
            } else {
                expect.clone()
            };
            (format!("expected one of {}, found `{}`", expect, actual),
             (self.sess.source_map().next_point(self.prev_span),
              format!("expected one of {} here", short_expect)))
        } else if expected.is_empty() {
            (format!("unexpected token: `{}`", actual),
             (self.prev_span, "unexpected token after this".to_string()))
        } else {
            (format!("expected {}, found `{}`", expect, actual),
             (self.sess.source_map().next_point(self.prev_span),
              format!("expected {} here", expect)))
        };
        self.last_unexpected_token_span = Some(self.token.span);
        let mut err = self.fatal(&msg_exp);
        // `and` / `or` written where `&&` / `||` was meant.
        if self.token.is_ident_named(sym::and) {
            err.span_suggestion_short(
                self.token.span,
                "use `&&` instead of `and` for the boolean operator",
                "&&".to_string(),
                Applicability::MaybeIncorrect,
            );
        }
        if self.token.is_ident_named(sym::or) {
            err.span_suggestion_short(
                self.token.span,
                "use `||` instead of `or` for the boolean operator",
                "||".to_string(),
                Applicability::MaybeIncorrect,
            );
        }
        let sp = if self.token == token::Eof {
            // This is EOF; don't want to point at the following char, but rather the last token.
            self.prev_span
        } else {
            label_sp
        };
        // Try matching the unexpected token against an unclosed delimiter first.
        match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
            TokenType::Token(t) => Some(t.clone()),
            _ => None,
        }).collect::<Vec<_>>(), err) {
            Err(e) => err = e,
            Ok(recovered) => {
                return Ok(recovered);
            }
        }

        let is_semi_suggestable = expected.iter().any(|t| match t {
            TokenType::Token(token::Semi) => true, // We expect a `;` here.
            _ => false,
        }) && ( // A `;` would be expected before the current keyword.
            self.token.is_keyword(kw::Break) ||
            self.token.is_keyword(kw::Continue) ||
            self.token.is_keyword(kw::For) ||
            self.token.is_keyword(kw::If) ||
            self.token.is_keyword(kw::Let) ||
            self.token.is_keyword(kw::Loop) ||
            self.token.is_keyword(kw::Match) ||
            self.token.is_keyword(kw::Return) ||
            self.token.is_keyword(kw::While)
        );
        let sm = self.sess.source_map();
        match (sm.lookup_line(self.token.span.lo()), sm.lookup_line(sp.lo())) {
            (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => {
                // The spans are in different lines, expected `;` and found `let` or `return`.
                // High likelihood that it is only a missing `;`.
                err.span_suggestion_short(
                    label_sp,
                    "a semicolon may be missing here",
                    ";".to_string(),
                    Applicability::MaybeIncorrect,
                );
                err.emit();
                return Ok(true);
            }
            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
                // When the spans are in the same line, it means that the only content between
                // them is whitespace, point at the found token in that case:
                //
                // X |     () => { syntax error };
                //   |                    ^^^^^ expected one of 8 possible tokens here
                //
                // instead of having:
                //
                // X |     () => { syntax error };
                //   |                   -^^^^^ unexpected token
                //   |                   |
                //   |                   expected one of 8 possible tokens here
                err.span_label(self.token.span, label_exp);
            }
            _ if self.prev_span == syntax_pos::DUMMY_SP => {
                // Account for macro context where the previous span might not be
                // available to avoid incorrect output (#54841).
                err.span_label(self.token.span, "unexpected token");
            }
            _ => {
                err.span_label(sp, label_exp);
                err.span_label(self.token.span, "unexpected token");
            }
        }
        self.maybe_annotate_with_ascription(&mut err, false);
        Err(err)
    }
|
|
|
|
|
2019-07-17 11:40:36 -07:00
|
|
|
    /// If the last expression parsed contained a type ascription (`expr: Ty`),
    /// augments `err` with hints: the `:` may have been a typo for `::`, a
    /// missing `;`, or an accidental use of the unstable type-ascription feature.
    pub fn maybe_annotate_with_ascription(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        maybe_expected_semicolon: bool,
    ) {
        if let Some((sp, likely_path)) = self.last_type_ascription {
            let sm = self.sess.source_map();
            let next_pos = sm.lookup_char_pos(self.token.span.lo());
            let op_pos = sm.lookup_char_pos(sp.hi());

            if likely_path {
                err.span_suggestion(
                    sp,
                    "maybe write a path separator here",
                    "::".to_string(),
                    // On stable (ascription disallowed) the fix is certain;
                    // on nightly the user may really have meant an ascription.
                    match self.sess.unstable_features {
                        UnstableFeatures::Disallow => Applicability::MachineApplicable,
                        _ => Applicability::MaybeIncorrect,
                    },
                );
            } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
                err.span_suggestion(
                    sp,
                    "try using a semicolon",
                    ";".to_string(),
                    Applicability::MaybeIncorrect,
                );
            } else if let UnstableFeatures::Disallow = self.sess.unstable_features {
                err.span_label(sp, "tried to parse a type due to this");
            } else {
                err.span_label(sp, "tried to parse a type due to this type ascription");
            }
            if let UnstableFeatures::Disallow = self.sess.unstable_features {
                // Give extra information about type ascription only if it's a nightly compiler.
            } else {
                err.note("`#![feature(type_ascription)]` lets you annotate an expression with a \
                          type: `<expr>: <type>`");
                err.note("for more information, see \
                          https://github.com/rust-lang/rust/issues/23416");
            }
        }
    }
|
|
|
|
|
2019-05-23 12:55:26 -07:00
|
|
|
    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
    /// passes through any errors encountered. Used for error recovery.
    crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
        if let Err(ref mut err) = self.parse_seq_to_before_tokens(
            kets,
            SeqSep::none(),
            TokenExpectType::Expect,
            |p| Ok(p.parse_token_tree()),
        ) {
            // We are only skipping ahead for recovery; swallow any parse
            // error produced while doing so.
            err.cancel();
        }
    }
|
|
|
|
|
|
|
|
    /// This function checks if there are trailing angle brackets and produces
    /// a diagnostic to suggest removing them.
    ///
    /// ```ignore (diagnostic)
    /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
    ///                                                        ^^ help: remove extra angle brackets
    /// ```
    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
        // This function is intended to be invoked after parsing a path segment where there are two
        // cases:
        //
        // 1. A specific token is expected after the path segment.
        //    eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
        //        `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
        // 2. No specific token is expected after the path segment.
        //    eg. `x.foo` (field access)
        //
        // This function is called after parsing `.foo` and before parsing the token `end` (if
        // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
        // `Foo::<Bar>`.

        // We only care about trailing angle brackets if we previously parsed angle bracket
        // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
        // removed in this case:
        //
        // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
        //
        // This case is particularly tricky as we won't notice it just looking at the tokens -
        // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
        // have already been parsed):
        //
        // `x.foo::<u32>>>(3)`
        let parsed_angle_bracket_args = segment.args
            .as_ref()
            .map(|args| args.is_angle_bracketed())
            .unwrap_or(false);

        debug!(
            "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
            parsed_angle_bracket_args,
        );
        if !parsed_angle_bracket_args {
            return;
        }

        // Keep the span at the start so we can highlight the sequence of `>` characters to be
        // removed.
        let lo = self.token.span;

        // We need to look-ahead to see if we have `>` characters without moving the cursor forward
        // (since we might have the field access case and the characters we're eating are
        // actual operators and not trailing characters - ie `x.foo >> 3`).
        let mut position = 0;

        // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
        // many of each (so we can correctly pluralize our error messages) and continue to
        // advance.
        let mut number_of_shr = 0;
        let mut number_of_gt = 0;
        while self.look_ahead(position, |t| {
            trace!("check_trailing_angle_brackets: t={:?}", t);
            if *t == token::BinOp(token::BinOpToken::Shr) {
                number_of_shr += 1;
                true
            } else if *t == token::Gt {
                number_of_gt += 1;
                true
            } else {
                false
            }
        }) {
            position += 1;
        }

        // If we didn't find any trailing `>` characters, then we have nothing to error about.
        debug!(
            "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
            number_of_gt, number_of_shr,
        );
        if number_of_gt < 1 && number_of_shr < 1 {
            return;
        }

        // Finally, double check that we have our end token as otherwise this is the
        // second case.
        if self.look_ahead(position, |t| {
            trace!("check_trailing_angle_brackets: t={:?}", t);
            *t == end
        }) {
            // Eat from where we started until the end token so that parsing can continue
            // as if we didn't have those extra angle brackets.
            self.eat_to_tokens(&[&end]);
            let span = lo.until(self.token.span);

            // Each `>>` accounts for two stray closing brackets.
            let total_num_of_gt = number_of_gt + number_of_shr * 2;
            self.diagnostic()
                .struct_span_err(
                    span,
                    &format!("unmatched angle bracket{}", pluralise!(total_num_of_gt)),
                )
                .span_suggestion(
                    span,
                    &format!("remove extra angle bracket{}", pluralise!(total_num_of_gt)),
                    String::new(),
                    Applicability::MachineApplicable,
                )
                .emit();
        }
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
/// Produces an error if comparison operators are chained (RFC #558).
|
2019-09-29 19:07:26 -07:00
|
|
|
/// We only need to check the LHS, not the RHS, because all comparison ops have same
|
|
|
|
/// precedence and are left-associative.
|
|
|
|
///
|
|
|
|
/// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
|
|
|
|
/// the turbofish syntax. We attempt some heuristic recovery if that is the case.
|
|
|
|
crate fn check_no_chained_comparison(
|
|
|
|
&mut self,
|
|
|
|
lhs: &Expr,
|
|
|
|
outer_op: &AssocOp,
|
|
|
|
) -> PResult<'a, Option<P<Expr>>> {
|
|
|
|
debug_assert!(
|
|
|
|
outer_op.is_comparison(),
|
|
|
|
"check_no_chained_comparison: {:?} is not comparison",
|
|
|
|
outer_op,
|
|
|
|
);
|
2019-09-26 14:39:48 +01:00
|
|
|
match lhs.kind {
|
2019-05-23 12:55:26 -07:00
|
|
|
ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
|
2019-09-30 12:19:22 -07:00
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
// Respan to include both operators.
|
2019-09-29 19:07:26 -07:00
|
|
|
let op_span = op.span.to(self.prev_span);
|
2019-07-24 10:51:20 +02:00
|
|
|
let mut err = self.struct_span_err(
|
|
|
|
op_span,
|
|
|
|
"chained comparison operators require parentheses",
|
|
|
|
);
|
2019-05-23 12:55:26 -07:00
|
|
|
if op.node == BinOpKind::Lt &&
|
|
|
|
*outer_op == AssocOp::Less || // Include `<` to provide this recommendation
|
|
|
|
*outer_op == AssocOp::Greater // even in a case like the following:
|
|
|
|
{ // Foo<Bar<Baz<Qux, ()>>>
|
2019-09-29 19:07:26 -07:00
|
|
|
let msg = "use `::<...>` instead of `<...>` if you meant to specify type \
|
|
|
|
arguments";
|
|
|
|
if *outer_op == AssocOp::Less {
|
|
|
|
let snapshot = self.clone();
|
|
|
|
self.bump();
|
2019-09-30 12:19:22 -07:00
|
|
|
// So far we have parsed `foo<bar<`, consume the rest of the type params
|
|
|
|
let modifiers = vec![
|
|
|
|
(token::Lt, 1),
|
|
|
|
(token::Gt, -1),
|
|
|
|
(token::BinOp(token::Shr), -2),
|
|
|
|
];
|
|
|
|
let early_return = vec![token::Eof];
|
|
|
|
self.consume_tts(1, &modifiers[..], &early_return[..]);
|
|
|
|
|
2019-09-30 12:36:44 -07:00
|
|
|
if !&[
|
|
|
|
token::OpenDelim(token::Paren),
|
|
|
|
token::ModSep,
|
|
|
|
].contains(&self.token.kind) {
|
|
|
|
// We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
|
|
|
|
// parser and bail out.
|
2019-09-29 19:07:26 -07:00
|
|
|
mem::replace(self, snapshot.clone());
|
|
|
|
}
|
|
|
|
}
|
2019-09-30 12:36:44 -07:00
|
|
|
if token::ModSep == self.token.kind {
|
|
|
|
// We have some certainty that this was a bad turbofish at this point.
|
|
|
|
// `foo< bar >::`
|
|
|
|
err.span_suggestion(
|
|
|
|
op_span.shrink_to_lo(),
|
|
|
|
msg,
|
|
|
|
"::".to_string(),
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
);
|
|
|
|
|
|
|
|
let snapshot = self.clone();
|
|
|
|
|
|
|
|
self.bump(); // `::`
|
|
|
|
// Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
|
|
|
|
match self.parse_expr() {
|
|
|
|
Ok(_) => {
|
|
|
|
// 99% certain that the suggestion is correct, continue parsing.
|
|
|
|
err.emit();
|
|
|
|
// FIXME: actually check that the two expressions in the binop are
|
|
|
|
// paths and resynthesize new fn call expression instead of using
|
|
|
|
// `ExprKind::Err` placeholder.
|
|
|
|
return Ok(Some(self.mk_expr(
|
|
|
|
lhs.span.to(self.prev_span),
|
|
|
|
ExprKind::Err,
|
|
|
|
ThinVec::new(),
|
|
|
|
)));
|
|
|
|
}
|
|
|
|
Err(mut err) => {
|
|
|
|
err.cancel();
|
|
|
|
// Not entirely sure now, but we bubble the error up with the
|
|
|
|
// suggestion.
|
|
|
|
mem::replace(self, snapshot);
|
|
|
|
return Err(err);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else if token::OpenDelim(token::Paren) == self.token.kind {
|
2019-09-30 12:19:22 -07:00
|
|
|
// We have high certainty that this was a bad turbofish at this point.
|
|
|
|
// `foo< bar >(`
|
2019-09-29 19:07:26 -07:00
|
|
|
err.span_suggestion(
|
|
|
|
op_span.shrink_to_lo(),
|
|
|
|
msg,
|
|
|
|
"::".to_string(),
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
);
|
2019-09-30 12:19:22 -07:00
|
|
|
|
2019-09-29 19:07:26 -07:00
|
|
|
let snapshot = self.clone();
|
2019-09-30 12:36:44 -07:00
|
|
|
self.bump(); // `(`
|
2019-09-30 12:19:22 -07:00
|
|
|
|
|
|
|
// Consume the fn call arguments.
|
|
|
|
let modifiers = vec![
|
|
|
|
(token::OpenDelim(token::Paren), 1),
|
|
|
|
(token::CloseDelim(token::Paren), -1),
|
|
|
|
];
|
|
|
|
let early_return = vec![token::Eof];
|
|
|
|
self.consume_tts(1, &modifiers[..], &early_return[..]);
|
|
|
|
|
2019-09-29 19:07:26 -07:00
|
|
|
if self.token.kind == token::Eof {
|
2019-09-30 12:19:22 -07:00
|
|
|
// Not entirely sure now, but we bubble the error up with the
|
|
|
|
// suggestion.
|
2019-09-29 19:07:26 -07:00
|
|
|
mem::replace(self, snapshot);
|
|
|
|
return Err(err);
|
|
|
|
} else {
|
2019-09-30 12:19:22 -07:00
|
|
|
// 99% certain that the suggestion is correct, continue parsing.
|
2019-09-29 19:07:26 -07:00
|
|
|
err.emit();
|
2019-09-30 12:19:22 -07:00
|
|
|
// FIXME: actually check that the two expressions in the binop are
|
|
|
|
// paths and resynthesize new fn call expression instead of using
|
|
|
|
// `ExprKind::Err` placeholder.
|
2019-09-29 19:07:26 -07:00
|
|
|
return Ok(Some(self.mk_expr(
|
|
|
|
lhs.span.to(self.prev_span),
|
|
|
|
ExprKind::Err,
|
|
|
|
ThinVec::new(),
|
|
|
|
)));
|
|
|
|
}
|
|
|
|
} else {
|
2019-09-30 12:19:22 -07:00
|
|
|
// All we know is that this is `foo < bar >` and *nothing* else. Try to
|
|
|
|
// be helpful, but don't attempt to recover.
|
2019-09-29 19:07:26 -07:00
|
|
|
err.help(msg);
|
|
|
|
err.help("or use `(...)` if you meant to specify fn arguments");
|
|
|
|
// These cases cause too many knock-down errors, bail out (#61329).
|
|
|
|
}
|
2019-09-05 13:15:42 -07:00
|
|
|
return Err(err);
|
2019-05-23 12:55:26 -07:00
|
|
|
}
|
|
|
|
err.emit();
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
2019-09-29 19:07:26 -07:00
|
|
|
Ok(None)
|
2019-05-23 12:55:26 -07:00
|
|
|
}
|
|
|
|
|
2019-04-28 13:28:07 +08:00
|
|
|
    /// Emits an "ambiguous `+` in a type" error for `impl Trait + ...` /
    /// `dyn Trait + ...` sums parsed in a position where `+` is not allowed,
    /// suggesting the parenthesized form.
    crate fn maybe_report_ambiguous_plus(
        &mut self,
        allow_plus: bool,
        impl_dyn_multi: bool,
        ty: &Ty,
    ) {
        if !allow_plus && impl_dyn_multi {
            // Pretty-print the parsed type wrapped in parentheses as the fix.
            let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
            self.struct_span_err(ty.span, "ambiguous `+` in a type")
                .span_suggestion(
                    ty.span,
                    "use parentheses to disambiguate",
                    sum_with_parens,
                    Applicability::MachineApplicable,
                )
                .emit();
        }
    }
|
|
|
|
|
2019-05-22 16:56:51 -04:00
|
|
|
    /// If `variants` mixes explicit discriminants with tuple/struct variants,
    /// emits the `arbitrary_enum_discriminant` feature-gate error, labelling
    /// each disallowed discriminant and each variant with fields.
    crate fn maybe_report_invalid_custom_discriminants(
        sess: &ParseSess,
        variants: &[ast::Variant],
    ) {
        // Any tuple or struct variant makes explicit discriminants gated.
        let has_fields = variants.iter().any(|variant| match variant.data {
            VariantData::Tuple(..) | VariantData::Struct(..) => true,
            VariantData::Unit(..) => false,
        });

        // Spans of explicit discriminants on unit variants only.
        let discriminant_spans = variants.iter().filter(|variant| match variant.data {
            VariantData::Tuple(..) | VariantData::Struct(..) => false,
            VariantData::Unit(..) => true,
        })
        .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span))
        .collect::<Vec<_>>();

        if !discriminant_spans.is_empty() && has_fields {
            let mut err = feature_err(
                sess,
                sym::arbitrary_enum_discriminant,
                discriminant_spans.clone(),
                crate::feature_gate::GateIssue::Language,
                "custom discriminant values are not allowed in enums with tuple or struct variants",
            );
            for sp in discriminant_spans {
                err.span_label(sp, "disallowed custom discriminant");
            }
            for variant in variants.iter() {
                match &variant.data {
                    VariantData::Struct(..) => {
                        err.span_label(
                            variant.span,
                            "struct variant defined here",
                        );
                    }
                    VariantData::Tuple(..) => {
                        err.span_label(
                            variant.span,
                            "tuple variant defined here",
                        );
                    }
                    VariantData::Unit(..) => {}
                }
            }
            err.emit();
        }
    }
|
|
|
|
|
2019-04-28 13:28:07 +08:00
|
|
|
    /// Recovers from a `+` following a non-path type (E0178), e.g.
    /// `&Foo + Bar`: parses the trailing bounds, emits an error, and for
    /// reference types suggests the parenthesized form `&(Foo + Bar)`.
    crate fn maybe_recover_from_bad_type_plus(
        &mut self,
        allow_plus: bool,
        ty: &Ty,
    ) -> PResult<'a, ()> {
        // Do not add `+` to expected tokens.
        if !allow_plus || !self.token.is_like_plus() {
            return Ok(());
        }

        self.bump(); // `+`
        let bounds = self.parse_generic_bounds(None)?;
        let sum_span = ty.span.to(self.prev_span);

        let mut err = struct_span_err!(
            self.sess.span_diagnostic,
            sum_span,
            E0178,
            "expected a path on the left-hand side of `+`, not `{}`",
            pprust::ty_to_string(ty)
        );

        match ty.kind {
            TyKind::Rptr(ref lifetime, ref mut_ty) => {
                // Re-print the type with the referent and bounds wrapped in
                // parentheses: `&'a mut (Ty + Bound)`.
                let sum_with_parens = pprust::to_string(|s| {
                    s.s.word("&");
                    s.print_opt_lifetime(lifetime);
                    s.print_mutability(mut_ty.mutbl);
                    s.popen();
                    s.print_type(&mut_ty.ty);
                    s.print_type_bounds(" +", &bounds);
                    s.pclose()
                });
                err.span_suggestion(
                    sum_span,
                    "try adding parentheses",
                    sum_with_parens,
                    Applicability::MachineApplicable,
                );
            }
            TyKind::Ptr(..) | TyKind::BareFn(..) => {
                err.span_label(sum_span, "perhaps you forgot parentheses?");
            }
            _ => {
                err.span_label(sum_span, "expected a path");
            }
        }
        err.emit();
        Ok(())
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
    /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
    /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
    ///
    /// Returns the unchanged `base` when recovery is disabled, the next token
    /// is not `::`, or `base` cannot be reinterpreted as a type.
    crate fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
        &mut self,
        base: P<T>,
        allow_recovery: bool,
    ) -> PResult<'a, P<T>> {
        // Do not add `::` to expected tokens.
        if allow_recovery && self.token == token::ModSep {
            if let Some(ty) = base.to_ty() {
                return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
            }
        }
        Ok(base)
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
    /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
    ///
    /// Emits a "missing angle brackets" error with a `<Ty>::Item` suggestion,
    /// then builds a recovered qualified path (`QSelf` with `position: 0`).
    crate fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
        &mut self,
        ty_span: Span,
        ty: P<Ty>,
    ) -> PResult<'a, P<T>> {
        self.expect(&token::ModSep)?;

        // Start with an empty path; the real span is filled in after parsing
        // the segments, so it covers `Ty::Segments`.
        let mut path = ast::Path {
            segments: Vec::new(),
            span: DUMMY_SP,
        };
        self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
        path.span = ty_span.to(self.prev_span);

        // Prefer the user's original source text; fall back to pretty-printing.
        let ty_str = self
            .span_to_snippet(ty_span)
            .unwrap_or_else(|_| pprust::ty_to_string(&ty));
        self.diagnostic()
            .struct_span_err(path.span, "missing angle brackets in associated item path")
            .span_suggestion(
                // This is a best-effort recovery.
                path.span,
                "try",
                format!("<{}>::{}", ty_str, path),
                Applicability::MaybeIncorrect,
            )
            .emit();

        let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
        Ok(P(T::recovered(
            Some(QSelf {
                ty,
                path_span,
                position: 0,
            }),
            path,
        )))
    }
|
|
|
|
|
|
|
|
crate fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
|
|
|
|
if self.eat(&token::Semi) {
|
|
|
|
let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`");
|
|
|
|
err.span_suggestion_short(
|
|
|
|
self.prev_span,
|
|
|
|
"remove this semicolon",
|
|
|
|
String::new(),
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
);
|
|
|
|
if !items.is_empty() {
|
|
|
|
let previous_item = &items[items.len() - 1];
|
2019-09-26 17:51:36 +01:00
|
|
|
let previous_item_kind_name = match previous_item.kind {
|
2019-09-06 03:56:45 +01:00
|
|
|
// Say "braced struct" because tuple-structs and
|
|
|
|
// braceless-empty-struct declarations do take a semicolon.
|
2019-04-28 13:28:07 +08:00
|
|
|
ItemKind::Struct(..) => Some("braced struct"),
|
|
|
|
ItemKind::Enum(..) => Some("enum"),
|
|
|
|
ItemKind::Trait(..) => Some("trait"),
|
|
|
|
ItemKind::Union(..) => Some("union"),
|
|
|
|
_ => None,
|
|
|
|
};
|
|
|
|
if let Some(name) = previous_item_kind_name {
|
|
|
|
err.help(&format!(
|
|
|
|
"{} declarations are not followed by a semicolon",
|
|
|
|
name
|
|
|
|
));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
err.emit();
|
|
|
|
true
|
|
|
|
} else {
|
|
|
|
false
|
|
|
|
}
|
|
|
|
}
|
2019-05-16 13:33:26 -07:00
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
    /// closing delimiter.
    ///
    /// Returns `Ok(true)` when an unclosed-delimiter recovery succeeded, or
    /// `Err` with the constructed diagnostic otherwise.
    pub fn unexpected_try_recover(
        &mut self,
        t: &TokenKind,
    ) -> PResult<'a, bool /* recovered */> {
        let token_str = pprust::token_kind_to_string(t);
        let this_token_str = self.this_token_descr();
        // Pick the pair of spans (previous token, current token) to label,
        // depending on whether we are at EOF and inside a subparser.
        let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
            // Point at the end of the macro call when reaching end of macro arguments.
            (token::Eof, Some(_)) => {
                let sp = self.sess.source_map().next_point(self.token.span);
                (sp, sp)
            }
            // We don't want to point at the following span after DUMMY_SP.
            // This happens when the parser finds an empty TokenStream.
            _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span),
            // EOF, don't want to point at the following char, but rather the last token.
            (token::Eof, None) => (self.prev_span, self.token.span),
            _ => (self.sess.source_map().next_point(self.prev_span), self.token.span),
        };
        let msg = format!(
            "expected `{}`, found {}",
            token_str,
            match (&self.token.kind, self.subparser_name) {
                (token::Eof, Some(origin)) => format!("end of {}", origin),
                _ => this_token_str,
            },
        );
        let mut err = self.struct_span_err(sp, &msg);
        let label_exp = format!("expected `{}`", token_str);
        // Try the unclosed-delimiter recovery first; it consumes `err` and
        // hands it back only when recovery was not possible.
        match self.recover_closing_delimiter(&[t.clone()], err) {
            Err(e) => err = e,
            Ok(recovered) => {
                return Ok(recovered);
            }
        }
        let sm = self.sess.source_map();
        match (sm.lookup_line(prev_sp.lo()), sm.lookup_line(sp.lo())) {
            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
                // When the spans are in the same line, it means that the only content
                // between them is whitespace, point only at the found token.
                err.span_label(sp, label_exp);
            }
            _ => {
                err.span_label(prev_sp, label_exp);
                err.span_label(sp, "unexpected token");
            }
        }
        Err(err)
    }
|
|
|
|
|
2019-07-13 21:15:21 -07:00
|
|
|
crate fn parse_semi_or_incorrect_foreign_fn_body(
|
|
|
|
&mut self,
|
|
|
|
ident: &Ident,
|
|
|
|
extern_sp: Span,
|
|
|
|
) -> PResult<'a, ()> {
|
|
|
|
if self.token != token::Semi {
|
2019-09-06 03:56:45 +01:00
|
|
|
// This might be an incorrect fn definition (#62109).
|
2019-07-13 21:15:21 -07:00
|
|
|
let parser_snapshot = self.clone();
|
|
|
|
match self.parse_inner_attrs_and_block() {
|
|
|
|
Ok((_, body)) => {
|
|
|
|
self.struct_span_err(ident.span, "incorrect `fn` inside `extern` block")
|
|
|
|
.span_label(ident.span, "can't have a body")
|
|
|
|
.span_label(body.span, "this body is invalid here")
|
|
|
|
.span_label(
|
|
|
|
extern_sp,
|
|
|
|
"`extern` blocks define existing foreign functions and `fn`s \
|
2019-07-13 22:25:23 -07:00
|
|
|
inside of them cannot have a body")
|
2019-07-13 21:15:21 -07:00
|
|
|
.help("you might have meant to write a function accessible through ffi, \
|
|
|
|
which can be done by writing `extern fn` outside of the \
|
|
|
|
`extern` block")
|
|
|
|
.note("for more information, visit \
|
|
|
|
https://doc.rust-lang.org/std/keyword.extern.html")
|
|
|
|
.emit();
|
|
|
|
}
|
|
|
|
Err(mut err) => {
|
|
|
|
err.cancel();
|
|
|
|
mem::replace(self, parser_snapshot);
|
|
|
|
self.expect(&token::Semi)?;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
self.bump();
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
    /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
    ///
    /// Emits the "incorrect use of `await`" error and returns the covered span
    /// together with the recovered `ExprKind::Await`.
    crate fn parse_incorrect_await_syntax(
        &mut self,
        lo: Span,
        await_sp: Span,
    ) -> PResult<'a, (Span, ExprKind)> {
        if self.token == token::Not {
            // Handle `await!(<expr>)`.
            self.expect(&token::Not)?;
            self.expect(&token::OpenDelim(token::Paren))?;
            let expr = self.parse_expr()?;
            self.expect(&token::CloseDelim(token::Paren))?;
            let sp = self.error_on_incorrect_await(lo, self.prev_span, &expr, false);
            return Ok((sp, ExprKind::Await(expr)))
        }

        let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
        let expr = if self.token == token::OpenDelim(token::Brace) {
            // Handle `await { <expr> }`.
            // This needs to be handled separately from the next arm to avoid
            // interpreting `await { <expr> }?` as `<expr>?.await`.
            self.parse_block_expr(
                None,
                self.token.span,
                BlockCheckMode::Default,
                ThinVec::new(),
            )
        } else {
            self.parse_expr()
        }.map_err(|mut err| {
            // Point back at the `await` keyword on any nested parse error.
            err.span_label(await_sp, "while parsing this incorrect await expression");
            err
        })?;
        let sp = self.error_on_incorrect_await(lo, expr.span, &expr, is_question);
        Ok((sp, ExprKind::Await(expr)))
    }
|
|
|
|
|
|
|
|
    /// Emits the "incorrect use of `await`" error with a postfix `.await`
    /// suggestion and returns the span covering the whole incorrect expression.
    ///
    /// `is_question` appends `?` to the suggestion (for `await? <expr>`).
    fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
        // Prefer the original source text; fall back to pretty-printing.
        let expr_str = self.span_to_snippet(expr.span)
            .unwrap_or_else(|_| pprust::expr_to_string(&expr));
        let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
        let sp = lo.to(hi);
        // A trailing `?` on the operand makes the rewrite ambiguous, so the
        // suggestion is downgraded to MaybeIncorrect in that case.
        let app = match expr.kind {
            ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
            _ => Applicability::MachineApplicable,
        };
        self.struct_span_err(sp, "incorrect use of `await`")
            .span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
            .emit();
        sp
    }
|
2019-05-16 14:31:07 -07:00
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// If encountering `future.await()`, consumes and emits an error.
    ///
    /// Only fires on an empty parenthesis pair `()`; the suggestion removes it
    /// since `.await` is not a method call.
    crate fn recover_from_await_method_call(&mut self) {
        if self.token == token::OpenDelim(token::Paren) &&
            self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
        {
            // future.await()
            let lo = self.token.span;
            self.bump(); // (
            let sp = lo.to(self.token.span);
            self.bump(); // )
            self.struct_span_err(sp, "incorrect use of `await`")
                .span_suggestion(
                    sp,
                    "`await` is not a method call, remove the parentheses",
                    String::new(),
                    Applicability::MachineApplicable,
                ).emit()
        }
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// Recovers a situation like `for ( $pat in $expr )`
    /// and suggest writing `for $pat in $expr` instead.
    ///
    /// This should be called before parsing the `$block`.
    ///
    /// Returns the (possibly unwrapped) pattern; when no stray `)` is found,
    /// the input pattern is returned unchanged.
    crate fn recover_parens_around_for_head(
        &mut self,
        pat: P<Pat>,
        expr: &Expr,
        begin_paren: Option<Span>,
    ) -> P<Pat> {
        match (&self.token.kind, begin_paren) {
            (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
                self.bump();

                let pat_str = self
                    // Remove the `(` from the span of the pattern:
                    .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
                    .unwrap_or_else(|_| pprust::pat_to_string(&pat));

                self.struct_span_err(self.prev_span, "unexpected closing `)`")
                    .span_label(begin_par_sp, "opening `(`")
                    .span_suggestion(
                        begin_par_sp.to(self.prev_span),
                        "remove parenthesis in `for` loop",
                        format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
                        // With e.g. `for (x) in y)` this would replace `(x) in y)`
                        // with `x) in y)` which is syntactically invalid.
                        // However, this is prevented before we get here.
                        Applicability::MachineApplicable,
                    )
                    .emit();

                // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
                pat.and_then(|pat| match pat.kind {
                    PatKind::Paren(pat) => pat,
                    _ => P(pat),
                })
            }
            _ => pat,
        }
    }
|
|
|
|
|
2019-05-16 14:31:07 -07:00
|
|
|
crate fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
|
|
|
|
self.token.is_ident() &&
|
|
|
|
if let ast::ExprKind::Path(..) = node { true } else { false } &&
|
|
|
|
!self.token.is_reserved_ident() && // v `foo:bar(baz)`
|
|
|
|
self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) ||
|
|
|
|
self.look_ahead(1, |t| t == &token::Lt) && // `foo:bar<baz`
|
|
|
|
self.look_ahead(2, |t| t.is_ident()) ||
|
|
|
|
self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
|
|
|
|
self.look_ahead(2, |t| t.is_ident()) ||
|
2019-07-17 11:40:36 -07:00
|
|
|
self.look_ahead(1, |t| t == &token::ModSep) &&
|
|
|
|
(self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz`
|
|
|
|
self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
|
2019-05-16 14:31:07 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
crate fn recover_seq_parse_error(
|
|
|
|
&mut self,
|
|
|
|
delim: token::DelimToken,
|
|
|
|
lo: Span,
|
|
|
|
result: PResult<'a, P<Expr>>,
|
|
|
|
) -> P<Expr> {
|
|
|
|
match result {
|
|
|
|
Ok(x) => x,
|
|
|
|
Err(mut err) => {
|
|
|
|
err.emit();
|
2019-09-06 03:56:45 +01:00
|
|
|
// Recover from parse error.
|
2019-05-16 14:31:07 -07:00
|
|
|
self.consume_block(delim);
|
|
|
|
self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
    /// Tries to recover from a missing closing delimiter: if one of the
    /// expected `tokens` matches a recorded unclosed delimiter, emits `err`
    /// with an insertion suggestion and acts as if the delimiter were present
    /// (`Ok(true)`); otherwise hands `err` back to the caller.
    crate fn recover_closing_delimiter(
        &mut self,
        tokens: &[TokenKind],
        mut err: DiagnosticBuilder<'a>,
    ) -> PResult<'a, bool> {
        let mut pos = None;
        // We want to use the last closing delim that would apply.
        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
                && Some(self.token.span) > unmatched.unclosed_span
            {
                pos = Some(i);
            }
        }
        match pos {
            Some(pos) => {
                // Recover and assume that the detected unclosed delimiter was meant for
                // this location. Emit the diagnostic and act as if the delimiter was
                // present for the parser's sake.

                // Don't attempt to recover from this unclosed delimiter more than once.
                let unmatched = self.unclosed_delims.remove(pos);
                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));

                // We want to suggest the inclusion of the closing delimiter where it makes
                // the most sense, which is immediately after the last token:
                //
                //  {foo(bar {}}
                //  -      ^
                //  |      |
                //  |      help: `)` may belong here
                //  |
                //  unclosed delimiter
                if let Some(sp) = unmatched.unclosed_span {
                    err.span_label(sp, "unclosed delimiter");
                }
                err.span_suggestion_short(
                    self.sess.source_map().next_point(self.prev_span),
                    &format!("{} may belong here", delim.to_string()),
                    delim.to_string(),
                    Applicability::MaybeIncorrect,
                );
                err.emit();
                self.expected_tokens.clear(); // reduce errors
                Ok(true)
            }
            _ => Err(err),
        }
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid.
    ///
    /// Consumes the full visibility qualifier and emits an "unnecessary
    /// visibility qualifier" error; a failed visibility parse emits its own
    /// error instead.
    crate fn eat_bad_pub(&mut self) {
        if self.token.is_keyword(kw::Pub) {
            match self.parse_visibility(false) {
                Ok(vis) => {
                    self.diagnostic()
                        .struct_span_err(vis.span, "unnecessary visibility qualifier")
                        .span_label(vis.span, "`pub` not permitted here")
                        .emit();
                }
                Err(mut err) => err.emit(),
            }
        }
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// Eats tokens until we can be relatively sure we reached the end of the
    /// statement. This is something of a best-effort heuristic.
    ///
    /// We terminate when we find an unmatched `}` (without consuming it).
    crate fn recover_stmt(&mut self) {
        // Default mode: never break early on `;` or on a brace-delimited block.
        self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
    }
|
|
|
|
|
2019-09-06 03:56:45 +01:00
|
|
|
    /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
    /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
    /// approximate -- it can mean we break too early due to macros, but that
    /// should only lead to sub-optimal recovery, not inaccurate parsing).
    ///
    /// If `break_on_block` is `Break`, then we will stop consuming tokens
    /// after finding (and consuming) a brace-delimited block.
    crate fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
        // Depth counters track brace/bracket nesting so `;`/`,` are only
        // honored at the top level.
        let mut brace_depth = 0;
        let mut bracket_depth = 0;
        let mut in_block = false;
        debug!("recover_stmt_ enter loop (semi={:?}, block={:?})",
               break_on_semi, break_on_block);
        loop {
            debug!("recover_stmt_ loop {:?}", self.token);
            match self.token.kind {
                token::OpenDelim(token::DelimToken::Brace) => {
                    brace_depth += 1;
                    self.bump();
                    // A top-level `{` starts the block we may break after.
                    if break_on_block == BlockMode::Break &&
                       brace_depth == 1 &&
                       bracket_depth == 0 {
                        in_block = true;
                    }
                }
                token::OpenDelim(token::DelimToken::Bracket) => {
                    bracket_depth += 1;
                    self.bump();
                }
                token::CloseDelim(token::DelimToken::Brace) => {
                    // An unmatched `}` ends the statement; do NOT consume it.
                    if brace_depth == 0 {
                        debug!("recover_stmt_ return - close delim {:?}", self.token);
                        break;
                    }
                    brace_depth -= 1;
                    self.bump();
                    if in_block && bracket_depth == 0 && brace_depth == 0 {
                        debug!("recover_stmt_ return - block end {:?}", self.token);
                        break;
                    }
                }
                token::CloseDelim(token::DelimToken::Bracket) => {
                    bracket_depth -= 1;
                    // Clamp at zero: a stray `]` should not poison later depth
                    // tracking.
                    if bracket_depth < 0 {
                        bracket_depth = 0;
                    }
                    self.bump();
                }
                token::Eof => {
                    debug!("recover_stmt_ return - Eof");
                    break;
                }
                token::Semi => {
                    self.bump();
                    if break_on_semi == SemiColonMode::Break &&
                       brace_depth == 0 &&
                       bracket_depth == 0 {
                        debug!("recover_stmt_ return - Semi");
                        break;
                    }
                }
                // `Comma` mode additionally stops at a top-level `,` (used
                // when recovering inside delimited lists). Note: the debug
                // message still says "Semi" here.
                token::Comma if break_on_semi == SemiColonMode::Comma &&
                                brace_depth == 0 &&
                                bracket_depth == 0 =>
                {
                    debug!("recover_stmt_ return - Semi");
                    break;
                }
                _ => {
                    self.bump()
                }
            }
        }
    }
|
|
|
|
|
|
|
|
    /// Recovers from the common `for _ in in bar {}` typo: eats the duplicated
    /// `in` keyword and suggests removing it.
    crate fn check_for_for_in_in_typo(&mut self, in_span: Span) {
        if self.eat_keyword(kw::In) {
            // a common typo: `for _ in in bar {}`
            self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`")
                .span_suggestion_short(
                    in_span.until(self.prev_span),
                    "remove the duplicated `in`",
                    String::new(),
                    Applicability::MachineApplicable,
                )
                .emit();
        }
    }
|
|
|
|
|
2019-09-30 00:20:03 +02:00
|
|
|
crate fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
|
2019-05-23 12:55:26 -07:00
|
|
|
let token_str = self.this_token_descr();
|
|
|
|
let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str));
|
2019-06-07 13:31:13 +03:00
|
|
|
err.span_label(self.token.span, "expected `;` or `{`");
|
2019-05-23 12:55:26 -07:00
|
|
|
Err(err)
|
|
|
|
}
|
|
|
|
|
2019-08-27 13:24:32 +02:00
|
|
|
    /// Consumes a doc comment or an attribute (`#[...]`) that was incorrectly
    /// placed on a function parameter's type, emitting an error for each.
    crate fn eat_incorrect_doc_comment_for_param_type(&mut self) {
        if let token::DocComment(_) = self.token.kind {
            self.struct_span_err(
                self.token.span,
                "documentation comments cannot be applied to a function parameter's type",
            )
            .span_label(self.token.span, "doc comments are not allowed here")
            .emit();
            self.bump();
        } else if self.token == token::Pound && self.look_ahead(1, |t| {
            *t == token::OpenDelim(token::Bracket)
        }) {
            let lo = self.token.span;
            // Skip every token until next possible arg.
            while self.token != token::CloseDelim(token::Bracket) {
                self.bump();
            }
            let sp = lo.to(self.token.span);
            self.bump();
            self.struct_span_err(
                sp,
                "attributes cannot be applied to a function parameter's type",
            )
            .span_label(sp, "attributes are not allowed here")
            .emit();
        }
    }
|
|
|
|
|
2019-08-27 13:24:32 +02:00
|
|
|
    /// Recovers a function parameter that is missing its type, adding
    /// suggestions to `err` for C-style declarations (`fn foo(String s)`) and
    /// anonymous parameters (`fn foo(a, b)` / `fn foo(usize, usize)`).
    ///
    /// Returns the recovered parameter name when one could be extracted.
    crate fn parameter_without_type(
        &mut self,
        err: &mut DiagnosticBuilder<'_>,
        pat: P<ast::Pat>,
        require_name: bool,
        is_trait_item: bool,
    ) -> Option<Ident> {
        // If we find a pattern followed by an identifier, it could be an (incorrect)
        // C-style parameter declaration.
        if self.check_ident() && self.look_ahead(1, |t| {
            *t == token::Comma || *t == token::CloseDelim(token::Paren)
        }) { // `fn foo(String s) {}`
            let ident = self.parse_ident().unwrap();
            let span = pat.span.with_hi(ident.span.hi());

            err.span_suggestion(
                span,
                "declare the type after the parameter binding",
                String::from("<identifier>: <type>"),
                Applicability::HasPlaceholders,
            );
            return Some(ident);
        } else if let PatKind::Ident(_, ident, _) = pat.kind {
            if require_name && (
                is_trait_item ||
                self.token == token::Comma ||
                self.token == token::CloseDelim(token::Paren)
            ) { // `fn foo(a, b) {}` or `fn foo(usize, usize) {}`
                err.span_suggestion(
                    pat.span,
                    "if this was a parameter name, give it a type",
                    format!("{}: TypeName", ident),
                    Applicability::HasPlaceholders,
                );
                err.span_suggestion(
                    pat.span,
                    "if this is a type, explicitly ignore the parameter name",
                    format!("_: {}", ident),
                    Applicability::MachineApplicable,
                );
                err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
                return Some(ident);
            }
        }
        None
    }
|
|
|
|
|
2019-05-23 12:54:27 -07:00
|
|
|
    /// Recovers a `pat: Ty` argument in positions where patterns are not
    /// allowed (methods without bodies): emits E0642 with a suggestion to use
    /// `_`, and returns a wildcard pattern in place of the original one.
    crate fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
        let pat = self.parse_pat(Some("argument name"))?;
        self.expect(&token::Colon)?;
        let ty = self.parse_ty()?;

        self.diagnostic()
            .struct_span_err_with_code(
                pat.span,
                "patterns aren't allowed in methods without bodies",
                DiagnosticId::Error("E0642".into()),
            )
            .span_suggestion_short(
                pat.span,
                "give this argument a name or use an underscore to ignore it",
                "_".to_owned(),
                Applicability::MachineApplicable,
            )
            .emit();

        // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
        let pat = P(Pat {
            kind: PatKind::Wild,
            span: pat.span,
            id: ast::DUMMY_NODE_ID
        });
        Ok((pat, ty))
    }
|
|
|
|
|
2019-08-27 13:24:32 +02:00
|
|
|
    /// Handles a misplaced `self` parameter: neutralizes its type with
    /// `TyKind::Err`, emits an error tailored to trait vs. free-function
    /// context, and returns the parameter so parsing can continue.
    crate fn recover_bad_self_param(
        &mut self,
        mut param: ast::Param,
        is_trait_item: bool,
    ) -> PResult<'a, ast::Param> {
        let sp = param.pat.span;
        // Err out the type so later passes don't cascade errors from it.
        param.ty.kind = TyKind::Err;
        let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function");
        if is_trait_item {
            err.span_label(sp, "must be the first associated function parameter");
        } else {
            err.span_label(sp, "not valid as function parameter");
            err.note("`self` is only valid as the first parameter of an associated function");
        }
        err.emit();
        Ok(param)
    }
|
|
|
|
|
2019-05-16 14:31:07 -07:00
|
|
|
    /// Consumes tokens up to and including the matching closing delimiter of
    /// kind `delim`, tracking nesting depth; also stops at EOF or at a
    /// `NoDelim` close (end of a token-stream fragment).
    crate fn consume_block(&mut self, delim: token::DelimToken) {
        let mut brace_depth = 0;
        loop {
            if self.eat(&token::OpenDelim(delim)) {
                brace_depth += 1;
            } else if self.eat(&token::CloseDelim(delim)) {
                if brace_depth == 0 {
                    // This close matches the block we were asked to consume.
                    return;
                } else {
                    brace_depth -= 1;
                    continue;
                }
            } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
                return;
            } else {
                self.bump();
            }
        }
    }
|
|
|
|
|
2019-05-21 23:16:46 -07:00
|
|
|
    /// Builds an "expected expression, found ..." diagnostic. At EOF inside a
    /// subparser (macro arguments), points just past the last token; otherwise
    /// points at the current token. Adds a "parentheses needed" note when the
    /// position matches a recorded ambiguous block-expression parse.
    crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
        let (span, msg) = match (&self.token.kind, self.subparser_name) {
            (&token::Eof, Some(origin)) => {
                let sp = self.sess.source_map().next_point(self.token.span);
                (sp, format!("expected expression, found end of {}", origin))
            }
            _ => (self.token.span, format!(
                "expected expression, found {}",
                self.this_token_descr(),
            )),
        };
        let mut err = self.struct_span_err(span, &msg);
        let sp = self.sess.source_map().start_point(self.token.span);
        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
            self.sess.expr_parentheses_needed(&mut err, *sp, None);
        }
        err.span_label(span, "expected expression");
        err
    }
|
2019-05-30 18:19:48 -07:00
|
|
|
|
2019-09-30 12:19:22 -07:00
|
|
|
fn consume_tts(
|
|
|
|
&mut self,
|
|
|
|
mut acc: i64,
|
|
|
|
modifier: &[(token::TokenKind, i64)], // Not using FxHasMap and FxHashSet due to
|
|
|
|
early_return: &[token::TokenKind], // `token::TokenKind: !Eq + !Hash`.
|
|
|
|
) {
|
|
|
|
while acc > 0 {
|
|
|
|
if let Some((_, val)) = modifier.iter().filter(|(t, _)| *t == self.token.kind).next() {
|
|
|
|
acc += *val;
|
|
|
|
}
|
|
|
|
if early_return.contains(&self.token.kind) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
self.bump();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-27 13:24:32 +02:00
|
|
|
/// Replace duplicated recovered parameters with `_` pattern to avoid unecessary errors.
|
2019-06-01 14:13:57 -07:00
|
|
|
///
|
|
|
|
/// This is necessary because at this point we don't know whether we parsed a function with
|
2019-08-27 13:24:32 +02:00
|
|
|
/// anonymous parameters or a function with names but no types. In order to minimize
|
|
|
|
/// unecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
|
|
|
|
/// the parameters are *names* (so we don't emit errors about not being able to find `b` in
|
2019-06-01 14:13:57 -07:00
|
|
|
/// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
|
2019-08-27 13:24:32 +02:00
|
|
|
/// we deduplicate them to not complain about duplicated parameter names.
|
|
|
|
crate fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
|
2019-05-30 18:19:48 -07:00
|
|
|
let mut seen_inputs = FxHashSet::default();
|
|
|
|
for input in fn_inputs.iter_mut() {
|
|
|
|
let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = (
|
2019-09-26 17:25:31 +01:00
|
|
|
&input.pat.kind, &input.ty.kind,
|
2019-05-30 18:19:48 -07:00
|
|
|
) {
|
|
|
|
Some(*ident)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
if let Some(ident) = opt_ident {
|
|
|
|
if seen_inputs.contains(&ident) {
|
2019-09-26 16:18:31 +01:00
|
|
|
input.pat.kind = PatKind::Wild;
|
2019-05-30 18:19:48 -07:00
|
|
|
}
|
|
|
|
seen_inputs.insert(ident);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-04-28 13:28:07 +08:00
|
|
|
}
|