Auto merge of #109128 - chenyukang:yukang/remove-type-ascription, r=estebank
Remove type ascription from parser and diagnostics

Mostly based on https://github.com/rust-lang/rust/pull/106826. Part of #101728.

r? `@estebank`
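For context, a minimal sketch (illustrative only, not part of this diff) of the `expr: Type` expression syntax the parser stops accepting after this change, and the single-colon typo that the new `parse_path_single_colon` recovery added below is aimed at:

```rust
fn main() {
    // Before this PR, nightly + `#![feature(type_ascription)]` allowed ascribing a type
    // to an expression with a colon; the parser no longer accepts this form:
    // let x = 1u64: u64; // now a parse error

    // A common typo the new diagnostics target: a single `:` where `::` was meant.
    // The added recovery suggests a double colon ("path separator must be a double colon"):
    // let v = Vec:new();

    let v: Vec<u64> = Vec::new(); // type annotations on bindings are unaffected
    println!("{}", v.len());
}
```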
commit 98c33e47a4

97 changed files with 762 additions and 1150 deletions
@@ -1589,7 +1589,6 @@ pub enum ClosureBinder {
pub struct MacCall {
pub path: Path,
pub args: P<DelimArgs>,
pub prior_type_ascription: Option<(Span, bool)>,
}

impl MacCall {

@@ -631,7 +631,7 @@ pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
}

pub fn noop_visit_mac<T: MutVisitor>(mac: &mut MacCall, vis: &mut T) {
let MacCall { path, args, prior_type_ascription: _ } = mac;
let MacCall { path, args } = mac;
vis.visit_path(path);
visit_delim_args(args, vis);
}
@@ -53,8 +53,6 @@ pub enum AssocOp {
DotDot,
/// `..=` range
DotDotEq,
/// `:`
Colon,
}

#[derive(PartialEq, Debug)]

@@ -96,7 +94,6 @@ impl AssocOp {
token::DotDotEq => Some(DotDotEq),
// DotDotDot is no longer supported, but we need some way to display the error
token::DotDotDot => Some(DotDotEq),
token::Colon => Some(Colon),
// `<-` should probably be `< -`
token::LArrow => Some(Less),
_ if t.is_keyword(kw::As) => Some(As),

@@ -133,7 +130,7 @@ impl AssocOp {
pub fn precedence(&self) -> usize {
use AssocOp::*;
match *self {
As | Colon => 14,
As => 14,
Multiply | Divide | Modulus => 13,
Add | Subtract => 12,
ShiftLeft | ShiftRight => 11,

@@ -156,7 +153,7 @@ impl AssocOp {
Assign | AssignOp(_) => Fixity::Right,
As | Multiply | Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd
| BitXor | BitOr | Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual
| LAnd | LOr | Colon => Fixity::Left,
| LAnd | LOr => Fixity::Left,
DotDot | DotDotEq => Fixity::None,
}
}

@@ -166,8 +163,9 @@ impl AssocOp {
match *self {
Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual => true,
Assign | AssignOp(_) | As | Multiply | Divide | Modulus | Add | Subtract
| ShiftLeft | ShiftRight | BitAnd | BitXor | BitOr | LAnd | LOr | DotDot | DotDotEq
| Colon => false,
| ShiftLeft | ShiftRight | BitAnd | BitXor | BitOr | LAnd | LOr | DotDot | DotDotEq => {
false
}
}
}

@@ -177,7 +175,7 @@ impl AssocOp {
Assign | AssignOp(_) => true,
Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual | As | Multiply
| Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd | BitXor
| BitOr | LAnd | LOr | DotDot | DotDotEq | Colon => false,
| BitOr | LAnd | LOr | DotDot | DotDotEq => false,
}
}

@@ -202,7 +200,7 @@ impl AssocOp {
BitOr => Some(BinOpKind::BitOr),
LAnd => Some(BinOpKind::And),
LOr => Some(BinOpKind::Or),
Assign | AssignOp(_) | As | DotDot | DotDotEq | Colon => None,
Assign | AssignOp(_) | As | DotDot | DotDotEq => None,
}
}

@@ -223,10 +221,9 @@ impl AssocOp {
Greater | // `{ 42 } > 3`
GreaterEqual | // `{ 42 } >= 3`
AssignOp(_) | // `{ 42 } +=`
As | // `{ 42 } as usize`
// Equal | // `{ 42 } == { 42 }` Accepting these here would regress incorrect
// NotEqual | // `{ 42 } != { 42 }` struct literals parser recovery.
Colon, // `{ 42 }: usize`
// NotEqual | // `{ 42 } != { 42 } struct literals parser recovery.
As // `{ 42 } as usize`
)
}
}

@@ -254,7 +251,6 @@ pub enum ExprPrecedence {
Binary(BinOpKind),

Cast,
Type,

Assign,
AssignOp,

@@ -313,7 +309,6 @@ impl ExprPrecedence {
// Binop-like expr kinds, handled by `AssocOp`.
ExprPrecedence::Binary(op) => AssocOp::from_ast_binop(op).precedence() as i8,
ExprPrecedence::Cast => AssocOp::As.precedence() as i8,
ExprPrecedence::Type => AssocOp::Colon.precedence() as i8,

ExprPrecedence::Assign |
ExprPrecedence::AssignOp => AssocOp::Assign.precedence() as i8,
@@ -341,10 +341,16 @@ impl<'a> State<'a> {
self.print_type(ty);
}
ast::ExprKind::Type(expr, ty) => {
let prec = AssocOp::Colon.precedence() as i8;
self.print_expr_maybe_paren(expr, prec);
self.word_space(":");
self.word("type_ascribe!(");
self.ibox(0);
self.print_expr(expr);

self.word(",");
self.space_if_not_bol();
self.print_type(ty);

self.end();
self.word(")");
}
ast::ExprKind::Let(pat, scrutinee, _) => {
self.print_let(pat, scrutinee);
@@ -68,9 +68,7 @@ pub fn parse_asm_args<'a>(
if !p.eat(&token::Comma) {
if allow_templates {
// After a template string, we always expect *only* a comma...
let mut err = diag.create_err(errors::AsmExpectedComma { span: p.token.span });
p.maybe_annotate_with_ascription(&mut err, false);
return Err(err);
return Err(diag.create_err(errors::AsmExpectedComma { span: p.token.span }));
} else {
// ...after that delegate to `expect` to also include the other expected tokens.
return Err(p.expect(&token::Comma).err().unwrap());
@@ -61,7 +61,6 @@ pub fn expand_assert<'cx>(
delim: MacDelimiter::Parenthesis,
tokens,
}),
prior_type_ascription: None,
})),
);
expr_if_not(cx, call_site_span, cond_expr, then, None)

@@ -182,7 +182,6 @@ impl<'cx, 'a> Context<'cx, 'a> {
delim: MacDelimiter::Parenthesis,
tokens: initial.into_iter().chain(captures).collect::<TokenStream>(),
}),
prior_type_ascription: None,
})),
)
}
@@ -63,7 +63,6 @@ fn expand<'cx>(
delim: MacDelimiter::Parenthesis,
tokens: tts,
}),
prior_type_ascription: None,
})),
),
)
@@ -992,7 +992,6 @@ pub struct ExpansionData {
pub depth: usize,
pub module: Rc<ModuleData>,
pub dir_ownership: DirOwnership,
pub prior_type_ascription: Option<(Span, bool)>,
/// Some parent node that is close to this macro call
pub lint_node_id: NodeId,
pub is_trailing_mac: bool,

@@ -1043,7 +1042,6 @@ impl<'a> ExtCtxt<'a> {
depth: 0,
module: Default::default(),
dir_ownership: DirOwnership::Owned { relative: None },
prior_type_ascription: None,
lint_node_id: ast::CRATE_NODE_ID,
is_trailing_mac: false,
},
@@ -657,8 +657,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
}
SyntaxExtensionKind::LegacyBang(expander) => {
let prev = self.cx.current_expansion.prior_type_ascription;
self.cx.current_expansion.prior_type_ascription = mac.prior_type_ascription;
let tok_result = expander.expand(self.cx, span, mac.args.tokens.clone());
let result = if let Some(result) = fragment_kind.make_from(tok_result) {
result

@@ -666,7 +664,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
self.error_wrong_fragment_kind(fragment_kind, &mac, span);
fragment_kind.dummy(span)
};
self.cx.current_expansion.prior_type_ascription = prev;
result
}
_ => unreachable!(),
@@ -250,8 +250,7 @@ fn expand_macro<'cx>(
trace_macros_note(&mut cx.expansions, sp, msg);
}

let mut p = Parser::new(sess, tts, false, None);
p.last_type_ascription = cx.current_expansion.prior_type_ascription;
let p = Parser::new(sess, tts, false, None);

if is_local {
cx.resolver.record_macro_rule_usage(node_id, i);
@@ -21,7 +21,6 @@ pub fn placeholder(
delim: ast::MacDelimiter::Parenthesis,
tokens: ast::tokenstream::TokenStream::new(Vec::new()),
}),
prior_type_ascription: None,
})
}
@@ -1407,10 +1407,16 @@ impl<'a> State<'a> {
self.print_type(ty);
}
hir::ExprKind::Type(expr, ty) => {
let prec = AssocOp::Colon.precedence() as i8;
self.print_expr_maybe_paren(expr, prec);
self.word_space(":");
self.word("type_ascribe!(");
self.ibox(0);
self.print_expr(expr);

self.word(",");
self.space_if_not_bol();
self.print_type(ty);

self.end();
self.word(")");
}
hir::ExprKind::DropTemps(init) => {
// Print `{`:
@@ -685,10 +685,9 @@ enum ArmType {
/// For example, if we are constructing a witness for the match against
///
/// ```compile_fail,E0004
/// # #![feature(type_ascription)]
/// struct Pair(Option<(u32, u32)>, bool);
/// # fn foo(p: Pair) {
/// match (p: Pair) {
/// match p {
/// Pair(None, _) => {}
/// Pair(_, false) => {}
/// }
@@ -420,6 +420,15 @@ parse_maybe_fn_typo_with_impl = you might have meant to write `impl` instead of
parse_expected_fn_path_found_fn_keyword = expected identifier, found keyword `fn`
.suggestion = use `Fn` to refer to the trait

parse_path_single_colon = path separator must be a double colon
.suggestion = use a double colon instead

parse_colon_as_semi = statements are terminated with a semicolon
.suggestion = use a semicolon instead

parse_type_ascription_removed =
if you meant to annotate an expression with a type, the type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>

parse_where_clause_before_tuple_struct_body = where clauses are not allowed before tuple struct bodies
.label = unexpected where clause
.name_label = while parsing this tuple struct
@@ -1340,6 +1340,28 @@ pub(crate) struct ExpectedFnPathFoundFnKeyword {
pub fn_token_span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_path_single_colon)]
pub(crate) struct PathSingleColon {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = "::")]
pub span: Span,

#[note(parse_type_ascription_removed)]
pub type_ascription: Option<()>,
}

#[derive(Diagnostic)]
#[diag(parse_colon_as_semi)]
pub(crate) struct ColonAsSemi {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = ";")]
pub span: Span,

#[note(parse_type_ascription_removed)]
pub type_ascription: Option<()>,
}

#[derive(Diagnostic)]
#[diag(parse_where_clause_before_tuple_struct_body)]
pub(crate) struct WhereClauseBeforeTupleStructBody {
@@ -4,7 +4,7 @@ use super::{
TokenExpectType, TokenType,
};
use crate::errors::{
AmbiguousPlus, AttributeOnParamType, BadQPathStage2, BadTypePlus, BadTypePlusSub,
AmbiguousPlus, AttributeOnParamType, BadQPathStage2, BadTypePlus, BadTypePlusSub, ColonAsSemi,
ComparisonOperatorsCannotBeChained, ComparisonOperatorsCannotBeChainedSugg,
ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything,
DocCommentOnParamType, DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,

@@ -84,6 +84,7 @@ impl RecoverQPath for Ty {
}

impl RecoverQPath for Pat {
const PATH_STYLE: PathStyle = PathStyle::Pat;
fn to_ty(&self) -> Option<P<Ty>> {
self.to_ty()
}

@@ -663,7 +664,6 @@ impl<'a> Parser<'a> {
err.span_label(sp, label_exp);
err.span_label(self.token.span, "unexpected token");
}
self.maybe_annotate_with_ascription(&mut err, false);
Err(err)
}

@@ -788,59 +788,6 @@ impl<'a> Parser<'a> {
None
}

pub fn maybe_annotate_with_ascription(
&mut self,
err: &mut Diagnostic,
maybe_expected_semicolon: bool,
) {
if let Some((sp, likely_path)) = self.last_type_ascription.take() {
let sm = self.sess.source_map();
let next_pos = sm.lookup_char_pos(self.token.span.lo());
let op_pos = sm.lookup_char_pos(sp.hi());

let allow_unstable = self.sess.unstable_features.is_nightly_build();

if likely_path {
err.span_suggestion(
sp,
"maybe write a path separator here",
"::",
if allow_unstable {
Applicability::MaybeIncorrect
} else {
Applicability::MachineApplicable
},
);
self.sess.type_ascription_path_suggestions.borrow_mut().insert(sp);
} else if op_pos.line != next_pos.line && maybe_expected_semicolon {
err.span_suggestion(
sp,
"try using a semicolon",
";",
Applicability::MaybeIncorrect,
);
} else if allow_unstable {
err.span_label(sp, "tried to parse a type due to this type ascription");
} else {
err.span_label(sp, "tried to parse a type due to this");
}
if allow_unstable {
// Give extra information about type ascription only if it's a nightly compiler.
err.note(
"`#![feature(type_ascription)]` lets you annotate an expression with a type: \
`<expr>: <type>`",
);
if !likely_path {
// Avoid giving too much info when it was likely an unrelated typo.
err.note(
"see issue #23416 <https://github.com/rust-lang/rust/issues/23416> \
for more information",
);
}
}
}
}

/// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {

@@ -1622,12 +1569,36 @@ impl<'a> Parser<'a> {
}

pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
if self.eat(&token::Semi) {
if self.eat(&token::Semi) || self.recover_colon_as_semi() {
return Ok(());
}
self.expect(&token::Semi).map(drop) // Error unconditionally
}

pub(super) fn recover_colon_as_semi(&mut self) -> bool {
let line_idx = |span: Span| {
self.sess
.source_map()
.span_to_lines(span)
.ok()
.and_then(|lines| Some(lines.lines.get(0)?.line_index))
};

if self.may_recover()
&& self.token == token::Colon
&& self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span))
{
self.sess.emit_err(ColonAsSemi {
span: self.token.span,
type_ascription: self.sess.unstable_features.is_nightly_build().then_some(()),
});
self.bump();
return true;
}

false
}

/// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
/// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
pub(super) fn recover_incorrect_await_syntax(

@@ -1790,24 +1761,6 @@ impl<'a> Parser<'a> {
}
}

pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
(self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish.
self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()))
|| self.token.is_ident() &&
matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) &&
!self.token.is_reserved_ident() && // v `foo:bar(baz)`
self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Parenthesis))
|| self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)) // `foo:bar {`
|| self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
self.look_ahead(2, |t| t == &token::Lt) &&
self.look_ahead(3, |t| t.is_ident())
|| self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
self.look_ahead(2, |t| t.is_ident())
|| self.look_ahead(1, |t| t == &token::ModSep)
&& (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz`
self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
}

pub(super) fn recover_seq_parse_error(
&mut self,
delim: Delimiter,

@@ -1902,7 +1855,6 @@ impl<'a> Parser<'a> {
&& brace_depth == 0
&& bracket_depth == 0 =>
{
debug!("recover_stmt_ return - Semi");
break;
}
_ => self.bump(),
@@ -174,10 +174,8 @@ impl<'a> Parser<'a> {
self.parse_expr_prefix(attrs)?
}
};
let last_type_ascription_set = self.last_type_ascription.is_some();

if !self.should_continue_as_assoc_expr(&lhs) {
self.last_type_ascription = None;
return Ok(lhs);
}

@@ -301,9 +299,6 @@ impl<'a> Parser<'a> {
if op == AssocOp::As {
lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
continue;
} else if op == AssocOp::Colon {
lhs = self.parse_assoc_op_ascribe(lhs, lhs_span)?;
continue;
} else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
// If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
// generalise it to the Fixity::None code.

@@ -364,7 +359,7 @@ impl<'a> Parser<'a> {
let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
self.mk_expr(span, aopexpr)
}
AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
AssocOp::As | AssocOp::DotDot | AssocOp::DotDotEq => {
self.span_bug(span, "AssocOp should have been handled by special case")
}
};

@@ -373,9 +368,7 @@ impl<'a> Parser<'a> {
break;
}
}
if last_type_ascription_set {
self.last_type_ascription = None;
}

Ok(lhs)
}

@@ -743,7 +736,7 @@ impl<'a> Parser<'a> {
(
// `foo: `
ExprKind::Path(None, ast::Path { segments, .. }),
TokenKind::Ident(kw::For | kw::Loop | kw::While, false),
token::Ident(kw::For | kw::Loop | kw::While, false),
) if segments.len() == 1 => {
let snapshot = self.create_snapshot_for_diagnostic();
let label = Label {

@@ -838,21 +831,19 @@ impl<'a> Parser<'a> {
&mut self,
cast_expr: P<Expr>,
) -> PResult<'a, P<Expr>> {
if let ExprKind::Type(_, _) = cast_expr.kind {
panic!("ExprKind::Type must not be parsed");
}

let span = cast_expr.span;
let (cast_kind, maybe_ascription_span) =
if let ExprKind::Type(ascripted_expr, _) = &cast_expr.kind {
("type ascription", Some(ascripted_expr.span.shrink_to_hi().with_hi(span.hi())))
} else {
("cast", None)
};

let with_postfix = self.parse_expr_dot_or_call_with_(cast_expr, span)?;

// Check if an illegal postfix operator has been added after the cast.
// If the resulting expression is not a cast, it is an illegal postfix operator.
if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) {
if !matches!(with_postfix.kind, ExprKind::Cast(_, _)) {
let msg = format!(
"{cast_kind} cannot be followed by {}",
"cast cannot be followed by {}",
match with_postfix.kind {
ExprKind::Index(_, _) => "indexing",
ExprKind::Try(_) => "`?`",

@@ -878,44 +869,13 @@ impl<'a> Parser<'a> {
);
};

// If type ascription is "likely an error", the user will already be getting a useful
// help message, and doesn't need a second.
if self.last_type_ascription.map_or(false, |last_ascription| last_ascription.1) {
self.maybe_annotate_with_ascription(&mut err, false);
} else if let Some(ascription_span) = maybe_ascription_span {
let is_nightly = self.sess.unstable_features.is_nightly_build();
if is_nightly {
suggest_parens(&mut err);
}
err.span_suggestion(
ascription_span,
&format!(
"{}remove the type ascription",
if is_nightly { "alternatively, " } else { "" }
),
"",
if is_nightly {
Applicability::MaybeIncorrect
} else {
Applicability::MachineApplicable
},
);
} else {
suggest_parens(&mut err);
}
suggest_parens(&mut err);

err.emit();
};
Ok(with_postfix)
}

fn parse_assoc_op_ascribe(&mut self, lhs: P<Expr>, lhs_span: Span) -> PResult<'a, P<Expr>> {
let maybe_path = self.could_ascription_be_path(&lhs.kind);
self.last_type_ascription = Some((self.prev_token.span, maybe_path));
let lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?;
self.sess.gated_spans.gate(sym::type_ascription, lhs.span);
Ok(lhs)
}

/// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`.
fn parse_expr_borrow(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
self.expect_and()?;

@@ -1010,7 +970,7 @@ impl<'a> Parser<'a> {
};
if has_dot {
// expr.f
e = self.parse_expr_dot_suffix(lo, e)?;
e = self.parse_dot_suffix_expr(lo, e)?;
continue;
}
if self.expr_is_complete(&e) {

@@ -1024,13 +984,7 @@ impl<'a> Parser<'a> {
}
}

fn look_ahead_type_ascription_as_field(&mut self) -> bool {
self.look_ahead(1, |t| t.is_ident())
&& self.look_ahead(2, |t| t == &token::Colon)
&& self.look_ahead(3, |t| t.can_begin_expr())
}

fn parse_expr_dot_suffix(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
match self.token.uninterpolate().kind {
token::Ident(..) => self.parse_dot_suffix(base, lo),
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {

@@ -1183,9 +1137,7 @@ impl<'a> Parser<'a> {
/// Parse a function call expression, `expr(...)`.
fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead_type_ascription_as_field()
{
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
} else {
None

@@ -1216,7 +1168,6 @@ impl<'a> Parser<'a> {
if !self.may_recover() {
return None;
}

match (seq.as_mut(), snapshot) {
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
snapshot.bump(); // `(`

@@ -1260,9 +1211,7 @@ impl<'a> Parser<'a> {
return Some(self.mk_expr_err(span));
}
Ok(_) => {}
Err(mut err) => {
err.emit();
}
Err(err) => err.cancel(),
}
}
_ => {}

@@ -1516,7 +1465,6 @@ impl<'a> Parser<'a> {
let mac = P(MacCall {
path,
args: self.parse_delim_args()?,
prior_type_ascription: self.last_type_ascription,
});
(lo.to(self.prev_token.span), ExprKind::MacCall(mac))
} else if self.check(&token::OpenDelim(Delimiter::Brace))

@@ -1535,7 +1483,7 @@ impl<'a> Parser<'a> {
}

/// Parse `'label: $expr`. The label is already parsed.
fn parse_expr_labeled(
pub(super) fn parse_expr_labeled(
&mut self,
label_: Label,
mut consume_colon: bool,

@@ -3013,6 +2961,11 @@ impl<'a> Parser<'a> {
} else {
e.span_label(pth.span, "while parsing this struct");
}

if !recover {
return Err(e);
}

e.emit();

// If the next token is a comma, then try to parse

@@ -3024,6 +2977,7 @@ impl<'a> Parser<'a> {
break;
}
}

None
}
};
@@ -443,7 +443,7 @@ impl<'a> Parser<'a> {
Ok(args) => {
self.eat_semi_for_macro_if_needed(&args);
self.complain_if_pub_macro(vis, false);
Ok(MacCall { path, args, prior_type_ascription: self.last_type_ascription })
Ok(MacCall { path, args })
}

Err(mut err) => {
@@ -148,9 +148,6 @@ pub struct Parser<'a> {
max_angle_bracket_count: u32,

last_unexpected_token_span: Option<Span>,
/// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
/// looked like it could have been a mistyped path or literal `Option:Some(42)`).
pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
subparser_name: Option<&'static str>,
capture_state: CaptureState,

@@ -165,7 +162,7 @@ pub struct Parser<'a> {
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
// it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
rustc_data_structures::static_assert_size!(Parser<'_>, 272);

/// Stores span information about a closure.
#[derive(Clone)]

@@ -470,7 +467,6 @@ impl<'a> Parser<'a> {
unmatched_angle_bracket_count: 0,
max_angle_bracket_count: 0,
last_unexpected_token_span: None,
last_type_ascription: None,
subparser_name,
capture_state: CaptureState {
capturing: Capturing::No,

@@ -941,10 +937,14 @@ impl<'a> Parser<'a> {
// propagate the help message from sub error 'e' to main error 'expect_err;
expect_err.children.push(xx.clone());
}
expect_err.emit();

e.cancel();
break;
if self.token == token::Colon {
// we will try to recover in `maybe_recover_struct_lit_bad_delims`
return Err(expect_err);
} else {
expect_err.emit();
break;
}
}
}
}
@@ -406,11 +406,11 @@ impl<'a> Parser<'a> {
// Parse pattern starting with a path
let (qself, path) = if self.eat_lt() {
// Parse a qualified path
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
let (qself, path) = self.parse_qpath(PathStyle::Pat)?;
(Some(qself), path)
} else {
// Parse an unqualified path
(None, self.parse_path(PathStyle::Expr)?)
(None, self.parse_path(PathStyle::Pat)?)
};
let span = lo.to(self.prev_token.span);

@@ -666,7 +666,7 @@ impl<'a> Parser<'a> {
fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
self.bump();
let args = self.parse_delim_args()?;
let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
let mac = P(MacCall { path, args });
Ok(PatKind::MacCall(mac))
}

@@ -789,11 +789,11 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let (qself, path) = if self.eat_lt() {
// Parse a qualified path
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
let (qself, path) = self.parse_qpath(PathStyle::Pat)?;
(Some(qself), path)
} else {
// Parse an unqualified path
(None, self.parse_path(PathStyle::Expr)?)
(None, self.parse_path(PathStyle::Pat)?)
};
let hi = self.prev_token.span;
Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path)))
@@ -1,5 +1,6 @@
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::errors::PathSingleColon;
use crate::{errors, maybe_whole};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};

@@ -8,7 +9,7 @@ use rustc_ast::{
AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
Path, PathSegment, QSelf,
};
use rustc_errors::{Applicability, PResult};
use rustc_errors::{Applicability, IntoDiagnostic, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym, Ident};
use std::mem;

@@ -24,7 +25,19 @@ pub enum PathStyle {
/// In all such contexts the non-path interpretation is preferred by default for practical
/// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
/// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
///
/// Also, a path may never be followed by a `:`. This means that we can eagerly recover if
/// we encounter it.
Expr,
/// The same as `Expr`, but may be followed by a `:`.
/// For example, this code:
/// ```rust
/// struct S;
///
/// let S: S;
/// // ^ Followed by a `:`
/// ```
Pat,
/// In other contexts, notably in types, no ambiguity exists and paths can be written
/// without the disambiguator, e.g., `x<y>` - unambiguously a path.
/// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.

@@ -38,6 +51,12 @@ pub enum PathStyle {
Mod,
}

impl PathStyle {
fn has_generic_ambiguity(&self) -> bool {
matches!(self, Self::Expr | Self::Pat)
}
}

impl<'a> Parser<'a> {
/// Parses a qualified path.
/// Assumes that the leading `<` has been parsed already.

@@ -183,7 +202,6 @@ impl<'a> Parser<'a> {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
self.parse_path_segments(&mut segments, style, ty_generics)?;

Ok(Path { segments, span: lo.to(self.prev_token.span), tokens: None })
}

@@ -195,7 +213,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, ()> {
loop {
let segment = self.parse_path_segment(style, ty_generics)?;
if style == PathStyle::Expr {
if style.has_generic_ambiguity() {
// In order to check for trailing angle brackets, we must have finished
// recursing (`parse_path_segment` can indirectly call this function),
// that is, the next token must be the highlighted part of the below example:

@@ -217,6 +235,29 @@ impl<'a> Parser<'a> {
segments.push(segment);

if self.is_import_coupler() || !self.eat(&token::ModSep) {
if style == PathStyle::Expr
&& self.may_recover()
&& self.token == token::Colon
&& self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
{
// Emit a special error message for `a::b:c` to help users
// otherwise, `a: c` might have meant to introduce a new binding
if self.token.span.lo() == self.prev_token.span.hi()
&& self.look_ahead(1, |token| self.token.span.hi() == token.span.lo())
{
self.bump(); // bump past the colon
self.sess.emit_err(PathSingleColon {
span: self.prev_token.span,
type_ascription: self
.sess
.unstable_features
.is_nightly_build()
.then_some(()),
});
}
continue;
}

return Ok(());
}
}

@@ -270,8 +311,25 @@ impl<'a> Parser<'a> {
ty_generics,
)?;
self.expect_gt().map_err(|mut err| {
// Try to recover a `:` into a `::`
if self.token == token::Colon
&& self.look_ahead(1, |token| {
token.is_ident() && !token.is_reserved_ident()
})
{
err.cancel();
err = PathSingleColon {
span: self.token.span,
type_ascription: self
.sess
.unstable_features
.is_nightly_build()
.then_some(()),
}
.into_diagnostic(self.diagnostic());
}
// Attempt to find places where a missing `>` might belong.
if let Some(arg) = args
else if let Some(arg) = args
.iter()
.rev()
.find(|arg| !matches!(arg, AngleBracketedArg::Constraint(_)))
@@ -10,6 +10,8 @@ use super::{
use crate::errors;
use crate::maybe_whole;

use crate::errors::MalformedLoopLabel;
use ast::Label;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};

@@ -19,7 +21,8 @@ use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
use rustc_ast::{StmtKind, DUMMY_NODE_ID};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym};
use rustc_span::symbol::{kw, sym, Ident};

use std::mem;
use thin_vec::{thin_vec, ThinVec};

@@ -186,7 +189,7 @@ impl<'a> Parser<'a> {
_ => MacStmtStyle::NoBraces,
};

let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
let mac = P(MacCall { path, args });

let kind = if (style == MacStmtStyle::Braces
&& self.token != token::Dot

@@ -546,10 +549,36 @@ impl<'a> Parser<'a> {
}
let stmt = match self.parse_full_stmt(recover) {
Err(mut err) if recover.yes() => {
self.maybe_annotate_with_ascription(&mut err, false);
if let Some(ref mut snapshot) = snapshot {
snapshot.recover_diff_marker();
}
if self.token == token::Colon {
// if next token is following a colon, it's likely a path
// and we can suggest a path separator
let ident_span = self.prev_token.span;
self.bump();
if self.token.span.lo() == self.prev_token.span.hi() {
err.span_suggestion_verbose(
self.prev_token.span,
"maybe write a path separator here",
"::",
Applicability::MaybeIncorrect,
);
}
if self.look_ahead(1, |token| token == &token::Eq) {
err.span_suggestion_verbose(
ident_span.shrink_to_lo(),
"you might have meant to introduce a new binding",
"let ",
Applicability::MaybeIncorrect,
);
}
if self.sess.unstable_features.is_nightly_build() {
// FIXME(Nilstrieb): Remove this again after a few months.
err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>");
}
}

err.emit();
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
Some(self.mk_stmt_err(self.token.span))

@@ -580,19 +609,25 @@ impl<'a> Parser<'a> {
};

let mut eat_semi = true;
let mut add_semi_to_stmt = false;

match &mut stmt.kind {
// Expression without semicolon.
StmtKind::Expr(expr)
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => {
// Just check for errors and recover; do not eat semicolon yet.
// `expect_one_of` returns PResult<'a, bool /* recovered */>
let replace_with_err =
match self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]) {

let expect_result = self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);

let replace_with_err = 'break_recover: {
match expect_result {
// Recover from parser, skip type error to avoid extra errors.
Ok(true) => true,
Err(mut e) => {
if let TokenKind::DocComment(..) = self.token.kind &&
let Ok(snippet) = self.span_to_snippet(self.token.span) {
Ok(true) => true,
Err(mut e) => {
if let TokenKind::DocComment(..) = self.token.kind
&& let Ok(snippet) = self.span_to_snippet(self.token.span)
{
let sp = self.token.span;
let marker = &snippet[..3];
let (comment_marker, doc_comment_marker) = marker.split_at(2);

@@ -606,21 +641,72 @@ impl<'a> Parser<'a> {
format!("{} {}", comment_marker, doc_comment_marker),
Applicability::MaybeIncorrect,
);
}

if let Err(mut e) =
self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
{
if recover.no() {
return Err(e);
}
e.emit();
self.recover_stmt();

if self.recover_colon_as_semi() {
// recover_colon_as_semi has already emitted a nicer error.
e.delay_as_bug();
add_semi_to_stmt = true;
eat_semi = false;

break 'break_recover false;
}

match &expr.kind {
ExprKind::Path(None, ast::Path { segments, .. }) if segments.len() == 1 => {
if self.token == token::Colon
&& self.look_ahead(1, |token| {
token.is_whole_block() || matches!(
token.kind,
token::Ident(kw::For | kw::Loop | kw::While, false)
| token::OpenDelim(Delimiter::Brace)
)
})
{
let snapshot = self.create_snapshot_for_diagnostic();
let label = Label {
ident: Ident::from_str_and_span(
&format!("'{}", segments[0].ident),
segments[0].ident.span,
),
};
match self.parse_expr_labeled(label, false) {
Ok(labeled_expr) => {
e.delay_as_bug();
self.sess.emit_err(MalformedLoopLabel {
span: label.ident.span,
correct_label: label.ident,
});
*expr = labeled_expr;
break 'break_recover false;
}
Err(err) => {
err.cancel();
self.restore_snapshot(snapshot);
}
}
}
}
_ => {}
}

if let Err(mut e) =
self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
{
if recover.no() {
return Err(e);
}
e.emit();
self.recover_stmt();
}

true

}
true
Ok(false) => false
}
_ => false
};

if replace_with_err {
// We already emitted an error, so don't emit another type error
let sp = expr.span.to(self.prev_token.span);

@@ -643,9 +729,10 @@ impl<'a> Parser<'a> {
StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => eat_semi = false,
}

if eat_semi && self.eat(&token::Semi) {
if add_semi_to_stmt || (eat_semi && self.eat(&token::Semi)) {
stmt = stmt.add_trailing_semicolon();
}

stmt.span = stmt.span.to(self.prev_token.span);
Ok(Some(stmt))
}
@@ -317,7 +317,6 @@ impl<'a> Parser<'a> {
let msg = format!("expected type, found {}", super::token_descr(&self.token));
let mut err = self.struct_span_err(self.token.span, &msg);
err.span_label(self.token.span, "expected type");
self.maybe_annotate_with_ascription(&mut err, true);
return Err(err);
};

@@ -651,11 +650,7 @@ impl<'a> Parser<'a> {
let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
if self.eat(&token::Not) {
// Macro invocation in type position
Ok(TyKind::MacCall(P(MacCall {
path,
args: self.parse_delim_args()?,
prior_type_ascription: self.last_type_ascription,
})))
Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
// `Trait1 + Trait2 + 'a`
self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
@@ -1345,7 +1345,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
ribs: Option<&PerNS<Vec<Rib<'a>>>>,
ignore_binding: Option<&'a NameBinding<'a>>,
) -> PathResult<'a> {
debug!("resolve_path(path={:?}, opt_ns={:?}, finalize={:?})", path, opt_ns, finalize);
debug!(
"resolve_path(path={:?}, opt_ns={:?}, finalize={:?}) path_len: {}",
path,
opt_ns,
finalize,
path.len()
);

let mut module = None;
let mut allow_super = true;
@@ -548,9 +548,6 @@ struct DiagnosticMetadata<'ast> {
/// they are used (in a `break` or `continue` statement)
unused_labels: FxHashMap<NodeId, Span>,

/// Only used for better errors on `fn(): fn()`.
current_type_ascription: Vec<Span>,

/// Only used for better errors on `let x = { foo: bar };`.
/// In the case of a parse error with `let x = { foo: bar, };`, this isn't needed, it's only
/// needed for cases where this parses as a correct type ascription.

@@ -4064,17 +4061,8 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
}
}
}
ExprKind::Type(ref type_expr, ref ty) => {
// `ParseSess::type_ascription_path_suggestions` keeps spans of colon tokens in
// type ascription. Here we are trying to retrieve the span of the colon token as
// well, but only if it's written without spaces `expr:Ty` and therefore confusable
// with `expr::Ty`, only in this case it will match the span from
// `type_ascription_path_suggestions`.
self.diagnostic_metadata
.current_type_ascription
.push(type_expr.span.between(ty.span));
ExprKind::Type(ref _type_expr, ref _ty) => {
visit::walk_expr(self, expr);
self.diagnostic_metadata.current_type_ascription.pop();
}
// `async |x| ...` gets desugared to `|x| async {...}`, so we need to
// resolve the arguments within the proper scopes so that usages of them inside the
@@ -28,7 +28,7 @@ use rustc_span::edit_distance::find_best_match_for_name;
use rustc_span::edition::Edition;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, Span};
use rustc_span::Span;

use std::iter;
use std::ops::Deref;

@@ -350,18 +350,15 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
return (err, candidates);
}

if !self.type_ascription_suggestion(&mut err, base_error.span) {
let mut fallback =
self.suggest_trait_and_bounds(&mut err, source, res, span, &base_error);
let mut fallback = self.suggest_trait_and_bounds(&mut err, source, res, span, &base_error);

// if we have suggested using pattern matching, then don't add needless suggestions
// for typos.
fallback |= self.suggest_typo(&mut err, source, path, span, &base_error);
// if we have suggested using pattern matching, then don't add needless suggestions
// for typos.
fallback |= self.suggest_typo(&mut err, source, path, span, &base_error);

if fallback {
// Fallback label.
err.span_label(base_error.span, &base_error.fallback_label);
}
if fallback {
// Fallback label.
err.span_label(base_error.span, &base_error.fallback_label);
}
self.err_code_special_cases(&mut err, source, path, span);

@@ -494,24 +491,6 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
.filter(|(_, enum_ty_path)| !enum_ty_path.starts_with("std::prelude::"))
.collect();
if !enum_candidates.is_empty() {
if let (PathSource::Type, Some(span)) =
(source, self.diagnostic_metadata.current_type_ascription.last())
{
if self
.r
.tcx
.sess
.parse_sess
.type_ascription_path_suggestions
.borrow()
.contains(span)
{
// Already reported this issue on the lhs of the type ascription.
err.downgrade_to_delayed_bug();
return (true, candidates);
}
}

enum_candidates.sort();

// Contextualize for E0412 "cannot find type", but don't belabor the point

@@ -1393,26 +1372,6 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
Res::Def(DefKind::Enum, def_id),
PathSource::TupleStruct(..) | PathSource::Expr(..),
) => {
if self
.diagnostic_metadata
.current_type_ascription
.last()
.map(|sp| {
self.r
.tcx
.sess
.parse_sess
.type_ascription_path_suggestions
.borrow()
.contains(&sp)
})
.unwrap_or(false)
{
err.downgrade_to_delayed_bug();
// We already suggested changing `:` into `::` during parsing.
return false;
}

self.suggest_using_enum_variant(err, source, def_id, span);
}
(Res::Def(DefKind::Struct, def_id), source) if ns == ValueNS => {

@@ -1817,80 +1776,6 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
})
}

/// Only used in a specific case of type ascription suggestions
fn get_colon_suggestion_span(&self, start: Span) -> Span {
let sm = self.r.tcx.sess.source_map();
start.to(sm.next_point(start))
}

fn type_ascription_suggestion(&self, err: &mut Diagnostic, base_span: Span) -> bool {
let sm = self.r.tcx.sess.source_map();
let base_snippet = sm.span_to_snippet(base_span);
if let Some(&sp) = self.diagnostic_metadata.current_type_ascription.last() {
if let Ok(snippet) = sm.span_to_snippet(sp) {
let len = snippet.trim_end().len() as u32;
if snippet.trim() == ":" {
let colon_sp =
sp.with_lo(sp.lo() + BytePos(len - 1)).with_hi(sp.lo() + BytePos(len));
let mut show_label = true;
if sm.is_multiline(sp) {
err.span_suggestion_short(
colon_sp,
"maybe you meant to write `;` here",
";",
Applicability::MaybeIncorrect,
);
} else {
let after_colon_sp =
self.get_colon_suggestion_span(colon_sp.shrink_to_hi());
if snippet.len() == 1 {
// `foo:bar`
err.span_suggestion(
colon_sp,
"maybe you meant to write a path separator here",
"::",
Applicability::MaybeIncorrect,
);
show_label = false;
if !self
.r
.tcx
.sess
.parse_sess
.type_ascription_path_suggestions
.borrow_mut()
.insert(colon_sp)
{
err.downgrade_to_delayed_bug();
}
}
if let Ok(base_snippet) = base_snippet {
// Try to find an assignment
let eq_span = sm.span_look_ahead(after_colon_sp, Some("="), Some(50));
if let Ok(ref snippet) = sm.span_to_snippet(eq_span) && snippet == "=" {
err.span_suggestion(
base_span,
"maybe you meant to write an assignment here",
format!("let {}", base_snippet),
Applicability::MaybeIncorrect,
);
show_label = false;
}
}
}
if show_label {
err.span_label(
base_span,
"expecting a type here because of type ascription",
);
}
return show_label;
}
}
}
false
}

// try to give a suggestion for this pattern: `name = blah`, which is common in other languages
// suggest `let name = blah` to introduce a new binding
fn let_binding_suggestion(&mut self, err: &mut Diagnostic, ident_span: Span) -> bool {
@@ -214,8 +214,6 @@ pub struct ParseSess {
pub env_depinfo: Lock<FxHashSet<(Symbol, Option<Symbol>)>>,
/// File paths accessed during the build.
pub file_depinfo: Lock<FxHashSet<Symbol>>,
/// All the type ascriptions expressions that have had a suggestion for likely path typo.
pub type_ascription_path_suggestions: Lock<FxHashSet<Span>>,
/// Whether cfg(version) should treat the current release as incomplete
pub assume_incomplete_release: bool,
/// Spans passed to `proc_macro::quote_span`. Each span has a numerical

@@ -258,7 +256,6 @@ impl ParseSess {
reached_eof: AtomicBool::new(false),
env_depinfo: Default::default(),
file_depinfo: Default::default(),
type_ascription_path_suggestions: Default::default(),
assume_incomplete_release: false,
proc_macro_quoted_spans: Default::default(),
attr_id_generator: AttrIdGenerator::new(),