Rip it out
My type ascription
Oh rip it out
Ah
If you think we live too much then
You can sacrifice diagnostics
Don't mix your garbage
Into my syntax
So many weird hacks keep diagnostics alive
Yet I don't even step outside
So many bad diagnostics keep tyasc alive
Yet tyasc doesn't even bother to survive!
parent 2034b6d23c
commit c63b6a437e
97 changed files with 951 additions and 954 deletions
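For context, the user-facing change is the removal of the nightly-only `expr: Type` expression syntax from the parser; ascription survives internally only through the `type_ascribe!` builtin that the pretty-printers below now emit. A minimal sketch of the stable alternatives users write instead (the removed form appears only in the comment; this snippet is illustrative, not part of the commit):

fn main() {
    // The removed nightly syntax would have been written as `"42".parse(): i32`.
    // Stable code pins the type by annotating the binding or using a turbofish:
    let a: i32 = "42".parse().unwrap();
    let b = "42".parse::<i32>().unwrap();
    assert_eq!(a, b);
}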
@@ -1589,7 +1589,6 @@ pub enum ClosureBinder {
pub struct MacCall {
pub path: Path,
pub args: P<DelimArgs>,
pub prior_type_ascription: Option<(Span, bool)>,
}

impl MacCall {

@@ -631,7 +631,7 @@ pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
}

pub fn noop_visit_mac<T: MutVisitor>(mac: &mut MacCall, vis: &mut T) {
let MacCall { path, args, prior_type_ascription: _ } = mac;
let MacCall { path, args } = mac;
vis.visit_path(path);
visit_delim_args(args, vis);
}

@@ -53,8 +53,6 @@ pub enum AssocOp {
DotDot,
/// `..=` range
DotDotEq,
/// `:`
Colon,
}

#[derive(PartialEq, Debug)]

@@ -96,7 +94,6 @@ impl AssocOp {
token::DotDotEq => Some(DotDotEq),
// DotDotDot is no longer supported, but we need some way to display the error
token::DotDotDot => Some(DotDotEq),
token::Colon => Some(Colon),
// `<-` should probably be `< -`
token::LArrow => Some(Less),
_ if t.is_keyword(kw::As) => Some(As),

@@ -133,7 +130,7 @@ impl AssocOp {
pub fn precedence(&self) -> usize {
use AssocOp::*;
match *self {
As | Colon => 14,
As => 14,
Multiply | Divide | Modulus => 13,
Add | Subtract => 12,
ShiftLeft | ShiftRight => 11,

@@ -156,7 +153,7 @@ impl AssocOp {
Assign | AssignOp(_) => Fixity::Right,
As | Multiply | Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd
| BitXor | BitOr | Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual
| LAnd | LOr | Colon => Fixity::Left,
| LAnd | LOr => Fixity::Left,
DotDot | DotDotEq => Fixity::None,
}
}

@@ -166,8 +163,9 @@ impl AssocOp {
match *self {
Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual => true,
Assign | AssignOp(_) | As | Multiply | Divide | Modulus | Add | Subtract
| ShiftLeft | ShiftRight | BitAnd | BitXor | BitOr | LAnd | LOr | DotDot | DotDotEq
| Colon => false,
| ShiftLeft | ShiftRight | BitAnd | BitXor | BitOr | LAnd | LOr | DotDot | DotDotEq => {
false
}
}
}

@@ -177,7 +175,7 @@ impl AssocOp {
Assign | AssignOp(_) => true,
Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual | As | Multiply
| Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd | BitXor
| BitOr | LAnd | LOr | DotDot | DotDotEq | Colon => false,
| BitOr | LAnd | LOr | DotDot | DotDotEq => false,
}
}

@@ -202,7 +200,7 @@ impl AssocOp {
BitOr => Some(BinOpKind::BitOr),
LAnd => Some(BinOpKind::And),
LOr => Some(BinOpKind::Or),
Assign | AssignOp(_) | As | DotDot | DotDotEq | Colon => None,
Assign | AssignOp(_) | As | DotDot | DotDotEq => None,
}
}

@@ -223,10 +221,9 @@ impl AssocOp {
Greater | // `{ 42 } > 3`
GreaterEqual | // `{ 42 } >= 3`
AssignOp(_) | // `{ 42 } +=`
As | // `{ 42 } as usize`
// Equal | // `{ 42 } == { 42 }` Accepting these here would regress incorrect
// NotEqual | // `{ 42 } != { 42 }` struct literals parser recovery.
Colon, // `{ 42 }: usize`
// NotEqual | // `{ 42 } != { 42 } struct literals parser recovery.
As // `{ 42 } as usize`
)
}
}

@@ -254,7 +251,6 @@ pub enum ExprPrecedence {
Binary(BinOpKind),

Cast,
Type,

Assign,
AssignOp,

@@ -313,7 +309,6 @@ impl ExprPrecedence {
// Binop-like expr kinds, handled by `AssocOp`.
ExprPrecedence::Binary(op) => AssocOp::from_ast_binop(op).precedence() as i8,
ExprPrecedence::Cast => AssocOp::As.precedence() as i8,
ExprPrecedence::Type => AssocOp::Colon.precedence() as i8,

ExprPrecedence::Assign |
ExprPrecedence::AssignOp => AssocOp::Assign.precedence() as i8,

@@ -341,10 +341,16 @@ impl<'a> State<'a> {
self.print_type(ty);
}
ast::ExprKind::Type(expr, ty) => {
let prec = AssocOp::Colon.precedence() as i8;
self.print_expr_maybe_paren(expr, prec);
self.word_space(":");
self.word("type_ascribe!(");
self.ibox(0);
self.print_expr(expr);

self.word(",");
self.space_if_not_bol();
self.print_type(ty);

self.end();
self.word(")");
}
ast::ExprKind::Let(pat, scrutinee, _) => {
self.print_let(pat, scrutinee);
@@ -69,7 +69,6 @@ pub fn parse_asm_args<'a>(
if allow_templates {
// After a template string, we always expect *only* a comma...
let mut err = diag.create_err(errors::AsmExpectedComma { span: p.token.span });
p.maybe_annotate_with_ascription(&mut err, false);
return Err(err);
} else {
// ...after that delegate to `expect` to also include the other expected tokens.

@@ -61,7 +61,6 @@ pub fn expand_assert<'cx>(
delim: MacDelimiter::Parenthesis,
tokens,
}),
prior_type_ascription: None,
})),
);
expr_if_not(cx, call_site_span, cond_expr, then, None)

@@ -182,7 +182,6 @@ impl<'cx, 'a> Context<'cx, 'a> {
delim: MacDelimiter::Parenthesis,
tokens: initial.into_iter().chain(captures).collect::<TokenStream>(),
}),
prior_type_ascription: None,
})),
)
}

@@ -63,7 +63,6 @@ fn expand<'cx>(
delim: MacDelimiter::Parenthesis,
tokens: tts,
}),
prior_type_ascription: None,
})),
),
)

@@ -992,7 +992,6 @@ pub struct ExpansionData {
pub depth: usize,
pub module: Rc<ModuleData>,
pub dir_ownership: DirOwnership,
pub prior_type_ascription: Option<(Span, bool)>,
/// Some parent node that is close to this macro call
pub lint_node_id: NodeId,
pub is_trailing_mac: bool,

@@ -1043,7 +1042,6 @@ impl<'a> ExtCtxt<'a> {
depth: 0,
module: Default::default(),
dir_ownership: DirOwnership::Owned { relative: None },
prior_type_ascription: None,
lint_node_id: ast::CRATE_NODE_ID,
is_trailing_mac: false,
},

@@ -657,8 +657,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
}
SyntaxExtensionKind::LegacyBang(expander) => {
let prev = self.cx.current_expansion.prior_type_ascription;
self.cx.current_expansion.prior_type_ascription = mac.prior_type_ascription;
let tok_result = expander.expand(self.cx, span, mac.args.tokens.clone());
let result = if let Some(result) = fragment_kind.make_from(tok_result) {
result

@@ -666,7 +664,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
self.error_wrong_fragment_kind(fragment_kind, &mac, span);
fragment_kind.dummy(span)
};
self.cx.current_expansion.prior_type_ascription = prev;
result
}
_ => unreachable!(),

@@ -250,8 +250,7 @@ fn expand_macro<'cx>(
trace_macros_note(&mut cx.expansions, sp, msg);
}

let mut p = Parser::new(sess, tts, false, None);
p.last_type_ascription = cx.current_expansion.prior_type_ascription;
let p = Parser::new(sess, tts, false, None);

if is_local {
cx.resolver.record_macro_rule_usage(node_id, i);

@@ -21,7 +21,6 @@ pub fn placeholder(
delim: ast::MacDelimiter::Parenthesis,
tokens: ast::tokenstream::TokenStream::new(Vec::new()),
}),
prior_type_ascription: None,
})
}

@@ -1407,10 +1407,16 @@ impl<'a> State<'a> {
self.print_type(ty);
}
hir::ExprKind::Type(expr, ty) => {
let prec = AssocOp::Colon.precedence() as i8;
self.print_expr_maybe_paren(expr, prec);
self.word_space(":");
self.word("type_ascribe!(");
self.ibox(0);
self.print_expr(expr);

self.word(",");
self.space_if_not_bol();
self.print_type(ty);

self.end();
self.word(")");
}
hir::ExprKind::DropTemps(init) => {
// Print `{`:

@@ -420,6 +420,14 @@ parse_maybe_fn_typo_with_impl = you might have meant to write `impl` instead of
parse_expected_fn_path_found_fn_keyword = expected identifier, found keyword `fn`
.suggestion = use `Fn` to refer to the trait

parse_path_single_colon = path separator must be a double colon
.suggestion = use a double colon instead

parse_colon_as_semi = statements are terminated with a semicolon
.suggestion = use a semicolon instead

parse_type_ascription_removed = type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>

parse_where_clause_before_tuple_struct_body = where clauses are not allowed before tuple struct bodies
.label = unexpected where clause
.name_label = while parsing this tuple struct
@@ -1340,6 +1340,28 @@ pub(crate) struct ExpectedFnPathFoundFnKeyword {
pub fn_token_span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_path_single_colon)]
pub(crate) struct PathSingleColon {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = "::")]
pub span: Span,

#[note(parse_type_ascription_removed)]
pub type_ascription: Option<()>,
}

#[derive(Diagnostic)]
#[diag(parse_colon_as_semi)]
pub(crate) struct ColonAsSemi {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = ";")]
pub span: Span,

#[note(parse_type_ascription_removed)]
pub type_ascription: Option<()>,
}

#[derive(Diagnostic)]
#[diag(parse_where_clause_before_tuple_struct_body)]
pub(crate) struct WhereClauseBeforeTupleStructBody {

@@ -4,7 +4,7 @@ use super::{
TokenExpectType, TokenType,
};
use crate::errors::{
AmbiguousPlus, AttributeOnParamType, BadQPathStage2, BadTypePlus, BadTypePlusSub,
AmbiguousPlus, AttributeOnParamType, BadQPathStage2, BadTypePlus, BadTypePlusSub, ColonAsSemi,
ComparisonOperatorsCannotBeChained, ComparisonOperatorsCannotBeChainedSugg,
ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything,
DocCommentOnParamType, DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,

@@ -84,6 +84,7 @@ impl RecoverQPath for Ty {
}

impl RecoverQPath for Pat {
const PATH_STYLE: PathStyle = PathStyle::Pat;
fn to_ty(&self) -> Option<P<Ty>> {
self.to_ty()
}

@@ -237,6 +238,7 @@ impl<'a> DerefMut for SnapshotParser<'a> {

impl<'a> Parser<'a> {
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_err<S: Into<MultiSpan>>(
&self,
sp: S,

@@ -663,7 +665,6 @@ impl<'a> Parser<'a> {
err.span_label(sp, label_exp);
err.span_label(self.token.span, "unexpected token");
}
self.maybe_annotate_with_ascription(&mut err, false);
Err(err)
}

@@ -788,59 +789,6 @@ impl<'a> Parser<'a> {
None
}

pub fn maybe_annotate_with_ascription(
&mut self,
err: &mut Diagnostic,
maybe_expected_semicolon: bool,
) {
if let Some((sp, likely_path)) = self.last_type_ascription.take() {
let sm = self.sess.source_map();
let next_pos = sm.lookup_char_pos(self.token.span.lo());
let op_pos = sm.lookup_char_pos(sp.hi());

let allow_unstable = self.sess.unstable_features.is_nightly_build();

if likely_path {
err.span_suggestion(
sp,
"maybe write a path separator here",
"::",
if allow_unstable {
Applicability::MaybeIncorrect
} else {
Applicability::MachineApplicable
},
);
self.sess.type_ascription_path_suggestions.borrow_mut().insert(sp);
} else if op_pos.line != next_pos.line && maybe_expected_semicolon {
err.span_suggestion(
sp,
"try using a semicolon",
";",
Applicability::MaybeIncorrect,
);
} else if allow_unstable {
err.span_label(sp, "tried to parse a type due to this type ascription");
} else {
err.span_label(sp, "tried to parse a type due to this");
}
if allow_unstable {
// Give extra information about type ascription only if it's a nightly compiler.
err.note(
"`#![feature(type_ascription)]` lets you annotate an expression with a type: \
`<expr>: <type>`",
);
if !likely_path {
// Avoid giving too much info when it was likely an unrelated typo.
err.note(
"see issue #23416 <https://github.com/rust-lang/rust/issues/23416> \
for more information",
);
}
}
}
}

/// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {

@@ -1625,9 +1573,40 @@ impl<'a> Parser<'a> {
if self.eat(&token::Semi) {
return Ok(());
}

if self.recover_colon_as_semi() {
return Ok(());
}

self.expect(&token::Semi).map(drop) // Error unconditionally
}

pub(super) fn recover_colon_as_semi(&mut self) -> bool {
let line_idx = |span: Span| {
self.sess
.source_map()
.span_to_lines(span)
.ok()
.and_then(|lines| Some(lines.lines.get(0)?.line_index))
};

if self.may_recover()
&& self.token == token::Colon
&& self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span))
{
self.sess.emit_err(ColonAsSemi {
span: self.token.span,
type_ascription: self.sess.unstable_features.is_nightly_build().then_some(()),
});

self.bump();

return true;
}

false
}

/// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
/// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
pub(super) fn recover_incorrect_await_syntax(
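The new `recover_colon_as_semi` above only reinterprets a stray `:` as `;` when the token that follows it starts on a later source line. A standalone sketch of that heuristic, with plain line numbers standing in for rustc's span-to-line lookup (names are illustrative only, not rustc API):

// Illustrative re-implementation of the check: a trailing `:` is treated as a
// mistyped `;` only when the next token begins on a later line.
fn colon_likely_meant_semi(colon_line: usize, next_token_line: usize) -> bool {
    colon_line < next_token_line
}

fn main() {
    // `let x = f():` with the next statement on the following line -> recover.
    assert!(colon_likely_meant_semi(3, 4));
    // `let x: u32 = 5;` -- the type follows on the same line -> no recovery.
    assert!(!colon_likely_meant_semi(3, 3));
}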
@@ -1790,37 +1769,27 @@ impl<'a> Parser<'a> {
}
}

pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
(self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish.
self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()))
|| self.token.is_ident() &&
matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) &&
!self.token.is_reserved_ident() && // v `foo:bar(baz)`
self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Parenthesis))
|| self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)) // `foo:bar {`
|| self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
self.look_ahead(2, |t| t == &token::Lt) &&
self.look_ahead(3, |t| t.is_ident())
|| self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
self.look_ahead(2, |t| t.is_ident())
|| self.look_ahead(1, |t| t == &token::ModSep)
&& (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz`
self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
}

pub(super) fn recover_seq_parse_error(
&mut self,
delim: Delimiter,
lo: Span,
result: PResult<'a, P<Expr>>,
) -> P<Expr> {
use crate::parser::DUMMY_NODE_ID;
match result {
Ok(x) => x,
Err(mut err) => {
err.emit();
// Recover from parse error, callers expect the closing delim to be consumed.
self.consume_block(delim, ConsumeClosingDelim::Yes);
self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err)
debug!("recover_seq_parse_error: consumed tokens until {:?} {:?}", lo, self.token);
let res = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err);
if res.id == DUMMY_NODE_ID {
//panic!("debug now ....: {:?}", res);
res
} else {
res
}
}
}
}

@@ -1902,7 +1871,7 @@ impl<'a> Parser<'a> {
&& brace_depth == 0
&& bracket_depth == 0 =>
{
debug!("recover_stmt_ return - Semi");
debug!("recover_stmt_ return - Comma");
break;
}
_ => self.bump(),

@@ -174,10 +174,8 @@ impl<'a> Parser<'a> {
self.parse_expr_prefix(attrs)?
}
};
let last_type_ascription_set = self.last_type_ascription.is_some();

if !self.should_continue_as_assoc_expr(&lhs) {
self.last_type_ascription = None;
return Ok(lhs);
}

@@ -296,14 +294,22 @@ impl<'a> Parser<'a> {
continue;
}

// Special cases:
if op.node == AssocOp::As {
lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
continue;
} else if op.node == AssocOp::DotDot || op.node == AssocOp::DotDotEq {
// If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
// generalise it to the Fixity::None code.
lhs = self.parse_expr_range(prec, lhs, op.node, cur_op_span)?;
break;
}

let op = op.node;
// Special cases:
if op == AssocOp::As {
lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
continue;
} else if op == AssocOp::Colon {
lhs = self.parse_assoc_op_ascribe(lhs, lhs_span)?;
continue;
} else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
// If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
// generalise it to the Fixity::None code.

@@ -364,7 +370,7 @@ impl<'a> Parser<'a> {
let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
self.mk_expr(span, aopexpr)
}
AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
AssocOp::As | AssocOp::DotDot | AssocOp::DotDotEq => {
self.span_bug(span, "AssocOp should have been handled by special case")
}
};

@@ -373,9 +379,7 @@ impl<'a> Parser<'a> {
break;
}
}
if last_type_ascription_set {
self.last_type_ascription = None;
}

Ok(lhs)
}

@@ -615,7 +619,9 @@ impl<'a> Parser<'a> {
token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
}
_ => return this.parse_expr_dot_or_call(Some(attrs)),
_ => {
return this.parse_expr_dot_or_call(Some(attrs));
}
}
}

@@ -743,7 +749,7 @@ impl<'a> Parser<'a> {
(
// `foo: `
ExprKind::Path(None, ast::Path { segments, .. }),
TokenKind::Ident(kw::For | kw::Loop | kw::While, false),
token::Ident(kw::For | kw::Loop | kw::While, false),
) if segments.len() == 1 => {
let snapshot = self.create_snapshot_for_diagnostic();
let label = Label {

@@ -838,21 +844,19 @@ impl<'a> Parser<'a> {
&mut self,
cast_expr: P<Expr>,
) -> PResult<'a, P<Expr>> {
if let ExprKind::Type(_, _) = cast_expr.kind {
panic!("ExprKind::Type must not be parsed");
}

let span = cast_expr.span;
let (cast_kind, maybe_ascription_span) =
if let ExprKind::Type(ascripted_expr, _) = &cast_expr.kind {
("type ascription", Some(ascripted_expr.span.shrink_to_hi().with_hi(span.hi())))
} else {
("cast", None)
};

let with_postfix = self.parse_expr_dot_or_call_with_(cast_expr, span)?;

// Check if an illegal postfix operator has been added after the cast.
// If the resulting expression is not a cast, it is an illegal postfix operator.
if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) {
if !matches!(with_postfix.kind, ExprKind::Cast(_, _)) {
let msg = format!(
"{cast_kind} cannot be followed by {}",
"cast cannot be followed by {}",
match with_postfix.kind {
ExprKind::Index(_, _) => "indexing",
ExprKind::Try(_) => "`?`",

@@ -878,44 +882,13 @@ impl<'a> Parser<'a> {
);
};

// If type ascription is "likely an error", the user will already be getting a useful
// help message, and doesn't need a second.
if self.last_type_ascription.map_or(false, |last_ascription| last_ascription.1) {
self.maybe_annotate_with_ascription(&mut err, false);
} else if let Some(ascription_span) = maybe_ascription_span {
let is_nightly = self.sess.unstable_features.is_nightly_build();
if is_nightly {
suggest_parens(&mut err);
}
err.span_suggestion(
ascription_span,
&format!(
"{}remove the type ascription",
if is_nightly { "alternatively, " } else { "" }
),
"",
if is_nightly {
Applicability::MaybeIncorrect
} else {
Applicability::MachineApplicable
},
);
} else {
suggest_parens(&mut err);
}
suggest_parens(&mut err);

err.emit();
};
Ok(with_postfix)
}

fn parse_assoc_op_ascribe(&mut self, lhs: P<Expr>, lhs_span: Span) -> PResult<'a, P<Expr>> {
let maybe_path = self.could_ascription_be_path(&lhs.kind);
self.last_type_ascription = Some((self.prev_token.span, maybe_path));
let lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?;
self.sess.gated_spans.gate(sym::type_ascription, lhs.span);
Ok(lhs)
}

/// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`.
fn parse_expr_borrow(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
self.expect_and()?;

@@ -1010,7 +983,7 @@ impl<'a> Parser<'a> {
};
if has_dot {
// expr.f
e = self.parse_expr_dot_suffix(lo, e)?;
e = self.parse_dot_suffix_expr(lo, e)?;
continue;
}
if self.expr_is_complete(&e) {

@@ -1024,13 +997,7 @@ impl<'a> Parser<'a> {
}
}

fn look_ahead_type_ascription_as_field(&mut self) -> bool {
self.look_ahead(1, |t| t.is_ident())
&& self.look_ahead(2, |t| t == &token::Colon)
&& self.look_ahead(3, |t| t.can_begin_expr())
}

fn parse_expr_dot_suffix(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
match self.token.uninterpolate().kind {
token::Ident(..) => self.parse_dot_suffix(base, lo),
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {

@@ -1183,9 +1150,7 @@ impl<'a> Parser<'a> {

/// Parse a function call expression, `expr(...)`.
fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead_type_ascription_as_field()
{
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
} else {
None

@@ -1216,7 +1181,6 @@ impl<'a> Parser<'a> {
if !self.may_recover() {
return None;
}

match (seq.as_mut(), snapshot) {
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
snapshot.bump(); // `(`

@@ -1260,9 +1224,7 @@ impl<'a> Parser<'a> {
return Some(self.mk_expr_err(span));
}
Ok(_) => {}
Err(mut err) => {
err.emit();
}
Err(err) => err.cancel(),
}
}
_ => {}

@@ -1516,7 +1478,6 @@ impl<'a> Parser<'a> {
let mac = P(MacCall {
path,
args: self.parse_delim_args()?,
prior_type_ascription: self.last_type_ascription,
});
(lo.to(self.prev_token.span), ExprKind::MacCall(mac))
} else if self.check(&token::OpenDelim(Delimiter::Brace))

@@ -1535,7 +1496,7 @@ impl<'a> Parser<'a> {
}

/// Parse `'label: $expr`. The label is already parsed.
fn parse_expr_labeled(
pub(super) fn parse_expr_labeled(
&mut self,
label_: Label,
mut consume_colon: bool,

@@ -3013,6 +2974,11 @@ impl<'a> Parser<'a> {
} else {
e.span_label(pth.span, "while parsing this struct");
}

if !recover {
return Err(e);
}

e.emit();

// If the next token is a comma, then try to parse

@@ -3024,6 +2990,7 @@ impl<'a> Parser<'a> {
break;
}
}

None
}
};

@@ -443,7 +443,7 @@ impl<'a> Parser<'a> {
Ok(args) => {
self.eat_semi_for_macro_if_needed(&args);
self.complain_if_pub_macro(vis, false);
Ok(MacCall { path, args, prior_type_ascription: self.last_type_ascription })
Ok(MacCall { path, args })
}

Err(mut err) => {
@@ -148,9 +148,6 @@ pub struct Parser<'a> {
max_angle_bracket_count: u32,

last_unexpected_token_span: Option<Span>,
/// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
/// looked like it could have been a mistyped path or literal `Option:Some(42)`).
pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
subparser_name: Option<&'static str>,
capture_state: CaptureState,

@@ -165,7 +162,7 @@ pub struct Parser<'a> {
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
// it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
rustc_data_structures::static_assert_size!(Parser<'_>, 320);

/// Stores span information about a closure.
#[derive(Clone)]

@@ -470,7 +467,6 @@ impl<'a> Parser<'a> {
unmatched_angle_bracket_count: 0,
max_angle_bracket_count: 0,
last_unexpected_token_span: None,
last_type_ascription: None,
subparser_name,
capture_state: CaptureState {
capturing: Capturing::No,

@@ -832,10 +828,11 @@ impl<'a> Parser<'a> {
}

fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
kets.iter().any(|k| match expect {
let res = kets.iter().any(|k| match expect {
TokenExpectType::Expect => self.check(k),
TokenExpectType::NoExpect => self.token == **k,
})
});
res
}

fn parse_seq_to_before_tokens<T>(

@@ -941,10 +938,14 @@ impl<'a> Parser<'a> {
// propagate the help message from sub error 'e' to main error 'expect_err;
expect_err.children.push(xx.clone());
}
expect_err.emit();

e.cancel();
break;
if self.token == token::Colon {
// we will try to recover in `maybe_recover_struct_lit_bad_delims`
return Err(expect_err);
} else {
expect_err.emit();
break;
}
}
}
}
}

@@ -959,7 +960,6 @@ impl<'a> Parser<'a> {
let t = f(self)?;
v.push(t);
}

Ok((v, trailing, recovered))
}

@@ -1045,6 +1045,7 @@ impl<'a> Parser<'a> {
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;

if !recovered {
self.eat(ket);
}

@@ -406,11 +406,11 @@ impl<'a> Parser<'a> {
// Parse pattern starting with a path
let (qself, path) = if self.eat_lt() {
// Parse a qualified path
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
let (qself, path) = self.parse_qpath(PathStyle::Pat)?;
(Some(qself), path)
} else {
// Parse an unqualified path
(None, self.parse_path(PathStyle::Expr)?)
(None, self.parse_path(PathStyle::Pat)?)
};
let span = lo.to(self.prev_token.span);

@@ -666,7 +666,7 @@ impl<'a> Parser<'a> {
fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
self.bump();
let args = self.parse_delim_args()?;
let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
let mac = P(MacCall { path, args });
Ok(PatKind::MacCall(mac))
}

@@ -789,11 +789,11 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let (qself, path) = if self.eat_lt() {
// Parse a qualified path
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
let (qself, path) = self.parse_qpath(PathStyle::Pat)?;
(Some(qself), path)
} else {
// Parse an unqualified path
(None, self.parse_path(PathStyle::Expr)?)
(None, self.parse_path(PathStyle::Pat)?)
};
let hi = self.prev_token.span;
Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path)))

@@ -1,5 +1,6 @@
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::errors::PathSingleColon;
use crate::{errors, maybe_whole};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};

@@ -8,7 +9,7 @@ use rustc_ast::{
AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
Path, PathSegment, QSelf,
};
use rustc_errors::{Applicability, PResult};
use rustc_errors::{pluralize, Applicability, IntoDiagnostic, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym, Ident};
use std::mem;

@@ -16,7 +17,7 @@ use thin_vec::ThinVec;
use tracing::debug;

/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum PathStyle {
/// In some contexts, notably in expressions, paths with generic arguments are ambiguous
/// with something else. For example, in expressions `segment < ....` can be interpreted

@@ -24,7 +25,19 @@ pub enum PathStyle {
/// In all such contexts the non-path interpretation is preferred by default for practical
/// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
/// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
///
/// Also, a path may never be followed by a `:`. This means that we can eagerly recover if
/// we encounter it.
Expr,
/// The same as `Expr`, but may be followed by a `:`.
/// For example, this code:
/// ```rust
/// struct S;
///
/// let S: S;
/// // ^ Followed by a `:`
/// ```
Pat,
/// In other contexts, notably in types, no ambiguity exists and paths can be written
/// without the disambiguator, e.g., `x<y>` - unambiguously a path.
/// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.

@@ -38,6 +51,12 @@ pub enum PathStyle {
Mod,
}

impl PathStyle {
fn has_generic_ambiguity(&self) -> bool {
matches!(self, Self::Expr | Self::Pat)
}
}

impl<'a> Parser<'a> {
/// Parses a qualified path.
/// Assumes that the leading `<` has been parsed already.
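As the documentation of the new `Pat` style notes, a path in pattern position may legitimately be followed by `:`, so the eager single-colon recovery is reserved for `Expr` paths. A compilable sketch of the pattern case (ordinary user code, not compiler internals):

struct S;

fn main() {
    // The pattern `S` (a unit-struct path) is directly followed by `:` and a type.
    let S: S = S;
}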
@@ -183,7 +202,9 @@ impl<'a> Parser<'a> {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
self.parse_path_segments(&mut segments, style, ty_generics)?;

if segments.len() > 1 {
//panic!("debug now ...");
}
Ok(Path { segments, span: lo.to(self.prev_token.span), tokens: None })
}

@@ -195,7 +216,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, ()> {
loop {
let segment = self.parse_path_segment(style, ty_generics)?;
if style == PathStyle::Expr {
if style.has_generic_ambiguity() {
// In order to check for trailing angle brackets, we must have finished
// recursing (`parse_path_segment` can indirectly call this function),
// that is, the next token must be the highlighted part of the below example:

@@ -217,6 +238,29 @@ impl<'a> Parser<'a> {
segments.push(segment);

if self.is_import_coupler() || !self.eat(&token::ModSep) {
if style == PathStyle::Expr
&& self.may_recover()
&& self.token == token::Colon
&& self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
{
// Emit a special error message for `a::b:c` to help users
// otherwise, `a: c` might have meant to introduce a new binding
if self.token.span.lo() == self.prev_token.span.hi()
&& self.look_ahead(1, |token| self.token.span.hi() == token.span.lo())
{
self.bump(); // bump past the colon
self.sess.emit_err(PathSingleColon {
span: self.prev_token.span,
type_ascription: self
.sess
.unstable_features
.is_nightly_build()
.then_some(()),
});
}
continue;
}

return Ok(());
}
}
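The recovery above targets a lone `:` written where a path separator was meant (`a::b:c`, with the colon adjacent to the segments on both sides) and offers the machine-applicable `PathSingleColon` fix that rewrites it to `::`. A minimal compilable sketch of the form the suggestion produces:

fn main() {
    // e.g. a typo such as `Vec:<u8>:new()` is rewritten to the double-colon form:
    let v = Vec::<u8>::new();
    assert!(v.is_empty());
}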
@@ -270,8 +314,25 @@ impl<'a> Parser<'a> {
ty_generics,
)?;
self.expect_gt().map_err(|mut err| {
// Try to recover a `:` into a `::`
if self.token == token::Colon
&& self.look_ahead(1, |token| {
token.is_ident() && !token.is_reserved_ident()
})
{
err.cancel();
err = PathSingleColon {
span: self.token.span,
type_ascription: self
.sess
.unstable_features
.is_nightly_build()
.then_some(()),
}
.into_diagnostic(self.diagnostic());
}
// Attempt to find places where a missing `>` might belong.
if let Some(arg) = args
else if let Some(arg) = args
.iter()
.rev()
.find(|arg| !matches!(arg, AngleBracketedArg::Constraint(_)))

@@ -679,6 +740,7 @@ impl<'a> Parser<'a> {
&mut self,
ty_generics: Option<&Generics>,
) -> PResult<'a, Option<GenericArg>> {
debug!("pain");
let start = self.token.span;
let arg = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
// Parse lifetime argument.

@@ -687,6 +749,7 @@ impl<'a> Parser<'a> {
// Parse const argument.
GenericArg::Const(self.parse_const_arg()?)
} else if self.check_type() {
debug!("type");
// Parse type argument.

// Proactively create a parser snapshot enabling us to rewind and try to reparse the

@@ -10,6 +10,8 @@ use super::{
use crate::errors;
use crate::maybe_whole;

use crate::errors::MalformedLoopLabel;
use ast::Label;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};

@@ -19,7 +21,8 @@ use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
use rustc_ast::{StmtKind, DUMMY_NODE_ID};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym};
use rustc_span::symbol::{kw, sym, Ident};

use std::mem;
use thin_vec::{thin_vec, ThinVec};

@@ -186,7 +189,7 @@ impl<'a> Parser<'a> {
_ => MacStmtStyle::NoBraces,
};

let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
let mac = P(MacCall { path, args });

let kind = if (style == MacStmtStyle::Braces
&& self.token != token::Dot

@@ -546,10 +549,36 @@ impl<'a> Parser<'a> {
}
let stmt = match self.parse_full_stmt(recover) {
Err(mut err) if recover.yes() => {
self.maybe_annotate_with_ascription(&mut err, false);
if let Some(ref mut snapshot) = snapshot {
snapshot.recover_diff_marker();
}
if self.token == token::Colon {
// if next token is following a colon, it's likely a path
// and we can suggest a path separator
let ident_span = self.prev_token.span;
self.bump();
if self.token.span.lo() == self.prev_token.span.hi() {
err.span_suggestion_verbose(
self.prev_token.span,
"maybe write a path separator here",
"::",
Applicability::MaybeIncorrect,
);
}
if self.look_ahead(1, |token| token == &token::Eq) {
err.span_suggestion_verbose(
ident_span.shrink_to_lo(),
"you might have meant to introduce a new binding",
"let ",
Applicability::MaybeIncorrect,
);
}
if self.sess.unstable_features.is_nightly_build() {
// FIXME(Nilstrieb): Remove this again after a few months.
err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>");
}
}

err.emit();
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
Some(self.mk_stmt_err(self.token.span))

@@ -580,19 +609,25 @@ impl<'a> Parser<'a> {
};

let mut eat_semi = true;
let mut add_semi_to_stmt = false;

match &mut stmt.kind {
// Expression without semicolon.
StmtKind::Expr(expr)
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => {
// Just check for errors and recover; do not eat semicolon yet.
// `expect_one_of` returns PResult<'a, bool /* recovered */>
let replace_with_err =
match self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]) {

let expect_result = self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);

let replace_with_err = 'break_recover: {
match expect_result {
// Recover from parser, skip type error to avoid extra errors.
Ok(true) => true,
Err(mut e) => {
if let TokenKind::DocComment(..) = self.token.kind &&
let Ok(snippet) = self.span_to_snippet(self.token.span) {
Ok(true) => true,
Err(mut e) => {
if let TokenKind::DocComment(..) = self.token.kind
&& let Ok(snippet) = self.span_to_snippet(self.token.span)
{
let sp = self.token.span;
let marker = &snippet[..3];
let (comment_marker, doc_comment_marker) = marker.split_at(2);

@@ -606,21 +641,72 @@ impl<'a> Parser<'a> {
format!("{} {}", comment_marker, doc_comment_marker),
Applicability::MaybeIncorrect,
);
}

if let Err(mut e) =
self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
{
if recover.no() {
return Err(e);
}
e.emit();
self.recover_stmt();

if self.recover_colon_as_semi() {
// recover_colon_as_semi has already emitted a nicer error.
e.cancel();
add_semi_to_stmt = true;
eat_semi = false;

break 'break_recover false;
}

match &expr.kind {
ExprKind::Path(None, ast::Path { segments, .. }) if segments.len() == 1 => {
if self.token == token::Colon
&& self.look_ahead(1, |token| {
token.is_whole_block() || matches!(
token.kind,
token::Ident(kw::For | kw::Loop | kw::While, false)
| token::OpenDelim(Delimiter::Brace)
)
})
{
let snapshot = self.create_snapshot_for_diagnostic();
let label = Label {
ident: Ident::from_str_and_span(
&format!("'{}", segments[0].ident),
segments[0].ident.span,
),
};
match self.parse_expr_labeled(label, false) {
Ok(labeled_expr) => {
e.cancel();
self.sess.emit_err(MalformedLoopLabel {
span: label.ident.span,
correct_label: label.ident,
});
*expr = labeled_expr;
break 'break_recover false;
}
Err(err) => {
err.cancel();
self.restore_snapshot(snapshot);
}
}
}
}
_ => {}
}

if let Err(mut e) =
self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
{
if recover.no() {
return Err(e);
}
e.emit();
self.recover_stmt();
}

true

}
true
Ok(false) => false
}
_ => false
};

if replace_with_err {
// We already emitted an error, so don't emit another type error
let sp = expr.span.to(self.prev_token.span);

@@ -643,9 +729,10 @@ impl<'a> Parser<'a> {
StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => eat_semi = false,
}

if eat_semi && self.eat(&token::Semi) {
if add_semi_to_stmt || (eat_semi && self.eat(&token::Semi)) {
stmt = stmt.add_trailing_semicolon();
}

stmt.span = stmt.span.to(self.prev_token.span);
Ok(Some(stmt))
}
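The statement recovery above reparses `label: loop { ... }`-style code as a labeled expression and emits `MalformedLoopLabel`, whose suggestion adds the missing leading `'`. A compilable sketch of the corrected syntax:

fn main() {
    // Loop labels are written with a leading tick, like lifetimes.
    'outer: loop {
        break 'outer;
    }
}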
@@ -317,7 +317,6 @@ impl<'a> Parser<'a> {
let msg = format!("expected type, found {}", super::token_descr(&self.token));
let mut err = self.struct_span_err(self.token.span, &msg);
err.span_label(self.token.span, "expected type");
self.maybe_annotate_with_ascription(&mut err, true);
return Err(err);
};

@@ -651,11 +650,7 @@ impl<'a> Parser<'a> {
let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
if self.eat(&token::Not) {
// Macro invocation in type position
Ok(TyKind::MacCall(P(MacCall {
path,
args: self.parse_delim_args()?,
prior_type_ascription: self.last_type_ascription,
})))
Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
// `Trait1 + Trait2 + 'a`
self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)

@@ -777,6 +777,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
.sess
.create_err(errs::SelfImportOnlyInImportListWithNonEmptyPrefix { span }),
ResolutionError::FailedToResolve { label, suggestion } => {
if label.len() > 0 {
//panic!("debug now");
}

let mut err =
struct_span_err!(self.tcx.sess, span, E0433, "failed to resolve: {}", &label);
err.span_label(span, label);

@@ -1345,7 +1345,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
ribs: Option<&PerNS<Vec<Rib<'a>>>>,
ignore_binding: Option<&'a NameBinding<'a>>,
) -> PathResult<'a> {
debug!("resolve_path(path={:?}, opt_ns={:?}, finalize={:?})", path, opt_ns, finalize);
debug!(
"resolve_path(path={:?}, opt_ns={:?}, finalize={:?}) path_len: {}",
path,
opt_ns,
finalize,
path.len()
);

let mut module = None;
let mut allow_super = true;

@@ -1264,14 +1264,15 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
opt_ns: Option<Namespace>, // `None` indicates a module path in import
finalize: Option<Finalize>,
) -> PathResult<'a> {
self.r.resolve_path_with_ribs(
let res = self.r.resolve_path_with_ribs(
path,
opt_ns,
&self.parent_scope,
finalize,
Some(&self.ribs),
None,
)
);
res
}

// AST resolution

@@ -3488,10 +3489,6 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
//
// Similar thing, for types, happens in `report_errors` above.
let report_errors_for_call = |this: &mut Self, parent_err: Spanned<ResolutionError<'a>>| {
if !source.is_call() {
return Some(parent_err);
}

// Before we start looking for candidates, we have to get our hands
// on the type user is trying to perform invocation on; basically:
// we're transforming `HashMap::new` into just `HashMap`.

@@ -3721,6 +3718,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
}

/// Handles paths that may refer to associated items.
#[instrument(level = "debug", skip(self))]
fn resolve_qpath(
&mut self,
qself: &Option<P<QSelf>>,

@@ -3728,11 +3726,6 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
ns: Namespace,
finalize: Finalize,
) -> Result<Option<PartialRes>, Spanned<ResolutionError<'a>>> {
debug!(
"resolve_qpath(qself={:?}, path={:?}, ns={:?}, finalize={:?})",
qself, path, ns, finalize,
);

if let Some(qself) = qself {
if qself.position == 0 {
// This is a case like `<T>::B`, where there is no

@@ -305,6 +305,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {

/// Handles error reporting for `smart_resolve_path_fragment` function.
/// Creates base error and amends it with one short label and possibly some longer helps/notes.
#[instrument(level = "debug", skip(self))]
pub(crate) fn smart_resolve_report_errors(
&mut self,
path: &[Segment],

@@ -350,7 +351,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
return (err, candidates);
}

if !self.type_ascription_suggestion(&mut err, base_error.span) {
if !self.suggest_missing_let(&mut err, base_error.span) {
let mut fallback =
self.suggest_trait_and_bounds(&mut err, source, res, span, &base_error);

@@ -1823,7 +1824,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
start.to(sm.next_point(start))
}

fn type_ascription_suggestion(&self, err: &mut Diagnostic, base_span: Span) -> bool {
#[instrument(level = "debug", skip(self, err))]
fn suggest_missing_let(&self, err: &mut Diagnostic, base_span: Span) -> bool {
let sm = self.r.tcx.sess.source_map();
let base_snippet = sm.span_to_snippet(base_span);
if let Some(&sp) = self.diagnostic_metadata.current_type_ascription.last() {

@@ -1878,12 +1880,6 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
}
}
}
if show_label {
err.span_label(
base_span,
"expecting a type here because of type ascription",
);
}
return show_label;
}
}