Auto merge of #101619 - Xiretza:rustc_parse-session-diagnostics, r=davidtwco
Migrate more of rustc_parse to SessionDiagnostic

Still far from complete, but I thought I'd add a checkpoint here because rebasing was starting to get annoying.

commit 09ae7846a2
28 changed files with 2045 additions and 1283 deletions
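For context, the pattern being migrated to pairs a Fluent message (see the parser.ftl additions below) with one plain struct per diagnostic, emitted through the session instead of built by hand. A minimal sketch of that shape; the derive and attribute spelling here are assumptions for illustration and may not match the new errors.rs exactly:

    // Sketch only: derive/attribute names and field layout are assumed, not copied from the diff.
    use rustc_macros::SessionDiagnostic;
    use rustc_span::Span;

    #[derive(SessionDiagnostic)]
    #[diag(parser::suffixed_literal_in_attribute)]
    #[help]
    pub(crate) struct SuffixedLiteralInAttribute {
        #[primary_span]
        pub span: Span,
    }

    // At the use site the hand-written builder call becomes (this line is from the diff):
    // self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });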
@@ -3450,6 +3450,8 @@ version = "0.0.0"
 dependencies = [
  "annotate-snippets",
  "atty",
+ "rustc_ast",
+ "rustc_ast_pretty",
  "rustc_data_structures",
  "rustc_error_messages",
  "rustc_hir",
@@ -71,6 +71,8 @@ parser_field_expression_with_generic = field expressions cannot have generic arg
 parser_macro_invocation_with_qualified_path = macros cannot use qualified paths

 parser_unexpected_token_after_label = expected `while`, `for`, `loop` or `{"{"}` after a label
+    .suggestion_remove_label = consider removing the label
+    .suggestion_enclose_in_block = consider enclosing expression in a block

 parser_require_colon_after_labeled_expression = labeled expression must be followed by `:`
     .note = labels are used before loops and blocks, allowing e.g., `break 'label` to them
@@ -161,3 +163,209 @@ parser_use_eq_instead = unexpected `==`

 parser_use_empty_block_not_semi = expected { "`{}`" }, found `;`
     .suggestion = try using { "`{}`" } instead

+parser_comparison_interpreted_as_generic =
+    `<` is interpreted as a start of generic arguments for `{$type}`, not a comparison
+    .label_args = interpreted as generic arguments
+    .label_comparison = not interpreted as comparison
+    .suggestion = try comparing the cast value
+
+parser_shift_interpreted_as_generic =
+    `<<` is interpreted as a start of generic arguments for `{$type}`, not a shift
+    .label_args = interpreted as generic arguments
+    .label_comparison = not interpreted as shift
+    .suggestion = try shifting the cast value
+
+parser_found_expr_would_be_stmt = expected expression, found `{$token}`
+    .label = expected expression
+
+parser_leading_plus_not_supported = leading `+` is not supported
+    .label = unexpected `+`
+    .suggestion_remove_plus = try removing the `+`
+
+parser_parentheses_with_struct_fields = invalid `struct` delimiters or `fn` call arguments
+    .suggestion_braces_for_struct = if `{$type}` is a struct, use braces as delimiters
+    .suggestion_no_fields_for_fn = if `{$type}` is a function, use the arguments directly
+
+parser_labeled_loop_in_break = parentheses are required around this expression to avoid confusion with a labeled break expression
+
+parser_sugg_wrap_expression_in_parentheses = wrap the expression in parentheses
+
+parser_array_brackets_instead_of_braces = this is a block expression, not an array
+    .suggestion = to make an array, use square brackets instead of curly braces
+
+parser_match_arm_body_without_braces = `match` arm body without braces
+    .label_statements = {$num_statements ->
+            [one] this statement is not surrounded by a body
+           *[other] these statements are not surrounded by a body
+        }
+    .label_arrow = while parsing the `match` arm starting here
+    .suggestion_add_braces = surround the {$num_statements ->
+            [one] statement
+           *[other] statements
+        } with a body
+    .suggestion_use_comma_not_semicolon = use a comma to end a `match` arm expression
+
+parser_struct_literal_not_allowed_here = struct literals are not allowed here
+    .suggestion = surround the struct literal with parentheses
+
+parser_invalid_interpolated_expression = invalid interpolated expression
+
+parser_hexadecimal_float_literal_not_supported = hexadecimal float literal is not supported
+parser_octal_float_literal_not_supported = octal float literal is not supported
+parser_binary_float_literal_not_supported = binary float literal is not supported
+parser_not_supported = not supported
+
+parser_invalid_literal_suffix = suffixes on {$kind} literals are invalid
+    .label = invalid suffix `{$suffix}`
+
+parser_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid
+    .label = invalid suffix `{$suffix}`
+    .tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases
+    .tuple_exception_line_2 = on proc macros, you'll want to use `syn::Index::from` or `proc_macro::Literal::*_unsuffixed` for code that will desugar to tuple field access
+    .tuple_exception_line_3 = see issue #60210 <https://github.com/rust-lang/rust/issues/60210> for more information
+
+parser_non_string_abi_literal = non-string ABI literal
+    .suggestion = specify the ABI with a string literal
+
+parser_mismatched_closing_delimiter = mismatched closing delimiter: `{$delimiter}`
+    .label_unmatched = mismatched closing delimiter
+    .label_opening_candidate = closing delimiter possibly meant for this
+    .label_unclosed = unclosed delimiter
+
+parser_incorrect_visibility_restriction = incorrect visibility restriction
+    .help = some possible visibility restrictions are:
+            `pub(crate)`: visible only on the current crate
+            `pub(super)`: visible only in the current module's parent
+            `pub(in path::to::module)`: visible only on the specified path
+    .suggestion = make this visible only to module `{$inner_str}` with `in`
+
+parser_assignment_else_not_allowed = <assignment> ... else {"{"} ... {"}"} is not allowed
+
+parser_expected_statement_after_outer_attr = expected statement after outer attribute
+
+parser_doc_comment_does_not_document_anything = found a documentation comment that doesn't document anything
+    .help = doc comments must come before what they document, maybe a comment was intended with `//`?
+    .suggestion = missing comma here
+
+parser_const_let_mutually_exclusive = `const` and `let` are mutually exclusive
+    .suggestion = remove `let`
+
+parser_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else`
+parser_invalid_curly_in_let_else = right curly brace `{"}"}` before `else` in a `let...else` statement not allowed
+
+parser_compound_assignment_expression_in_let = can't reassign to an uninitialized variable
+    .suggestion = initialize the variable
+    .help = if you meant to overwrite, remove the `let` binding
+
+parser_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes
+    .help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)
+
+parser_invalid_meta_item = expected unsuffixed literal or identifier, found `{$token}`
+
+parser_label_inner_attr_does_not_annotate_this = the inner attribute doesn't annotate this {$item}
+parser_sugg_change_inner_attr_to_outer = to annotate the {$item}, change the attribute from inner to outer style
+
+parser_inner_attr_not_permitted_after_outer_doc_comment = an inner attribute is not permitted following an outer doc comment
+    .label_attr = not permitted following an outer doc comment
+    .label_prev_doc_comment = previous doc comment
+    .label_does_not_annotate_this = {parser_label_inner_attr_does_not_annotate_this}
+    .sugg_change_inner_to_outer = {parser_sugg_change_inner_attr_to_outer}
+
+parser_inner_attr_not_permitted_after_outer_attr = an inner attribute is not permitted following an outer attribute
+    .label_attr = not permitted following an outer attribute
+    .label_prev_attr = previous outer attribute
+    .label_does_not_annotate_this = {parser_label_inner_attr_does_not_annotate_this}
+    .sugg_change_inner_to_outer = {parser_sugg_change_inner_attr_to_outer}
+
+parser_inner_attr_not_permitted = an inner attribute is not permitted in this context
+    .label_does_not_annotate_this = {parser_label_inner_attr_does_not_annotate_this}
+    .sugg_change_inner_to_outer = {parser_sugg_change_inner_attr_to_outer}
+
+parser_inner_attr_explanation = inner attributes, like `#![no_std]`, annotate the item enclosing them, and are usually found at the beginning of source files
+parser_outer_attr_explanation = outer attributes, like `#[test]`, annotate the item following them
+
+parser_inner_doc_comment_not_permitted = expected outer doc comment
+    .note = inner doc comments like this (starting with `//!` or `/*!`) can only appear before items
+    .suggestion = you might have meant to write a regular comment
+    .label_does_not_annotate_this = the inner doc comment doesn't annotate this {$item}
+    .sugg_change_inner_to_outer = to annotate the {$item}, change the doc comment from inner to outer style
+
+parser_expected_identifier_found_reserved_identifier_str = expected identifier, found reserved identifier `{$token}`
+parser_expected_identifier_found_keyword_str = expected identifier, found keyword `{$token}`
+parser_expected_identifier_found_reserved_keyword_str = expected identifier, found reserved keyword `{$token}`
+parser_expected_identifier_found_doc_comment_str = expected identifier, found doc comment `{$token}`
+parser_expected_identifier_found_str = expected identifier, found `{$token}`
+
+parser_expected_identifier_found_reserved_identifier = expected identifier, found reserved identifier
+parser_expected_identifier_found_keyword = expected identifier, found keyword
+parser_expected_identifier_found_reserved_keyword = expected identifier, found reserved keyword
+parser_expected_identifier_found_doc_comment = expected identifier, found doc comment
+parser_expected_identifier = expected identifier
+
+parser_sugg_escape_to_use_as_identifier = escape `{$ident_name}` to use it as an identifier
+
+parser_sugg_remove_comma = remove this comma
+
+parser_expected_semi_found_reserved_identifier_str = expected `;`, found reserved identifier `{$token}`
+parser_expected_semi_found_keyword_str = expected `;`, found keyword `{$token}`
+parser_expected_semi_found_reserved_keyword_str = expected `;`, found reserved keyword `{$token}`
+parser_expected_semi_found_doc_comment_str = expected `;`, found doc comment `{$token}`
+parser_expected_semi_found_str = expected `;`, found `{$token}`
+
+parser_sugg_change_this_to_semi = change this to `;`
+parser_sugg_add_semi = add `;` here
+parser_label_unexpected_token = unexpected token
+
+parser_unmatched_angle_brackets = {$num_extra_brackets ->
+        [one] unmatched angle bracket
+       *[other] unmatched angle brackets
+    }
+    .suggestion = {$num_extra_brackets ->
+            [one] remove extra angle bracket
+           *[other] remove extra angle brackets
+        }
+
+parser_generic_parameters_without_angle_brackets = generic parameters without surrounding angle brackets
+    .suggestion = surround the type parameters with angle brackets
+
+parser_comparison_operators_cannot_be_chained = comparison operators cannot be chained
+    .sugg_parentheses_for_function_args = or use `(...)` if you meant to specify fn arguments
+    .sugg_split_comparison = split the comparison into two
+    .sugg_parenthesize = parenthesize the comparison
+parser_sugg_turbofish_syntax = use `::<...>` instead of `<...>` to specify lifetime, type, or const arguments
+
+parser_question_mark_in_type = invalid `?` in type
+    .label = `?` is only allowed on expressions, not types
+    .suggestion = if you meant to express that the type might not contain a value, use the `Option` wrapper type
+
+parser_unexpected_parentheses_in_for_head = unexpected parentheses surrounding `for` loop head
+    .suggestion = remove parentheses in `for` loop
+
+parser_doc_comment_on_param_type = documentation comments cannot be applied to a function parameter's type
+    .label = doc comments are not allowed here
+
+parser_attribute_on_param_type = attributes cannot be applied to a function parameter's type
+    .label = attributes are not allowed here
+
+parser_pattern_method_param_without_body = patterns aren't allowed in methods without bodies
+    .suggestion = give this argument a name or use an underscore to ignore it
+
+parser_self_param_not_first = unexpected `self` parameter in function
+    .label = must be the first parameter of an associated function
+
+parser_const_generic_without_braces = expressions must be enclosed in braces to be used as const generic arguments
+    .suggestion = enclose the `const` expression in braces
+
+parser_unexpected_const_param_declaration = unexpected `const` parameter declaration
+    .label = expected a `const` expression, not a parameter declaration
+    .suggestion = `const` parameters must be declared for the `impl`
+
+parser_unexpected_const_in_generic_param = expected lifetime, type, or constant, found keyword `const`
+    .suggestion = the `const` keyword is only needed in the definition of the type
+
+parser_async_move_order_incorrect = the order of `move` and `async` is incorrect
+    .suggestion = try switching the order
+
+parser_double_colon_in_bound = expected `:` followed by trait or lifetime
+    .suggestion = use single colon
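Messages with selectors like `{$num_statements -> ...}` above get their argument from an ordinary struct field; the derive turns each field into a `set_arg` call so Fluent can pick the singular or plural variant. A rough sketch of the pairing, where only the field names are taken from this PR's diff and the derive/attribute spelling is assumed:

    // Illustrative sketch; attribute names are assumptions, field names match the diff.
    #[derive(SessionDiagnostic)]
    #[diag(parser::match_arm_body_without_braces)]
    pub(crate) struct MatchArmBodyWithoutBraces {
        #[label(parser::label_statements)]
        pub statements: Span,
        #[label(parser::label_arrow)]
        pub arrow: Span,
        pub num_statements: usize, // fills {$num_statements} and drives the [one]/[other] selector
        // the `sub` suggestion subdiagnostic is omitted here
    }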
@@ -66,3 +66,5 @@ session_crate_name_invalid = crate names cannot start with a `-`, but `{$s}` has
 session_crate_name_empty = crate name must not be empty

 session_invalid_character_in_create_name = invalid character `{$character}` in crate name: `{$crate_name}`
+
+session_expr_parentheses_needed = parentheses are required to parse this as an expression
@@ -8,6 +8,8 @@ doctest = false

 [dependencies]
 tracing = "0.1"
+rustc_ast = { path = "../rustc_ast" }
+rustc_ast_pretty = { path = "../rustc_ast_pretty" }
 rustc_error_messages = { path = "../rustc_error_messages" }
 rustc_serialize = { path = "../rustc_serialize" }
 rustc_span = { path = "../rustc_span" }
@@ -3,6 +3,8 @@ use crate::{
     CodeSuggestion, DiagnosticMessage, EmissionGuarantee, Level, LintDiagnosticBuilder, MultiSpan,
     SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle,
 };
+use rustc_ast as ast;
+use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_error_messages::FluentValue;
 use rustc_hir as hir;
@@ -175,6 +177,24 @@ impl IntoDiagnosticArg for hir::ConstContext {
     }
 }

+impl IntoDiagnosticArg for ast::Path {
+    fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+        DiagnosticArgValue::Str(Cow::Owned(pprust::path_to_string(&self)))
+    }
+}
+
+impl IntoDiagnosticArg for ast::token::Token {
+    fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+        DiagnosticArgValue::Str(pprust::token_to_string(&self))
+    }
+}
+
+impl IntoDiagnosticArg for ast::token::TokenKind {
+    fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+        DiagnosticArgValue::Str(pprust::token_kind_to_string(&self))
+    }
+}
+
 /// Trait implemented by error types. This should not be implemented manually. Instead, use
 /// `#[derive(Subdiagnostic)]` -- see [rustc_macros::Subdiagnostic].
 #[cfg_attr(bootstrap, rustc_diagnostic_item = "AddSubdiagnostic")]
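These `IntoDiagnosticArg` impls are what let the new parser diagnostics carry AST values directly: a field of type `ast::Path` or `Token` is only pretty-printed when the Fluent message is rendered (for example, the `r#type: path` field used later in this diff fills the `{$type}` placeholder in parser.ftl). Hand-written, the equivalent of what the derive generates per field looks roughly like this sketch, with illustrative variable names:

    // What derive-generated code effectively does for a `path: ast::Path` field:
    let mut err = self.struct_span_err(span, fluent::parser::comparison_interpreted_as_generic);
    err.set_arg("type", path); // allowed because ast::Path now implements IntoDiagnosticArg
    err.emit();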
@@ -281,6 +281,8 @@ impl<'a> DiagnosticDeriveVariantBuilder<'a> {
         if should_generate_set_arg(&field) {
             let diag = &self.parent.diag;
             let ident = field.ident.as_ref().unwrap();
+            // strip `r#` prefix, if present
+            let ident = format_ident!("{}", ident);
             return quote! {
                 #diag.set_arg(
                     stringify!(#ident),
@@ -189,6 +189,9 @@ impl<'a> SubdiagnosticDeriveBuilder<'a> {

         let diag = &self.diag;
         let ident = ast.ident.as_ref().unwrap();
+        // strip `r#` prefix, if present
+        let ident = format_ident!("{}", ident);
+
         quote! {
             #diag.set_arg(
                 stringify!(#ident),
compiler/rustc_parse/src/errors.rs (new file, 1251 lines)
File diff suppressed because it is too large
@@ -32,6 +32,8 @@ use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
 pub mod lexer;
 pub mod validate_attr;

+mod errors;
+
 // A bunch of utility functions of the form `parse_<thing>_from_<source>`
 // where <thing> includes crate, expr, item, stmt, tts, and one that
 // uses a HOF to parse anything, and <source> includes file and
@@ -1,27 +1,26 @@
+use crate::errors::{InvalidMetaItem, SuffixedLiteralInAttribute};
+
 use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
 use rustc_ast as ast;
 use rustc_ast::attr;
 use rustc_ast::token::{self, Delimiter, Nonterminal};
-use rustc_ast_pretty::pprust;
-use rustc_errors::{error_code, Diagnostic, PResult};
+use rustc_errors::{error_code, fluent, Diagnostic, IntoDiagnostic, PResult};
 use rustc_span::{sym, BytePos, Span};
 use std::convert::TryInto;

 // Public for rustfmt usage
 #[derive(Debug)]
-pub enum InnerAttrPolicy<'a> {
+pub enum InnerAttrPolicy {
     Permitted,
-    Forbidden { reason: &'a str, saw_doc_comment: bool, prev_outer_attr_sp: Option<Span> },
+    Forbidden(Option<InnerAttrForbiddenReason>),
 }

-const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
-    permitted in this context";
-
-pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPolicy::Forbidden {
-    reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG,
-    saw_doc_comment: false,
-    prev_outer_attr_sp: None,
-};
+#[derive(Clone, Copy, Debug)]
+pub enum InnerAttrForbiddenReason {
+    InCodeBlock,
+    AfterOuterDocComment { prev_doc_comment_span: Span },
+    AfterOuterAttribute { prev_outer_attr_sp: Span },
+}

 enum OuterAttributeType {
     DocComment,
@@ -40,17 +39,15 @@ impl<'a> Parser<'a> {
             let prev_outer_attr_sp = outer_attrs.last().map(|attr| attr.span);

             let inner_error_reason = if just_parsed_doc_comment {
-                "an inner attribute is not permitted following an outer doc comment"
-            } else if prev_outer_attr_sp.is_some() {
-                "an inner attribute is not permitted following an outer attribute"
+                Some(InnerAttrForbiddenReason::AfterOuterDocComment {
+                    prev_doc_comment_span: prev_outer_attr_sp.unwrap(),
+                })
+            } else if let Some(prev_outer_attr_sp) = prev_outer_attr_sp {
+                Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp })
             } else {
-                DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
+                None
             };
-            let inner_parse_policy = InnerAttrPolicy::Forbidden {
-                reason: inner_error_reason,
-                saw_doc_comment: just_parsed_doc_comment,
-                prev_outer_attr_sp,
-            };
+            let inner_parse_policy = InnerAttrPolicy::Forbidden(inner_error_reason);
             just_parsed_doc_comment = false;
             Some(self.parse_attribute(inner_parse_policy)?)
         } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
@@ -58,7 +55,7 @@ impl<'a> Parser<'a> {
             let span = self.token.span;
             let mut err = self.sess.span_diagnostic.struct_span_err_with_code(
                 span,
-                "expected outer doc comment",
+                fluent::parser::inner_doc_comment_not_permitted,
                 error_code!(E0753),
             );
             if let Some(replacement_span) = self.annotate_following_item_if_applicable(
@@ -69,13 +66,10 @@ impl<'a> Parser<'a> {
                     token::CommentKind::Block => OuterAttributeType::DocBlockComment,
                 },
             ) {
-                err.note(
-                    "inner doc comments like this (starting with `//!` or `/*!`) can \
-                    only appear before items",
-                );
+                err.note(fluent::parser::note);
                 err.span_suggestion_verbose(
                     replacement_span,
-                    "you might have meant to write a regular comment",
+                    fluent::parser::suggestion,
                     "",
                     rustc_errors::Applicability::MachineApplicable,
                 );
@@ -113,7 +107,7 @@ impl<'a> Parser<'a> {
     // Public for rustfmt usage.
     pub fn parse_attribute(
         &mut self,
-        inner_parse_policy: InnerAttrPolicy<'_>,
+        inner_parse_policy: InnerAttrPolicy,
     ) -> PResult<'a, ast::Attribute> {
         debug!(
             "parse_attribute: inner_parse_policy={:?} self.token={:?}",
@@ -122,12 +116,10 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         // Attributes can't have attributes of their own [Editor's note: not with that attitude]
         self.collect_tokens_no_attrs(|this| {
-            if this.eat(&token::Pound) {
-                let style = if this.eat(&token::Not) {
-                    ast::AttrStyle::Inner
-                } else {
-                    ast::AttrStyle::Outer
-                };
+            assert!(this.eat(&token::Pound), "parse_attribute called in non-attribute position");
+            let style =
+                if this.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };

             this.expect(&token::OpenDelim(Delimiter::Bracket))?;
             let item = this.parse_attr_item(false)?;
@@ -139,18 +131,7 @@ impl<'a> Parser<'a> {
                 this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
             }

-                Ok(attr::mk_attr_from_item(
-                    &self.sess.attr_id_generator,
-                    item,
-                    None,
-                    style,
-                    attr_sp,
-                ))
-            } else {
-                let token_str = pprust::token_to_string(&this.token);
-                let msg = &format!("expected `#`, found `{token_str}`");
-                Err(this.struct_span_err(this.token.span, msg))
-            }
+            Ok(attr::mk_attr_from_item(&self.sess.attr_id_generator, item, None, style, attr_sp))
         })
     }

@@ -190,21 +171,12 @@ impl<'a> Parser<'a> {
                 ForceCollect::No,
             ) {
                 Ok(Some(item)) => {
-                    let attr_name = match attr_type {
-                        OuterAttributeType::Attribute => "attribute",
-                        _ => "doc comment",
-                    };
-                    err.span_label(
-                        item.span,
-                        &format!("the inner {} doesn't annotate this {}", attr_name, item.kind.descr()),
-                    );
+                    // FIXME(#100717)
+                    err.set_arg("item", item.kind.descr());
+                    err.span_label(item.span, fluent::parser::label_does_not_annotate_this);
                     err.span_suggestion_verbose(
                         replacement_span,
-                        &format!(
-                            "to annotate the {}, change the {} from inner to outer style",
-                            item.kind.descr(),
-                            attr_name
-                        ),
+                        fluent::parser::sugg_change_inner_to_outer,
                         match attr_type {
                             OuterAttributeType::Attribute => "",
                             OuterAttributeType::DocBlockComment => "*",
@@ -222,22 +194,33 @@ impl<'a> Parser<'a> {
         Some(replacement_span)
     }

-    pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy<'_>) {
-        if let InnerAttrPolicy::Forbidden { reason, saw_doc_comment, prev_outer_attr_sp } = policy {
-            let prev_outer_attr_note =
-                if saw_doc_comment { "previous doc comment" } else { "previous outer attribute" };
-            let mut diag = self.struct_span_err(attr_sp, reason);
-
-            if let Some(prev_outer_attr_sp) = prev_outer_attr_sp {
-                diag.span_label(attr_sp, "not permitted following an outer attribute")
-                    .span_label(prev_outer_attr_sp, prev_outer_attr_note);
-            }
-
-            diag.note(
-                "inner attributes, like `#![no_std]`, annotate the item enclosing them, and \
-                are usually found at the beginning of source files",
-            );
+    pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy) {
+        if let InnerAttrPolicy::Forbidden(reason) = policy {
+            let mut diag = match reason.as_ref().copied() {
+                Some(InnerAttrForbiddenReason::AfterOuterDocComment { prev_doc_comment_span }) => {
+                    let mut diag = self.struct_span_err(
+                        attr_sp,
+                        fluent::parser::inner_attr_not_permitted_after_outer_doc_comment,
+                    );
+                    diag.span_label(attr_sp, fluent::parser::label_attr)
+                        .span_label(prev_doc_comment_span, fluent::parser::label_prev_doc_comment);
+                    diag
+                }
+                Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => {
+                    let mut diag = self.struct_span_err(
+                        attr_sp,
+                        fluent::parser::inner_attr_not_permitted_after_outer_attr,
+                    );
+                    diag.span_label(attr_sp, fluent::parser::label_attr)
+                        .span_label(prev_outer_attr_sp, fluent::parser::label_prev_attr);
+                    diag
+                }
+                Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
+                    self.struct_span_err(attr_sp, fluent::parser::inner_attr_not_permitted)
+                }
+            };
+
+            diag.note(fluent::parser::inner_attr_explanation);
             if self
                 .annotate_following_item_if_applicable(
                     &mut diag,
@@ -246,7 +229,7 @@ impl<'a> Parser<'a> {
                 )
                 .is_some()
             {
-                diag.note("outer attributes, like `#[test]`, annotate the item following them");
+                diag.note(fluent::parser::outer_attr_explanation);
             };
             diag.emit();
         }
@@ -337,12 +320,7 @@ impl<'a> Parser<'a> {
         debug!("checking if {:?} is unusuffixed", lit);

         if !lit.kind.is_unsuffixed() {
-            self.struct_span_err(lit.span, "suffixed literals are not allowed in attributes")
-                .help(
-                    "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
-                    use an unsuffixed version (`1`, `1.0`, etc.)",
-                )
-                .emit();
+            self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });
         }

         Ok(lit)
@@ -435,9 +413,8 @@ impl<'a> Parser<'a> {
             Err(err) => err.cancel(),
         }

-        let found = pprust::token_to_string(&self.token);
-        let msg = format!("expected unsuffixed literal or identifier, found `{found}`");
-        Err(self.struct_span_err(self.token.span, &msg))
+        Err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }
+            .into_diagnostic(&self.sess.span_diagnostic))
     }
 }
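Two emission styles coexist in the attr.rs changes above. When the error is final, the struct goes straight to `emit_err`; when the caller needs a diagnostic builder to return inside `PResult::Err`, the struct is first converted with `IntoDiagnostic`. Both lines below are taken from this diff, with only the surrounding code elided:

    // Report immediately and keep parsing:
    self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });

    // Build a DiagnosticBuilder and let the caller propagate it:
    Err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }
        .into_diagnostic(&self.sess.span_diagnostic))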
File diff suppressed because it is too large
@@ -1,27 +1,33 @@
-use super::diagnostics::{
-    CatchAfterTry, CommaAfterBaseStruct, DoCatchSyntaxRemoved, DotDotDot, EqFieldInit,
-    ExpectedElseBlock, ExpectedExpressionFoundLet, FieldExpressionWithGeneric,
-    FloatLiteralRequiresIntegerPart, IfExpressionMissingCondition, IfExpressionMissingThenBlock,
-    IfExpressionMissingThenBlockSub, InvalidBlockMacroSegment, InvalidComparisonOperator,
-    InvalidComparisonOperatorSub, InvalidLogicalOperator, InvalidLogicalOperatorSub,
-    LeftArrowOperator, LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath,
-    MalformedLoopLabel, MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray,
-    NotAsNegationOperator, NotAsNegationOperatorSub, OuterAttributeNotAllowedOnIfElse,
-    RequireColonAfterLabeledExpression, SnapshotParser, TildeAsUnaryOperator,
-    UnexpectedTokenAfterLabel,
-};
+use super::diagnostics::SnapshotParser;
 use super::pat::{CommaRecoveryMode, RecoverColon, RecoverComma, PARAM_EXPECTED};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
     AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
     SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
 };
-use crate::maybe_recover_from_interpolated_ty_qpath;
-use crate::parser::diagnostics::{
-    IntLiteralTooLarge, InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth,
-    InvalidIntLiteralWidth, InvalidNumLiteralBasePrefix, InvalidNumLiteralSuffix,
-    MissingCommaAfterMatchArm,
+use crate::errors::{
+    ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
+    BinaryFloatLiteralNotSupported, BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct,
+    ComparisonInterpretedAsGeneric, ComparisonOrShiftInterpretedAsGenericSugg,
+    DoCatchSyntaxRemoved, DotDotDot, EqFieldInit, ExpectedElseBlock, ExpectedExpressionFoundLet,
+    FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart, FoundExprWouldBeStmt,
+    HexadecimalFloatLiteralNotSupported, IfExpressionMissingCondition,
+    IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub, IntLiteralTooLarge,
+    InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
+    InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth, InvalidIntLiteralWidth,
+    InvalidInterpolatedExpression, InvalidLiteralSuffix, InvalidLiteralSuffixOnTupleIndex,
+    InvalidLogicalOperator, InvalidLogicalOperatorSub, InvalidNumLiteralBasePrefix,
+    InvalidNumLiteralSuffix, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
+    LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
+    MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
+    MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray, NoFieldsForFnCall,
+    NotAsNegationOperator, NotAsNegationOperatorSub, OctalFloatLiteralNotSupported,
+    OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
+    RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
+    StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
+    UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
 };
+use crate::maybe_recover_from_interpolated_ty_qpath;

 use core::mem;
 use rustc_ast::ptr::P;
@@ -38,6 +44,7 @@ use rustc_ast::{ClosureBinder, StmtKind};
 use rustc_ast_pretty::pprust;
 use rustc_errors::IntoDiagnostic;
 use rustc_errors::{Applicability, Diagnostic, PResult};
+use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
 use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_span::source_map::{self, Span, Spanned};
@@ -421,13 +428,11 @@ impl<'a> Parser<'a> {
     /// but the next token implies this should be parsed as an expression.
     /// For example: `if let Some(x) = x { x } else { 0 } / 2`.
     fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
-        let mut err = self.struct_span_err(
-            self.token.span,
-            &format!("expected expression, found `{}`", pprust::token_to_string(&self.token),),
-        );
-        err.span_label(self.token.span, "expected expression");
-        self.sess.expr_parentheses_needed(&mut err, lhs.span);
-        err.emit();
+        self.sess.emit_err(FoundExprWouldBeStmt {
+            span: self.token.span,
+            token: self.token.clone(),
+            suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
+        });
     }

     /// Possibly translate the current token to an associative operator.
@@ -578,21 +583,16 @@ impl<'a> Parser<'a> {
                 make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
             }
             token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
-                let mut err = this.struct_span_err(lo, "leading `+` is not supported");
-                err.span_label(lo, "unexpected `+`");
+                let mut err =
+                    LeadingPlusNotSupported { span: lo, remove_plus: None, add_parentheses: None };

                 // a block on the LHS might have been intended to be an expression instead
                 if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
-                    this.sess.expr_parentheses_needed(&mut err, *sp);
+                    err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp));
                 } else {
-                    err.span_suggestion_verbose(
-                        lo,
-                        "try removing the `+`",
-                        "",
-                        Applicability::MachineApplicable,
-                    );
+                    err.remove_plus = Some(lo);
                 }
-                err.emit();
+                this.sess.emit_err(err);

                 this.bump();
                 this.parse_prefix_expr(None)
@@ -755,9 +755,34 @@ impl<'a> Parser<'a> {

         match self.parse_path(PathStyle::Expr) {
             Ok(path) => {
-                let (op_noun, op_verb) = match self.token.kind {
-                    token::Lt => ("comparison", "comparing"),
-                    token::BinOp(token::Shl) => ("shift", "shifting"),
+                let span_after_type = parser_snapshot_after_type.token.span;
+                let expr = mk_expr(
+                    self,
+                    lhs,
+                    self.mk_ty(path.span, TyKind::Path(None, path.clone())),
+                );
+
+                let args_span = self.look_ahead(1, |t| t.span).to(span_after_type);
+                let suggestion = ComparisonOrShiftInterpretedAsGenericSugg {
+                    left: expr.span.shrink_to_lo(),
+                    right: expr.span.shrink_to_hi(),
+                };
+
+                match self.token.kind {
+                    token::Lt => self.sess.emit_err(ComparisonInterpretedAsGeneric {
+                        comparison: self.token.span,
+                        r#type: path,
+                        args: args_span,
+                        suggestion,
+                    }),
+                    token::BinOp(token::Shl) => {
+                        self.sess.emit_err(ShiftInterpretedAsGeneric {
+                            shift: self.token.span,
+                            r#type: path,
+                            args: args_span,
+                            suggestion,
+                        })
+                    }
                     _ => {
                         // We can end up here even without `<` being the next token, for
                         // example because `parse_ty_no_plus` returns `Err` on keywords,
@ -771,33 +796,7 @@ impl<'a> Parser<'a> {
|
||||||
// Successfully parsed the type path leaving a `<` yet to parse.
|
// Successfully parsed the type path leaving a `<` yet to parse.
|
||||||
type_err.cancel();
|
type_err.cancel();
|
||||||
|
|
||||||
// Report non-fatal diagnostics, keep `x as usize` as an expression
|
// Keep `x as usize` as an expression in AST and continue parsing.
|
||||||
// in AST and continue parsing.
|
|
||||||
let msg = format!(
|
|
||||||
"`<` is interpreted as a start of generic arguments for `{}`, not a {}",
|
|
||||||
pprust::path_to_string(&path),
|
|
||||||
op_noun,
|
|
||||||
);
|
|
||||||
let span_after_type = parser_snapshot_after_type.token.span;
|
|
||||||
let expr =
|
|
||||||
mk_expr(self, lhs, self.mk_ty(path.span, TyKind::Path(None, path)));
|
|
||||||
|
|
||||||
self.struct_span_err(self.token.span, &msg)
|
|
||||||
.span_label(
|
|
||||||
self.look_ahead(1, |t| t.span).to(span_after_type),
|
|
||||||
"interpreted as generic arguments",
|
|
||||||
)
|
|
||||||
.span_label(self.token.span, format!("not interpreted as {op_noun}"))
|
|
||||||
.multipart_suggestion(
|
|
||||||
&format!("try {op_verb} the cast value"),
|
|
||||||
vec![
|
|
||||||
(expr.span.shrink_to_lo(), "(".to_string()),
|
|
||||||
(expr.span.shrink_to_hi(), ")".to_string()),
|
|
||||||
],
|
|
||||||
Applicability::MachineApplicable,
|
|
||||||
)
|
|
||||||
.emit();
|
|
||||||
|
|
||||||
expr
|
expr
|
||||||
}
|
}
|
||||||
Err(path_err) => {
|
Err(path_err) => {
|
||||||
|
@ -1158,7 +1157,9 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
let span = self.prev_token.span;
|
let span = self.prev_token.span;
|
||||||
let field = ExprKind::Field(base, Ident::new(field, span));
|
let field = ExprKind::Field(base, Ident::new(field, span));
|
||||||
self.expect_no_suffix(span, "a tuple index", suffix);
|
if let Some(suffix) = suffix {
|
||||||
|
self.expect_no_tuple_index_suffix(span, suffix);
|
||||||
|
}
|
||||||
self.mk_expr(lo.to(span), field)
|
self.mk_expr(lo.to(span), field)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1196,9 +1197,8 @@ impl<'a> Parser<'a> {
|
||||||
) -> Option<P<Expr>> {
|
) -> Option<P<Expr>> {
|
||||||
match (seq.as_mut(), snapshot) {
|
match (seq.as_mut(), snapshot) {
|
||||||
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
|
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
|
||||||
let name = pprust::path_to_string(&path);
|
|
||||||
snapshot.bump(); // `(`
|
snapshot.bump(); // `(`
|
||||||
match snapshot.parse_struct_fields(path, false, Delimiter::Parenthesis) {
|
match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) {
|
||||||
Ok((fields, ..))
|
Ok((fields, ..))
|
||||||
if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
|
if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
|
||||||
{
|
{
|
||||||
|
@ -1208,29 +1208,25 @@ impl<'a> Parser<'a> {
|
||||||
let close_paren = self.prev_token.span;
|
let close_paren = self.prev_token.span;
|
||||||
let span = lo.to(self.prev_token.span);
|
let span = lo.to(self.prev_token.span);
|
||||||
if !fields.is_empty() {
|
if !fields.is_empty() {
|
||||||
let replacement_err = self.struct_span_err(
|
let mut replacement_err = ParenthesesWithStructFields {
|
||||||
span,
|
span,
|
||||||
"invalid `struct` delimiters or `fn` call arguments",
|
r#type: path,
|
||||||
);
|
braces_for_struct: BracesForStructLiteral {
|
||||||
mem::replace(err, replacement_err).cancel();
|
first: open_paren,
|
||||||
|
second: close_paren,
|
||||||
err.multipart_suggestion(
|
},
|
||||||
&format!("if `{name}` is a struct, use braces as delimiters"),
|
no_fields_for_fn: NoFieldsForFnCall {
|
||||||
vec![
|
fields: fields
|
||||||
(open_paren, " { ".to_string()),
|
|
||||||
(close_paren, " }".to_string()),
|
|
||||||
],
|
|
||||||
Applicability::MaybeIncorrect,
|
|
||||||
);
|
|
||||||
err.multipart_suggestion(
|
|
||||||
&format!("if `{name}` is a function, use the arguments directly"),
|
|
||||||
fields
|
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|field| (field.span.until(field.expr.span), String::new()))
|
.map(|field| field.span.until(field.expr.span))
|
||||||
.collect(),
|
.collect(),
|
||||||
Applicability::MaybeIncorrect,
|
},
|
||||||
);
|
}
|
||||||
err.emit();
|
.into_diagnostic(&self.sess.span_diagnostic);
|
||||||
|
replacement_err.emit();
|
||||||
|
|
||||||
|
let old_err = mem::replace(err, replacement_err);
|
||||||
|
old_err.cancel();
|
||||||
} else {
|
} else {
|
||||||
err.emit();
|
err.emit();
|
||||||
}
|
}
|
||||||
|
@ -1537,15 +1533,19 @@ impl<'a> Parser<'a> {
|
||||||
&& (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
|
&& (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
|
||||||
{
|
{
|
||||||
// We're probably inside of a `Path<'a>` that needs a turbofish
|
// We're probably inside of a `Path<'a>` that needs a turbofish
|
||||||
self.sess.emit_err(UnexpectedTokenAfterLabel(self.token.span));
|
self.sess.emit_err(UnexpectedTokenAfterLabel {
|
||||||
|
span: self.token.span,
|
||||||
|
remove_label: None,
|
||||||
|
enclose_in_block: None,
|
||||||
|
});
|
||||||
consume_colon = false;
|
consume_colon = false;
|
||||||
Ok(self.mk_expr_err(lo))
|
Ok(self.mk_expr_err(lo))
|
||||||
} else {
|
} else {
|
||||||
// FIXME: use UnexpectedTokenAfterLabel, needs multipart suggestions
|
let mut err = UnexpectedTokenAfterLabel {
|
||||||
let msg = "expected `while`, `for`, `loop` or `{` after a label";
|
span: self.token.span,
|
||||||
|
remove_label: None,
|
||||||
let mut err = self.struct_span_err(self.token.span, msg);
|
enclose_in_block: None,
|
||||||
err.span_label(self.token.span, msg);
|
};
|
||||||
|
|
||||||
// Continue as an expression in an effort to recover on `'label: non_block_expr`.
|
// Continue as an expression in an effort to recover on `'label: non_block_expr`.
|
||||||
let expr = self.parse_expr().map(|expr| {
|
let expr = self.parse_expr().map(|expr| {
|
||||||
|
@ -1572,28 +1572,15 @@ impl<'a> Parser<'a> {
|
||||||
// If there are no breaks that may use this label, suggest removing the label and
|
// If there are no breaks that may use this label, suggest removing the label and
|
||||||
// recover to the unmodified expression.
|
// recover to the unmodified expression.
|
||||||
if !found_labeled_breaks {
|
if !found_labeled_breaks {
|
||||||
let msg = "consider removing the label";
|
err.remove_label = Some(lo.until(span));
|
||||||
err.span_suggestion_verbose(
|
|
||||||
lo.until(span),
|
|
||||||
msg,
|
|
||||||
"",
|
|
||||||
Applicability::MachineApplicable,
|
|
||||||
);
|
|
||||||
|
|
||||||
return expr;
|
return expr;
|
||||||
}
|
}
|
||||||
|
|
||||||
let sugg_msg = "consider enclosing expression in a block";
|
err.enclose_in_block = Some(UnexpectedTokenAfterLabelSugg {
|
||||||
let suggestions = vec![
|
left: span.shrink_to_lo(),
|
||||||
(span.shrink_to_lo(), "{ ".to_owned()),
|
right: span.shrink_to_hi(),
|
||||||
(span.shrink_to_hi(), " }".to_owned()),
|
});
|
||||||
];
|
|
||||||
|
|
||||||
err.multipart_suggestion_verbose(
|
|
||||||
sugg_msg,
|
|
||||||
suggestions,
|
|
||||||
Applicability::MachineApplicable,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`.
|
// Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`.
|
||||||
let stmt = self.mk_stmt(span, StmtKind::Expr(expr));
|
let stmt = self.mk_stmt(span, StmtKind::Expr(expr));
|
||||||
|
@ -1601,7 +1588,7 @@ impl<'a> Parser<'a> {
|
||||||
self.mk_expr(span, ExprKind::Block(blk, label))
|
self.mk_expr(span, ExprKind::Block(blk, label))
|
||||||
});
|
});
|
||||||
|
|
||||||
err.emit();
|
self.sess.emit_err(err);
|
||||||
expr
|
expr
|
||||||
}?;
|
}?;
|
||||||
|
|
||||||
|
@ -1672,19 +1659,13 @@ impl<'a> Parser<'a> {
|
||||||
// The value expression can be a labeled loop, see issue #86948, e.g.:
|
// The value expression can be a labeled loop, see issue #86948, e.g.:
|
||||||
// `loop { break 'label: loop { break 'label 42; }; }`
|
// `loop { break 'label: loop { break 'label 42; }; }`
|
||||||
let lexpr = self.parse_labeled_expr(label.take().unwrap(), true)?;
|
let lexpr = self.parse_labeled_expr(label.take().unwrap(), true)?;
|
||||||
self.struct_span_err(
|
self.sess.emit_err(LabeledLoopInBreak {
|
||||||
lexpr.span,
|
span: lexpr.span,
|
||||||
"parentheses are required around this expression to avoid confusion with a labeled break expression",
|
sub: WrapExpressionInParentheses {
|
||||||
)
|
left: lexpr.span.shrink_to_lo(),
|
||||||
.multipart_suggestion(
|
right: lexpr.span.shrink_to_hi(),
|
||||||
"wrap the expression in parentheses",
|
},
|
||||||
vec![
|
});
|
||||||
(lexpr.span.shrink_to_lo(), "(".to_string()),
|
|
||||||
(lexpr.span.shrink_to_hi(), ")".to_string()),
|
|
||||||
],
|
|
||||||
Applicability::MachineApplicable,
|
|
||||||
)
|
|
||||||
.emit();
|
|
||||||
Some(lexpr)
|
Some(lexpr)
|
||||||
} else if self.token != token::OpenDelim(Delimiter::Brace)
|
} else if self.token != token::OpenDelim(Delimiter::Brace)
|
||||||
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
|
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
|
||||||
|
@ -1756,9 +1737,8 @@ impl<'a> Parser<'a> {
|
||||||
};
|
};
|
||||||
if let Some(expr) = expr {
|
if let Some(expr) = expr {
|
||||||
if matches!(expr.kind, ExprKind::Err) {
|
if matches!(expr.kind, ExprKind::Err) {
|
||||||
let mut err = self
|
let mut err = InvalidInterpolatedExpression { span: self.token.span }
|
||||||
.diagnostic()
|
.into_diagnostic(&self.sess.span_diagnostic);
|
||||||
.struct_span_err(self.token.span, "invalid interpolated expression");
|
|
||||||
err.downgrade_to_delayed_bug();
|
err.downgrade_to_delayed_bug();
|
||||||
return err;
|
return err;
|
||||||
}
|
}
|
||||||
|
@ -1790,7 +1770,10 @@ impl<'a> Parser<'a> {
|
||||||
});
|
});
|
||||||
if let Some(token) = &recovered {
|
if let Some(token) = &recovered {
|
||||||
self.bump();
|
self.bump();
|
||||||
self.error_float_lits_must_have_int_part(&token);
|
self.sess.emit_err(FloatLiteralRequiresIntegerPart {
|
||||||
|
span: token.span,
|
||||||
|
correct: pprust::token_to_string(token).into_owned(),
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1818,13 +1801,6 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn error_float_lits_must_have_int_part(&self, token: &Token) {
|
|
||||||
self.sess.emit_err(FloatLiteralRequiresIntegerPart {
|
|
||||||
span: token.span,
|
|
||||||
correct: pprust::token_to_string(token).into_owned(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
|
fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
|
||||||
// Checks if `s` looks like i32 or u1234 etc.
|
// Checks if `s` looks like i32 or u1234 etc.
|
||||||
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
|
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
|
||||||
|
@ -1853,11 +1829,13 @@ impl<'a> Parser<'a> {
|
||||||
// by lexer, so here we don't report it the second time.
|
// by lexer, so here we don't report it the second time.
|
||||||
LitError::LexerError => {}
|
LitError::LexerError => {}
|
||||||
LitError::InvalidSuffix => {
|
LitError::InvalidSuffix => {
|
||||||
self.expect_no_suffix(
|
if let Some(suffix) = suffix {
|
||||||
|
self.sess.emit_err(InvalidLiteralSuffix {
|
||||||
span,
|
span,
|
||||||
&format!("{} {} literal", kind.article(), kind.descr()),
|
kind: format!("{}", kind.descr()),
|
||||||
suffix,
|
suffix,
|
||||||
);
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
LitError::InvalidIntSuffix => {
|
LitError::InvalidIntSuffix => {
|
||||||
let suf = suffix.expect("suffix error with no suffix");
|
let suf = suffix.expect("suffix error with no suffix");
|
||||||
|
@ -1883,15 +1861,12 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
LitError::NonDecimalFloat(base) => {
|
LitError::NonDecimalFloat(base) => {
|
||||||
let descr = match base {
|
match base {
|
||||||
16 => "hexadecimal",
|
16 => self.sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
|
||||||
8 => "octal",
|
8 => self.sess.emit_err(OctalFloatLiteralNotSupported { span }),
|
||||||
2 => "binary",
|
2 => self.sess.emit_err(BinaryFloatLiteralNotSupported { span }),
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
};
|
};
|
||||||
self.struct_span_err(span, &format!("{descr} float literal is not supported"))
|
|
||||||
.span_label(span, "not supported")
|
|
||||||
.emit();
|
|
||||||
}
|
}
|
||||||
LitError::IntTooLarge => {
|
LitError::IntTooLarge => {
|
||||||
self.sess.emit_err(IntLiteralTooLarge { span });
|
self.sess.emit_err(IntLiteralTooLarge { span });
|
||||||
|
@ -1899,38 +1874,17 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) {
|
pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) {
|
||||||
if let Some(suf) = suffix {
|
if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
|
||||||
let mut err = if kind == "a tuple index"
|
|
||||||
&& [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf)
|
|
||||||
{
|
|
||||||
// #59553: warn instead of reject out of hand to allow the fix to percolate
|
// #59553: warn instead of reject out of hand to allow the fix to percolate
|
||||||
// through the ecosystem when people fix their macros
|
// through the ecosystem when people fix their macros
|
||||||
let mut err = self
|
self.sess.emit_warning(InvalidLiteralSuffixOnTupleIndex {
|
||||||
.sess
|
span,
|
||||||
.span_diagnostic
|
suffix,
|
||||||
.struct_span_warn(sp, &format!("suffixes on {kind} are invalid"));
|
exception: Some(()),
|
||||||
err.note(&format!(
|
});
|
||||||
"`{}` is *temporarily* accepted on tuple index fields as it was \
|
|
||||||
incorrectly accepted on stable for a few releases",
|
|
||||||
suf,
|
|
||||||
));
|
|
||||||
err.help(
|
|
||||||
"on proc macros, you'll want to use `syn::Index::from` or \
|
|
||||||
`proc_macro::Literal::*_unsuffixed` for code that will desugar \
|
|
||||||
to tuple field access",
|
|
||||||
);
|
|
||||||
err.note(
|
|
||||||
"see issue #60210 <https://github.com/rust-lang/rust/issues/60210> \
|
|
||||||
for more information",
|
|
||||||
);
|
|
||||||
err
|
|
||||||
} else {
|
} else {
|
||||||
self.struct_span_err(sp, &format!("suffixes on {kind} are invalid"))
|
self.sess.emit_err(InvalidLiteralSuffixOnTupleIndex { span, suffix, exception: None });
|
||||||
.forget_guarantee()
|
|
||||||
};
|
|
||||||
err.span_label(sp, format!("invalid suffix `{suf}`"));
|
|
||||||
err.emit();
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -1964,14 +1918,13 @@ impl<'a> Parser<'a> {
 let mut snapshot = self.create_snapshot_for_diagnostic();
 match snapshot.parse_array_or_repeat_expr(Delimiter::Brace) {
 Ok(arr) => {
-let hi = snapshot.prev_token.span;
-self.struct_span_err(arr.span, "this is a block expression, not an array")
-.multipart_suggestion(
-"to make an array, use square brackets instead of curly braces",
-vec![(lo, "[".to_owned()), (hi, "]".to_owned())],
-Applicability::MaybeIncorrect,
-)
-.emit();
+self.sess.emit_err(ArrayBracketsInsteadOfSpaces {
+span: arr.span,
+sub: ArrayBracketsInsteadOfSpacesSugg {
+left: lo,
+right: snapshot.prev_token.span,
+},
+});

 self.restore_snapshot(snapshot);
 Some(self.mk_expr_err(arr.span))
@@ -2134,7 +2087,8 @@ impl<'a> Parser<'a> {
 // Check for `move async` and recover
 if self.check_keyword(kw::Async) {
 let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
-Err(self.incorrect_move_async_order_found(move_async_span))
+Err(AsyncMoveOrderIncorrect { span: move_async_span }
+.into_diagnostic(&self.sess.span_diagnostic))
 } else {
 Ok(CaptureBy::Value)
 }
@@ -2515,39 +2469,22 @@ impl<'a> Parser<'a> {
 self.bump(); // `;`
 let mut stmts =
 vec![self.mk_stmt(first_expr.span, ast::StmtKind::Expr(first_expr.clone()))];
-let err = |this: &mut Parser<'_>, stmts: Vec<ast::Stmt>| {
+let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
 let span = stmts[0].span.to(stmts[stmts.len() - 1].span);
-let mut err = this.struct_span_err(span, "`match` arm body without braces");
-let (these, s, are) =
-if stmts.len() > 1 { ("these", "s", "are") } else { ("this", "", "is") };
-err.span_label(
-span,
-&format!(
-"{these} statement{s} {are} not surrounded by a body",
-these = these,
-s = s,
-are = are
-),
-);
-err.span_label(arrow_span, "while parsing the `match` arm starting here");
-if stmts.len() > 1 {
-err.multipart_suggestion(
-&format!("surround the statement{s} with a body"),
-vec![
-(span.shrink_to_lo(), "{ ".to_string()),
-(span.shrink_to_hi(), " }".to_string()),
-],
-Applicability::MachineApplicable,
-);
-} else {
-err.span_suggestion(
-semi_sp,
-"use a comma to end a `match` arm expression",
-",",
-Applicability::MachineApplicable,
-);
+this.sess.emit_err(MatchArmBodyWithoutBraces {
+statements: span,
+arrow: arrow_span,
+num_statements: stmts.len(),
+sub: if stmts.len() > 1 {
+MatchArmBodyWithoutBracesSugg::AddBraces {
+left: span.shrink_to_lo(),
+right: span.shrink_to_hi(),
 }
-err.emit();
+} else {
+MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
+},
+});
 this.mk_expr_err(span)
 };
 // We might have either a `,` -> `;` typo, or a block without braces. We need
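Note: the `sub` field above selects between two suggestion shapes. A sketch of how such a subdiagnostic enum might be declared, mirroring the `Subdiagnostic` derive shown later in this diff; the slugs and exact attributes are assumptions.

use rustc_macros::Subdiagnostic;
use rustc_span::Span;

// Sketch only: variant shapes follow the call site above, slugs are assumed.
#[derive(Subdiagnostic)]
pub(crate) enum MatchArmBodyWithoutBracesSugg {
    #[multipart_suggestion(parser::suggestion_add_braces, applicability = "machine-applicable")]
    AddBraces {
        // Braces are doubled because `code` strings are format strings.
        #[suggestion_part(code = "{{ ")]
        left: Span,
        #[suggestion_part(code = " }}")]
        right: Span,
    },
    #[suggestion(parser::suggestion_use_comma_not_semicolon, code = ",", applicability = "machine-applicable")]
    UseComma {
        #[primary_span]
        semicolon: Span,
    },
}
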
@@ -2836,23 +2773,19 @@ impl<'a> Parser<'a> {
 let expr = self.parse_struct_expr(qself.cloned(), path.clone(), true);
 if let (Ok(expr), false) = (&expr, struct_allowed) {
 // This is a struct literal, but we don't can't accept them here.
-self.error_struct_lit_not_allowed_here(path.span, expr.span);
+self.sess.emit_err(StructLiteralNotAllowedHere {
+span: expr.span,
+sub: StructLiteralNotAllowedHereSugg {
+left: path.span.shrink_to_lo(),
+right: expr.span.shrink_to_hi(),
+},
+});
 }
 return Some(expr);
 }
 None
 }

-fn error_struct_lit_not_allowed_here(&self, lo: Span, sp: Span) {
-self.struct_span_err(sp, "struct literals are not allowed here")
-.multipart_suggestion(
-"surround the struct literal with parentheses",
-vec![(lo.shrink_to_lo(), "(".to_string()), (sp.shrink_to_hi(), ")".to_string())],
-Applicability::MachineApplicable,
-)
-.emit();
-}

 pub(super) fn parse_struct_fields(
 &mut self,
 pth: ast::Path,

@@ -1,4 +1,6 @@
-use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error, UseEmptyBlockNotSemi};
+use crate::errors::{DocCommentDoesNotDocumentAnything, UseEmptyBlockNotSemi};
+
+use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};

@@ -13,7 +15,7 @@ use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, Vari
 use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
 use rustc_ast::{MacArgs, MacCall, MacDelimiter};
 use rustc_ast_pretty::pprust;
-use rustc_errors::{struct_span_err, Applicability, PResult, StashKey};
+use rustc_errors::{struct_span_err, Applicability, IntoDiagnostic, PResult, StashKey};
 use rustc_span::edition::Edition;
 use rustc_span::lev_distance::lev_distance;
 use rustc_span::source_map::{self, Span};
@@ -1584,7 +1586,10 @@ impl<'a> Parser<'a> {
 token::CloseDelim(Delimiter::Brace) => {}
 token::DocComment(..) => {
 let previous_span = self.prev_token.span;
-let mut err = self.span_err(self.token.span, Error::UselessDocComment);
+let mut err = DocCommentDoesNotDocumentAnything {
+span: self.token.span,
+missing_comma: None,
+};
 self.bump(); // consume the doc comment
 let comma_after_doc_seen = self.eat(&token::Comma);
 // `seen_comma` is always false, because we are inside doc block
@@ -1593,18 +1598,13 @@ impl<'a> Parser<'a> {
 seen_comma = true;
 }
 if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
-err.emit();
+self.sess.emit_err(err);
 } else {
 if !seen_comma {
 let sp = self.sess.source_map().next_point(previous_span);
-err.span_suggestion(
-sp,
-"missing comma here",
-",",
-Applicability::MachineApplicable,
-);
+err.missing_comma = Some(sp);
 }
-return Err(err);
+return Err(err.into_diagnostic(&self.sess.span_diagnostic));
 }
 }
 _ => {

@ -13,7 +13,6 @@ mod ty;
|
||||||
use crate::lexer::UnmatchedBrace;
|
use crate::lexer::UnmatchedBrace;
|
||||||
pub use attr_wrapper::AttrWrapper;
|
pub use attr_wrapper::AttrWrapper;
|
||||||
pub use diagnostics::AttemptLocalParseRecovery;
|
pub use diagnostics::AttemptLocalParseRecovery;
|
||||||
use diagnostics::Error;
|
|
||||||
pub(crate) use item::FnParseMode;
|
pub(crate) use item::FnParseMode;
|
||||||
pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
|
pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
|
||||||
pub use path::PathStyle;
|
pub use path::PathStyle;
|
||||||
|
@ -32,7 +31,7 @@ use rustc_ast_pretty::pprust;
|
||||||
use rustc_data_structures::fx::FxHashMap;
|
use rustc_data_structures::fx::FxHashMap;
|
||||||
use rustc_errors::PResult;
|
use rustc_errors::PResult;
|
||||||
use rustc_errors::{
|
use rustc_errors::{
|
||||||
struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, MultiSpan,
|
Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
|
||||||
};
|
};
|
||||||
use rustc_session::parse::ParseSess;
|
use rustc_session::parse::ParseSess;
|
||||||
use rustc_span::source_map::{Span, DUMMY_SP};
|
use rustc_span::source_map::{Span, DUMMY_SP};
|
||||||
|
@ -41,6 +40,11 @@ use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
use std::{cmp, mem, slice};
|
use std::{cmp, mem, slice};
|
||||||
|
|
||||||
|
use crate::errors::{
|
||||||
|
DocCommentDoesNotDocumentAnything, IncorrectVisibilityRestriction, MismatchedClosingDelimiter,
|
||||||
|
NonStringAbiLiteral,
|
||||||
|
};
|
||||||
|
|
||||||
bitflags::bitflags! {
|
bitflags::bitflags! {
|
||||||
struct Restrictions: u8 {
|
struct Restrictions: u8 {
|
||||||
const STMT_EXPR = 1 << 0;
|
const STMT_EXPR = 1 << 0;
|
||||||
|
@@ -406,22 +410,37 @@ pub enum FollowedByType {
 No,
 }

-fn token_descr_opt(token: &Token) -> Option<&'static str> {
-Some(match token.kind {
-_ if token.is_special_ident() => "reserved identifier",
-_ if token.is_used_keyword() => "keyword",
-_ if token.is_unused_keyword() => "reserved keyword",
-token::DocComment(..) => "doc comment",
-_ => return None,
-})
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum TokenDescription {
+ReservedIdentifier,
+Keyword,
+ReservedKeyword,
+DocComment,
+}
+
+impl TokenDescription {
+pub fn from_token(token: &Token) -> Option<Self> {
+match token.kind {
+_ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
+_ if token.is_used_keyword() => Some(TokenDescription::Keyword),
+_ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
+token::DocComment(..) => Some(TokenDescription::DocComment),
+_ => None,
+}
+}
 }

 pub(super) fn token_descr(token: &Token) -> String {
-let token_str = pprust::token_to_string(token);
-match token_descr_opt(token) {
-Some(prefix) => format!("{} `{}`", prefix, token_str),
-_ => format!("`{}`", token_str),
-}
+let name = pprust::token_to_string(token).to_string();
+
+let kind = TokenDescription::from_token(token).map(|kind| match kind {
+TokenDescription::ReservedIdentifier => "reserved identifier",
+TokenDescription::Keyword => "keyword",
+TokenDescription::ReservedKeyword => "reserved keyword",
+TokenDescription::DocComment => "doc comment",
+});
+
+if let Some(kind) = kind { format!("{} `{}`", kind, name) } else { format!("`{}`", name) }
 }

 impl<'a> Parser<'a> {
@@ -518,9 +537,11 @@ impl<'a> Parser<'a> {

 fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> {
 self.token.ident().ok_or_else(|| match self.prev_token.kind {
-TokenKind::DocComment(..) => {
-self.span_err(self.prev_token.span, Error::UselessDocComment)
+TokenKind::DocComment(..) => DocCommentDoesNotDocumentAnything {
+span: self.prev_token.span,
+missing_comma: None,
 }
+.into_diagnostic(&self.sess.span_diagnostic),
 _ => self.expected_ident_found(),
 })
 }
@@ -1144,7 +1165,9 @@ impl<'a> Parser<'a> {
 fn parse_field_name(&mut self) -> PResult<'a, Ident> {
 if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
 {
-self.expect_no_suffix(self.token.span, "a tuple index", suffix);
+if let Some(suffix) = suffix {
+self.expect_no_tuple_index_suffix(self.token.span, suffix);
+}
 self.bump();
 Ok(Ident::new(symbol, self.prev_token.span))
 } else {
@@ -1342,23 +1365,8 @@ impl<'a> Parser<'a> {
 let path = self.parse_path(PathStyle::Mod)?;
 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`

-let msg = "incorrect visibility restriction";
-let suggestion = r##"some possible visibility restrictions are:
-`pub(crate)`: visible only on the current crate
-`pub(super)`: visible only in the current module's parent
-`pub(in path::to::module)`: visible only on the specified path"##;

 let path_str = pprust::path_to_string(&path);
-struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
-.help(suggestion)
-.span_suggestion(
-path.span,
-&format!("make this visible only to module `{}` with `in`", path_str),
-format!("in {}", path_str),
-Applicability::MachineApplicable,
-)
-.emit();
+self.sess.emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });

 Ok(())
 }
@@ -1384,14 +1392,7 @@ impl<'a> Parser<'a> {
 Err(Some(lit)) => match lit.kind {
 ast::LitKind::Err => None,
 _ => {
-self.struct_span_err(lit.span, "non-string ABI literal")
-.span_suggestion(
-lit.span,
-"specify the ABI with a string literal",
-"\"C\"",
-Applicability::MaybeIncorrect,
-)
-.emit();
+self.sess.emit_err(NonStringAbiLiteral { span: lit.span });
 None
 }
 },
@@ -1432,25 +1433,18 @@ pub(crate) fn make_unclosed_delims_error(
 // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
 // `unmatched_braces` only for error recovery in the `Parser`.
 let found_delim = unmatched.found_delim?;
-let span: MultiSpan = if let Some(sp) = unmatched.unclosed_span {
-vec![unmatched.found_span, sp].into()
-} else {
-unmatched.found_span.into()
-};
-let mut err = sess.span_diagnostic.struct_span_err(
-span,
-&format!(
-"mismatched closing delimiter: `{}`",
-pprust::token_kind_to_string(&token::CloseDelim(found_delim)),
-),
-);
-err.span_label(unmatched.found_span, "mismatched closing delimiter");
-if let Some(sp) = unmatched.candidate_span {
-err.span_label(sp, "closing delimiter possibly meant for this");
-}
+let mut spans = vec![unmatched.found_span];
 if let Some(sp) = unmatched.unclosed_span {
-err.span_label(sp, "unclosed delimiter");
+spans.push(sp);
+};
+let err = MismatchedClosingDelimiter {
+spans,
+delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
+unmatched: unmatched.found_span,
+opening_candidate: unmatched.candidate_span,
+unclosed: unmatched.unclosed_span,
 }
+.into_diagnostic(&sess.span_diagnostic);
 Some(err)
 }

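Note: the new call site builds the diagnostic with `into_diagnostic` rather than `emit_err`, which hands back the `DiagnosticBuilder` so `make_unclosed_delims_error` can return it instead of emitting on the spot. A sketch of how the struct itself might be declared (field names follow the call site above; slugs and label attributes are assumptions):

use rustc_macros::Diagnostic;
use rustc_span::Span;

// Sketch only: the real definition lives in the new errors module.
#[derive(Diagnostic)]
#[diag(parser::mismatched_closing_delimiter)]
pub(crate) struct MismatchedClosingDelimiter {
    #[primary_span]
    pub spans: Vec<Span>,
    pub delimiter: String,
    #[label(parser::label_unmatched)]
    pub unmatched: Span,
    #[label(parser::label_opening_candidate)]
    pub opening_candidate: Option<Span>,
    #[label(parser::label_unclosed)]
    pub unclosed: Option<Span>,
}
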
@@ -1,5 +1,5 @@
 use super::{ForceCollect, Parser, PathStyle, TrailingToken};
-use crate::parser::diagnostics::RemoveLet;
+use crate::errors::RemoveLet;
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
 use rustc_ast::ptr::P;

@@ -1,7 +1,5 @@
-use super::attr::DEFAULT_INNER_ATTR_FORBIDDEN;
-use super::diagnostics::{
-AttemptLocalParseRecovery, Error, InvalidVariableDeclaration, InvalidVariableDeclarationSub,
-};
+use super::attr::InnerAttrForbiddenReason;
+use super::diagnostics::AttemptLocalParseRecovery;
 use super::expr::LhsExpr;
 use super::pat::RecoverComma;
 use super::path::PathStyle;
@@ -9,6 +7,12 @@ use super::TrailingToken;
 use super::{
 AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
 };
+use crate::errors::{
+AssignmentElseNotAllowed, CompoundAssignmentExpressionInLet, ConstLetMutuallyExclusive,
+DocCommentDoesNotDocumentAnything, ExpectedStatementAfterOuterAttr, InvalidCurlyInLetElse,
+InvalidExpressionInLetElse, InvalidVariableDeclaration, InvalidVariableDeclarationSub,
+WrapExpressionInParentheses,
+};
 use crate::maybe_whole;

 use rustc_ast as ast;
@@ -112,11 +116,7 @@ impl<'a> Parser<'a> {
 let bl = self.parse_block()?;
 // Destructuring assignment ... else.
 // This is not allowed, but point it out in a nice way.
-let mut err = self.struct_span_err(
-e.span.to(bl.span),
-"<assignment> ... else { ... } is not allowed",
-);
-err.emit();
+self.sess.emit_err(AssignmentElseNotAllowed { span: e.span.to(bl.span) });
 }
 self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
 } else {
@@ -202,9 +202,12 @@ impl<'a> Parser<'a> {
 fn error_outer_attrs(&self, attrs: &[Attribute]) {
 if let [.., last] = attrs {
 if last.is_doc_comment() {
-self.span_err(last.span, Error::UselessDocComment).emit();
+self.sess.emit_err(DocCommentDoesNotDocumentAnything {
+span: last.span,
+missing_comma: None,
+});
 } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
-self.struct_span_err(last.span, "expected statement after outer attribute").emit();
+self.sess.emit_err(ExpectedStatementAfterOuterAttr { span: last.span });
 }
 }
 }
@@ -255,17 +258,7 @@ impl<'a> Parser<'a> {
 let lo = self.prev_token.span;

 if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
-self.struct_span_err(
-lo.to(self.token.span),
-"`const` and `let` are mutually exclusive",
-)
-.span_suggestion(
-lo.to(self.token.span),
-"remove `let`",
-"const",
-Applicability::MaybeIncorrect,
-)
-.emit();
+self.sess.emit_err(ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
 self.bump();
 }

@@ -363,44 +356,27 @@ impl<'a> Parser<'a> {
 fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
 if let ast::ExprKind::Binary(op, ..) = init.kind {
 if op.node.lazy() {
-let suggs = vec![
-(init.span.shrink_to_lo(), "(".to_string()),
-(init.span.shrink_to_hi(), ")".to_string()),
-];
-self.struct_span_err(
-init.span,
-&format!(
-"a `{}` expression cannot be directly assigned in `let...else`",
-op.node.to_string()
-),
-)
-.multipart_suggestion(
-"wrap the expression in parentheses",
-suggs,
-Applicability::MachineApplicable,
-)
-.emit();
+self.sess.emit_err(InvalidExpressionInLetElse {
+span: init.span,
+operator: op.node.to_string(),
+sugg: WrapExpressionInParentheses {
+left: init.span.shrink_to_lo(),
+right: init.span.shrink_to_hi(),
+},
+});
 }
 }
 }

 fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
 if let Some(trailing) = classify::expr_trailing_brace(init) {
-let err_span = trailing.span.with_lo(trailing.span.hi() - BytePos(1));
-let suggs = vec![
-(trailing.span.shrink_to_lo(), "(".to_string()),
-(trailing.span.shrink_to_hi(), ")".to_string()),
-];
-self.struct_span_err(
-err_span,
-"right curly brace `}` before `else` in a `let...else` statement not allowed",
-)
-.multipart_suggestion(
-"try wrapping the expression in parentheses",
-suggs,
-Applicability::MachineApplicable,
-)
-.emit();
+self.sess.emit_err(InvalidCurlyInLetElse {
+span: trailing.span.with_lo(trailing.span.hi() - BytePos(1)),
+sugg: WrapExpressionInParentheses {
+left: trailing.span.shrink_to_lo(),
+right: trailing.span.shrink_to_hi(),
+},
+});
 }
 }

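Note: both `let...else` recoveries above share the same `sugg` field. A sketch of what that shared subdiagnostic might look like, mirroring the `ExprParenthesesNeeded` type added further down in this diff; the slug is an assumption.

use rustc_macros::Subdiagnostic;
use rustc_span::Span;

// Sketch only: slug assumed, not taken from this diff.
#[derive(Subdiagnostic)]
#[multipart_suggestion(parser::sugg_wrap_expression_in_parentheses, applicability = "machine-applicable")]
pub(crate) struct WrapExpressionInParentheses {
    #[suggestion_part(code = "(")]
    pub left: Span,
    #[suggestion_part(code = ")")]
    pub right: Span,
}
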
@@ -409,18 +385,7 @@ impl<'a> Parser<'a> {
 let eq_consumed = match self.token.kind {
 token::BinOpEq(..) => {
 // Recover `let x <op>= 1` as `let x = 1`
-self.struct_span_err(
-self.token.span,
-"can't reassign to an uninitialized variable",
-)
-.span_suggestion_short(
-self.token.span,
-"initialize the variable",
-"=",
-Applicability::MaybeIncorrect,
-)
-.help("if you meant to overwrite, remove the `let` binding")
-.emit();
+self.sess.emit_err(CompoundAssignmentExpressionInLet { span: self.token.span });
 self.bump();
 true
 }
@@ -434,7 +399,12 @@ impl<'a> Parser<'a> {
 pub(super) fn parse_block(&mut self) -> PResult<'a, P<Block>> {
 let (attrs, block) = self.parse_inner_attrs_and_block()?;
 if let [.., last] = &*attrs {
-self.error_on_forbidden_inner_attr(last.span, DEFAULT_INNER_ATTR_FORBIDDEN);
+self.error_on_forbidden_inner_attr(
+last.span,
+super::attr::InnerAttrPolicy::Forbidden(Some(
+InnerAttrForbiddenReason::InCodeBlock,
+)),
+);
 }
 Ok(block)
 }

@@ -219,3 +219,18 @@ impl IntoDiagnostic<'_> for InvalidCharacterInCrateName<'_> {
 diag
 }
 }
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(session::expr_parentheses_needed, applicability = "machine-applicable")]
+pub struct ExprParenthesesNeeded {
+#[suggestion_part(code = "(")]
+pub left: Span,
+#[suggestion_part(code = ")")]
+pub right: Span,
+}
+
+impl ExprParenthesesNeeded {
+pub fn surrounding(s: Span) -> Self {
+ExprParenthesesNeeded { left: s.shrink_to_lo(), right: s.shrink_to_hi() }
+}
+}

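Note: for orientation, roughly what the derive above amounts to when written by hand. This is an approximation only: the generated impl pulls its message from the Fluent bundle and its exact shape and trait signature may differ from this sketch.

impl rustc_errors::AddToDiagnostic for ExprParenthesesNeeded {
    fn add_to_diagnostic(self, diag: &mut rustc_errors::Diagnostic) {
        // Approximation: the real impl uses the `session::expr_parentheses_needed`
        // Fluent message rather than a literal string.
        diag.multipart_suggestion(
            "wrap the expression in parentheses",
            vec![(self.left, "(".to_string()), (self.right, ")".to_string())],
            rustc_errors::Applicability::MachineApplicable,
        );
    }
}
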
@@ -2,7 +2,9 @@
 //! It also serves as an input to the parser itself.

 use crate::config::CheckCfg;
-use crate::errors::{FeatureDiagnosticForIssue, FeatureDiagnosticHelp, FeatureGateError};
+use crate::errors::{
+ExprParenthesesNeeded, FeatureDiagnosticForIssue, FeatureDiagnosticHelp, FeatureGateError,
+};
 use crate::lint::{
 builtin::UNSTABLE_SYNTAX_PRE_EXPANSION, BufferedEarlyLint, BuiltinLintDiagnostics, Lint, LintId,
 };
@@ -11,7 +13,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
 use rustc_data_structures::sync::{Lock, Lrc};
 use rustc_errors::{emitter::SilentEmitter, ColorConfig, Handler};
 use rustc_errors::{
-fallback_fluent_bundle, Applicability, Diagnostic, DiagnosticBuilder, DiagnosticId,
+fallback_fluent_bundle, AddToDiagnostic, Diagnostic, DiagnosticBuilder, DiagnosticId,
 DiagnosticMessage, EmissionGuarantee, ErrorGuaranteed, IntoDiagnostic, MultiSpan, StashKey,
 };
 use rustc_feature::{find_feature_issue, GateIssue, UnstableFeatures};
@@ -325,11 +327,7 @@ impl ParseSess {
 /// Extend an error with a suggestion to wrap an expression with parentheses to allow the
 /// parser to continue parsing the following operation as part of the same expression.
 pub fn expr_parentheses_needed(&self, err: &mut Diagnostic, span: Span) {
-err.multipart_suggestion(
-"parentheses are required to parse this as an expression",
-vec![(span.shrink_to_lo(), "(".to_string()), (span.shrink_to_hi(), ")".to_string())],
-Applicability::MachineApplicable,
-);
+ExprParenthesesNeeded::surrounding(span).add_to_diagnostic(err);
 }

 pub fn save_proc_macro_span(&self, span: Span) -> usize {

@@ -671,3 +671,9 @@ enum ExampleEnum {
 #[diag(typeck::ambiguous_lifetime_bound)]
 Baz,
 }
+
+#[derive(Diagnostic)]
+#[diag(typeck::ambiguous_lifetime_bound, code = "E0123")]
+struct RawIdentDiagnosticArg {
+pub r#type: String,
+}

@@ -633,3 +633,11 @@ struct BI {
 #[suggestion_part(code = "")]
 spans: Vec<Span>,
 }
+
+#[derive(Subdiagnostic)]
+#[label(parser::add_paren)]
+struct BJ {
+#[primary_span]
+span: Span,
+r#type: String,
+}

@@ -14,7 +14,7 @@ mod nonexistent_env {

 mod erroneous_literal {
 include!(concat!("NON_EXISTENT"suffix, "/data.rs"));
-//~^ ERROR suffixes on a string literal are invalid
+//~^ ERROR suffixes on string literals are invalid
 }

 fn main() {}

@@ -6,7 +6,7 @@ LL | include!(concat!(env!("NON_EXISTENT"), "/data.rs"));
 |
 = note: this error originates in the macro `env` (in Nightly builds, run with -Z macro-backtrace for more info)

-error: suffixes on a string literal are invalid
+error: suffixes on string literals are invalid
 --> $DIR/issue-55897.rs:16:22
 |
 LL | include!(concat!("NON_EXISTENT"suffix, "/data.rs"));

@@ -29,7 +29,7 @@ fn main() {
 < //~ ERROR `<` is interpreted as a start of generic
 5);

-println!("{}", a as usize << long_name); //~ ERROR `<` is interpreted as a start of generic
+println!("{}", a as usize << long_name); //~ ERROR `<<` is interpreted as a start of generic

 println!("{}", a: &mut 4); //~ ERROR expected type, found `4`
 }

@@ -95,7 +95,7 @@ LL |
 LL ~ usize)
 |

-error: `<` is interpreted as a start of generic arguments for `usize`, not a shift
+error: `<<` is interpreted as a start of generic arguments for `usize`, not a shift
 --> $DIR/issue-22644.rs:32:31
 |
 LL | println!("{}", a as usize << long_name);

@@ -4,7 +4,7 @@ error: right curly brace `}` before `else` in a `let...else` statement not allow
 LL | let Some(1) = { Some(1) } else {
 | ^
 |
-help: try wrapping the expression in parentheses
+help: wrap the expression in parentheses
 |
 LL | let Some(1) = ({ Some(1) }) else {
 | + +
@@ -15,7 +15,7 @@ error: right curly brace `}` before `else` in a `let...else` statement not allow
 LL | let Some(1) = loop { break Some(1) } else {
 | ^
 |
-help: try wrapping the expression in parentheses
+help: wrap the expression in parentheses
 |
 LL | let Some(1) = (loop { break Some(1) }) else {
 | + +
@@ -26,7 +26,7 @@ error: right curly brace `}` before `else` in a `let...else` statement not allow
 LL | let 2 = 1 + match 1 { n => n } else {
 | ^
 |
-help: try wrapping the expression in parentheses
+help: wrap the expression in parentheses
 |
 LL | let 2 = 1 + (match 1 { n => n }) else {
 | + +
@@ -37,7 +37,7 @@ error: right curly brace `}` before `else` in a `let...else` statement not allow
 LL | let Some(1) = unsafe { unsafe_fn() } else {
 | ^
 |
-help: try wrapping the expression in parentheses
+help: wrap the expression in parentheses
 |
 LL | let Some(1) = (unsafe { unsafe_fn() }) else {
 | + +

@@ -1,18 +1,18 @@
 extern
-"C"suffix //~ ERROR suffixes on a string literal are invalid
+"C"suffix //~ ERROR suffixes on string literals are invalid
 fn foo() {}

 extern
-"C"suffix //~ ERROR suffixes on a string literal are invalid
+"C"suffix //~ ERROR suffixes on string literals are invalid
 {}

 fn main() {
-""suffix; //~ ERROR suffixes on a string literal are invalid
-b""suffix; //~ ERROR suffixes on a byte string literal are invalid
-r#""#suffix; //~ ERROR suffixes on a string literal are invalid
-br#""#suffix; //~ ERROR suffixes on a byte string literal are invalid
-'a'suffix; //~ ERROR suffixes on a char literal are invalid
-b'a'suffix; //~ ERROR suffixes on a byte literal are invalid
+""suffix; //~ ERROR suffixes on string literals are invalid
+b""suffix; //~ ERROR suffixes on byte string literals are invalid
+r#""#suffix; //~ ERROR suffixes on string literals are invalid
+br#""#suffix; //~ ERROR suffixes on byte string literals are invalid
+'a'suffix; //~ ERROR suffixes on char literals are invalid
+b'a'suffix; //~ ERROR suffixes on byte literals are invalid

 1234u1024; //~ ERROR invalid width `1024` for integer literal
 1234i1024; //~ ERROR invalid width `1024` for integer literal

@@ -1,46 +1,46 @@
-error: suffixes on a string literal are invalid
+error: suffixes on string literals are invalid
 --> $DIR/bad-lit-suffixes.rs:2:5
 |
 LL | "C"suffix
 | ^^^^^^^^^ invalid suffix `suffix`

-error: suffixes on a string literal are invalid
+error: suffixes on string literals are invalid
 --> $DIR/bad-lit-suffixes.rs:6:5
 |
 LL | "C"suffix
 | ^^^^^^^^^ invalid suffix `suffix`

-error: suffixes on a string literal are invalid
+error: suffixes on string literals are invalid
 --> $DIR/bad-lit-suffixes.rs:10:5
 |
 LL | ""suffix;
 | ^^^^^^^^ invalid suffix `suffix`

-error: suffixes on a byte string literal are invalid
+error: suffixes on byte string literals are invalid
 --> $DIR/bad-lit-suffixes.rs:11:5
 |
 LL | b""suffix;
 | ^^^^^^^^^ invalid suffix `suffix`

-error: suffixes on a string literal are invalid
+error: suffixes on string literals are invalid
 --> $DIR/bad-lit-suffixes.rs:12:5
 |
 LL | r#""#suffix;
 | ^^^^^^^^^^^ invalid suffix `suffix`

-error: suffixes on a byte string literal are invalid
+error: suffixes on byte string literals are invalid
 --> $DIR/bad-lit-suffixes.rs:13:5
 |
 LL | br#""#suffix;
 | ^^^^^^^^^^^^ invalid suffix `suffix`

-error: suffixes on a char literal are invalid
+error: suffixes on char literals are invalid
 --> $DIR/bad-lit-suffixes.rs:14:5
 |
 LL | 'a'suffix;
 | ^^^^^^^^^ invalid suffix `suffix`

-error: suffixes on a byte literal are invalid
+error: suffixes on byte literals are invalid
 --> $DIR/bad-lit-suffixes.rs:15:5
 |
 LL | b'a'suffix;

@@ -7,7 +7,7 @@ LL | | */
 | |___- previous doc comment
 LL |
 LL | #![recursion_limit="100"]
-| ^^^^^^^^^^^^^^^^^^^^^^^^^ not permitted following an outer attribute
+| ^^^^^^^^^^^^^^^^^^^^^^^^^ not permitted following an outer doc comment
 LL |
 LL | fn main() {}
 | ------------ the inner attribute doesn't annotate this function