Rollup merge of #94146 - est31:let_else, r=cjgillot
Adopt `let else` in more places

Continuation of #89933, #91018, #91481, #93046, #93590, #94011. I have extended my clippy lint to also recognize tuple passing and match statements. The diff produced by applying its suggestions is well over a thousand lines, so I split it into multiple pull requests to make reviewing easier. This is the biggest of those PRs and covers the changes outside of rustdoc, rustc_typeck, rustc_const_eval, and rustc_trait_selection, which were handled in PRs #94139, #94142, #94143, #94144.
commit f2d6770f77
132 changed files with 539 additions and 881 deletions
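Every hunk below has the same shape: a `match` whose non-matching arm only diverges (returns, continues, or panics) is rewritten as a `let ... else` binding; the extended lint also recognizes the variant where the match binds a tuple. The following is a minimal standalone sketch of the rewrite, with hypothetical function names that do not appear in the diff (at the time of this PR `let else` was still gated behind `#![feature(let_else)]` inside rustc; it has since been stabilized, so the sketch compiles on current stable Rust):

    // Hypothetical example, not taken from the rustc codebase.

    // Before: the match exists only to bind the `Some` payload.
    fn first_char_old(s: &str) -> Option<char> {
        let c = match s.chars().next() {
            Some(c) => c,
            None => return None,
        };
        Some(c.to_ascii_uppercase())
    }

    // After: `let else` binds the payload directly; the else block must diverge.
    fn first_char_new(s: &str) -> Option<char> {
        let Some(c) = s.chars().next() else {
            return None;
        };
        Some(c.to_ascii_uppercase())
    }

    fn main() {
        assert_eq!(first_char_old("rust"), Some('R'));
        assert_eq!(first_char_new(""), None);
    }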
@@ -1299,20 +1299,16 @@ pub fn parse_macro_name_and_helper_attrs(
     // Once we've located the `#[proc_macro_derive]` attribute, verify
     // that it's of the form `#[proc_macro_derive(Foo)]` or
     // `#[proc_macro_derive(Foo, attributes(A, ..))]`
-    let list = match attr.meta_item_list() {
-        Some(list) => list,
-        None => return None,
-    };
+    let Some(list) = attr.meta_item_list() else {
+        return None;
+    };
     if list.len() != 1 && list.len() != 2 {
         diag.span_err(attr.span, "attribute must have either one or two arguments");
         return None;
     }
-    let trait_attr = match list[0].meta_item() {
-        Some(meta_item) => meta_item,
-        _ => {
-            diag.span_err(list[0].span(), "not a meta item");
-            return None;
-        }
-    };
+    let Some(trait_attr) = list[0].meta_item() else {
+        diag.span_err(list[0].span(), "not a meta item");
+        return None;
+    };
     let trait_ident = match trait_attr.ident() {
         Some(trait_ident) if trait_attr.is_word() => trait_ident,
@@ -1341,12 +1337,9 @@ pub fn parse_macro_name_and_helper_attrs(
         })
         .iter()
         .filter_map(|attr| {
-            let attr = match attr.meta_item() {
-                Some(meta_item) => meta_item,
-                _ => {
-                    diag.span_err(attr.span(), "not a meta item");
-                    return None;
-                }
-            };
+            let Some(attr) = attr.meta_item() else {
+                diag.span_err(attr.span(), "not a meta item");
+                return None;
+            };
 
             let ident = match attr.ident() {
@@ -79,9 +79,8 @@ fn get_features(
             continue;
         }
 
-        let list = match attr.meta_item_list() {
-            Some(list) => list,
-            None => continue,
-        };
+        let Some(list) = attr.meta_item_list() else {
+            continue;
+        };
 
         for mi in list {
@@ -112,9 +111,8 @@ fn get_features(
             continue;
         }
 
-        let list = match attr.meta_item_list() {
-            Some(list) => list,
-            None => continue,
-        };
+        let Some(list) = attr.meta_item_list() else {
+            continue;
+        };
 
         let bad_input = |span| {
@@ -340,10 +338,9 @@ impl<'a> StripUnconfigured<'a> {
     /// is in the original source file. Gives a compiler error if the syntax of
     /// the attribute is incorrect.
     crate fn expand_cfg_attr(&self, attr: Attribute, recursive: bool) -> Vec<Attribute> {
-        let (cfg_predicate, expanded_attrs) =
-            match rustc_parse::parse_cfg_attr(&attr, &self.sess.parse_sess) {
-                None => return vec![],
-                Some(r) => r,
-            };
+        let Some((cfg_predicate, expanded_attrs)) =
+            rustc_parse::parse_cfg_attr(&attr, &self.sess.parse_sess) else {
+                return vec![];
+            };
 
         // Lint on zero attributes in source.
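This hunk is one of the "tuple passing" cases the extended lint recognizes: the old match only unwrapped a tuple, and the rewrite destructures both elements directly in the `let ... else` pattern. A small sketch of the same shape, with made-up names rather than the real `rustc_parse` API:

    // Hypothetical example, not taken from the rustc codebase.
    fn split_key_value(s: &str) -> Option<(&str, &str)> {
        let mut parts = s.splitn(2, '=');
        Some((parts.next()?, parts.next()?))
    }

    // Before: the match only unwraps the tuple.
    fn describe_old(s: &str) -> Vec<String> {
        let (key, value) = match split_key_value(s) {
            None => return vec![],
            Some(kv) => kv,
        };
        vec![format!("{key} = {value}")]
    }

    // After: both tuple elements are bound by the `let ... else` pattern.
    fn describe_new(s: &str) -> Vec<String> {
        let Some((key, value)) = split_key_value(s) else {
            return vec![];
        };
        vec![format!("{key} = {value}")]
    }

    fn main() {
        println!("{:?}", describe_old("cfg=test"));
        println!("{:?}", describe_new("no-equals"));
    }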
@@ -389,18 +386,16 @@ impl<'a> StripUnconfigured<'a> {
         // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
         // for `attr` when we expand it to `#[attr]`
         let mut orig_trees = orig_tokens.trees();
-        let pound_token = match orig_trees.next().unwrap() {
-            TokenTree::Token(token @ Token { kind: TokenKind::Pound, .. }) => token,
-            _ => panic!("Bad tokens for attribute {:?}", attr),
-        };
+        let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }) = orig_trees.next().unwrap() else {
+            panic!("Bad tokens for attribute {:?}", attr);
+        };
         let pound_span = pound_token.span;
 
         let mut trees = vec![(AttrAnnotatedTokenTree::Token(pound_token), Spacing::Alone)];
         if attr.style == AttrStyle::Inner {
             // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
-            let bang_token = match orig_trees.next().unwrap() {
-                TokenTree::Token(token @ Token { kind: TokenKind::Not, .. }) => token,
-                _ => panic!("Bad tokens for attribute {:?}", attr),
-            };
+            let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }) = orig_trees.next().unwrap() else {
+                panic!("Bad tokens for attribute {:?}", attr);
+            };
             trees.push((AttrAnnotatedTokenTree::Token(bang_token), Spacing::Alone));
         }
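Here the scrutinee pattern is an enum variant containing an `@` binding rather than a plain `Some`, and the else block diverges by panicking. A reduced sketch of that shape, using hypothetical types instead of the real `TokenTree`/`Token` definitions:

    // Hypothetical example types, not the rustc token types.
    #[derive(Debug)]
    enum Tree {
        Token(Tok),
        Group(Vec<Tree>),
    }

    #[derive(Debug)]
    struct Tok {
        kind: Kind,
        text: String,
    }

    #[derive(Debug)]
    enum Kind {
        Pound,
        Ident,
    }

    fn expect_pound(mut trees: impl Iterator<Item = Tree>, attr: &str) -> Tok {
        // The pattern checks both the variant and the `kind` field, and binds the
        // whole token via `tok @ ...`; any other input reaches the diverging else block.
        let Tree::Token(tok @ Tok { kind: Kind::Pound, .. }) = trees.next().unwrap() else {
            panic!("bad tokens for attribute {:?}", attr);
        };
        tok
    }

    fn main() {
        let trees = vec![
            Tree::Token(Tok { kind: Kind::Pound, text: "#".into() }),
            Tree::Token(Tok { kind: Kind::Ident, text: "attr".into() }),
            Tree::Group(vec![]),
        ];
        let tok = expect_pound(trees.into_iter(), "some_attr");
        println!("{} has kind {:?}", tok.text, tok.kind);
    }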
@@ -641,9 +641,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         ExpandResult::Ready(match invoc.kind {
             InvocationKind::Bang { mac, .. } => match ext {
                 SyntaxExtensionKind::Bang(expander) => {
-                    let tok_result = match expander.expand(self.cx, span, mac.args.inner_tokens()) {
-                        Err(_) => return ExpandResult::Ready(fragment_kind.dummy(span)),
-                        Ok(ts) => ts,
-                    };
+                    let Ok(tok_result) = expander.expand(self.cx, span, mac.args.inner_tokens()) else {
+                        return ExpandResult::Ready(fragment_kind.dummy(span));
+                    };
                     self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
                 }
@@ -698,9 +697,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         self.cx.span_err(span, "key-value macro attributes are not supported");
                     }
                     let inner_tokens = attr_item.args.inner_tokens();
-                    let tok_result = match expander.expand(self.cx, span, inner_tokens, tokens) {
-                        Err(_) => return ExpandResult::Ready(fragment_kind.dummy(span)),
-                        Ok(ts) => ts,
-                    };
+                    let Ok(tok_result) = expander.expand(self.cx, span, inner_tokens, tokens) else {
+                        return ExpandResult::Ready(fragment_kind.dummy(span));
+                    };
                     self.parse_ast_fragment(tok_result, fragment_kind, &attr_item.path, span)
                 }
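Both MacroExpander hunks apply the same rewrite to a `Result`: `let Ok(x) = expr else { return ...; }` replaces a match whose `Err` arm only returned a dummy expansion. A small sketch with hypothetical stand-ins for the expansion types:

    // Hypothetical example, not the real ExpandResult/AstFragment types.
    #[derive(Debug)]
    enum Expansion {
        Ready(String),
        Dummy,
    }

    fn expand_tokens(input: &str) -> Result<String, ()> {
        if input.is_empty() { Err(()) } else { Ok(input.to_uppercase()) }
    }

    // Before: the match exists only to unwrap `Ok` and bail out on `Err`.
    fn expand_old(input: &str) -> Expansion {
        let tokens = match expand_tokens(input) {
            Err(_) => return Expansion::Dummy,
            Ok(ts) => ts,
        };
        Expansion::Ready(tokens)
    }

    // After: `let Ok(..) = .. else` binds the success value directly.
    fn expand_new(input: &str) -> Expansion {
        let Ok(tokens) = expand_tokens(input) else {
            return Expansion::Dummy;
        };
        Expansion::Ready(tokens)
    }

    fn main() {
        if let Expansion::Ready(tokens) = expand_old("mac") {
            println!("expanded to {tokens}");
        }
        println!("{:?}", expand_new(""));
    }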
@@ -170,23 +170,20 @@ fn mod_file_path_from_attr(
 ) -> Option<PathBuf> {
     // Extract path string from first `#[path = "path_string"]` attribute.
     let first_path = attrs.iter().find(|at| at.has_name(sym::path))?;
-    let path_sym = match first_path.value_str() {
-        Some(s) => s,
-        None => {
-            // This check is here mainly to catch attempting to use a macro,
-            // such as #[path = concat!(...)]. This isn't currently supported
-            // because otherwise the InvocationCollector would need to defer
-            // loading a module until the #[path] attribute was expanded, and
-            // it doesn't support that (and would likely add a bit of
-            // complexity). Usually bad forms are checked in AstValidator (via
-            // `check_builtin_attribute`), but by the time that runs the macro
-            // is expanded, and it doesn't give an error.
-            validate_attr::emit_fatal_malformed_builtin_attribute(
-                &sess.parse_sess,
-                first_path,
-                sym::path,
-            );
-        }
-    };
+    let Some(path_sym) = first_path.value_str() else {
+        // This check is here mainly to catch attempting to use a macro,
+        // such as #[path = concat!(...)]. This isn't currently supported
+        // because otherwise the InvocationCollector would need to defer
+        // loading a module until the #[path] attribute was expanded, and
+        // it doesn't support that (and would likely add a bit of
+        // complexity). Usually bad forms are checked in AstValidator (via
+        // `check_builtin_attribute`), but by the time that runs the macro
+        // is expanded, and it doesn't give an error.
+        validate_attr::emit_fatal_malformed_builtin_attribute(
+            &sess.parse_sess,
+            first_path,
+            sym::path,
+        );
+    };
 
     let path_str = path_sym.as_str();
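The else block of a `let ... else` has to diverge (have type `!`). This hunk works because `emit_fatal_malformed_builtin_attribute` never returns, so the entire error-reporting arm moves into the else block unchanged. A reduced sketch of that shape, using a hypothetical fatal-error helper in place of the real diagnostics API:

    use std::path::PathBuf;
    use std::process;

    // Hypothetical stand-in for a diagnostic helper that never returns.
    fn emit_fatal(msg: &str) -> ! {
        eprintln!("error: {msg}");
        process::exit(1)
    }

    fn mod_file_path(path_value: Option<&str>) -> Option<PathBuf> {
        // The else block consists of a call of type `!`, so it counts as diverging
        // even though it contains no explicit `return`, `continue`, or `panic!`.
        let Some(path) = path_value else {
            emit_fatal("malformed #[path] attribute");
        };
        Some(PathBuf::from(path))
    }

    fn main() {
        println!("{:?}", mod_file_path(Some("foo/bar.rs")));
    }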
@@ -596,9 +596,8 @@ impl server::Literal for Rustc<'_, '_> {
         let minus_present = parser.eat(&token::BinOp(token::Minus));
 
         let lit_span = parser.token.span.data();
-        let mut lit = match parser.token.kind {
-            token::Literal(lit) => lit,
-            _ => return Err(()),
-        };
+        let token::Literal(mut lit) = parser.token.kind else {
+            return Err(());
+        };
 
         // Check no comment or whitespace surrounding the (possibly negative)
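In this last hunk the binding mode moves into the pattern: `let mut lit = match ...` becomes `let token::Literal(mut lit) = ... else`, so `mut` now annotates the binding introduced inside the pattern rather than the whole `let`. A tiny sketch of the same move with a hypothetical token type:

    // Hypothetical token type, not the real rustc token kinds.
    enum TokenKind {
        Literal(u32),
        Comma,
    }

    // Before: `mut` sits on the `let`, because the match produces the final value.
    fn read_literal_old(kind: TokenKind) -> Result<u32, ()> {
        let mut lit = match kind {
            TokenKind::Literal(lit) => lit,
            _ => return Err(()),
        };
        lit += 1;
        Ok(lit)
    }

    // After: `mut` moves into the pattern and applies to the binding introduced there.
    fn read_literal_new(kind: TokenKind) -> Result<u32, ()> {
        let TokenKind::Literal(mut lit) = kind else {
            return Err(());
        };
        lit += 1;
        Ok(lit)
    }

    fn main() {
        assert_eq!(read_literal_old(TokenKind::Literal(41)), Ok(42));
        assert_eq!(read_literal_new(TokenKind::Comma), Err(()));
    }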