Auto merge of #87449 - matthiaskrgr:clippyy_v2, r=nagisa

more clippy::complexity fixes

(also a couple of clippy::perf fixes)
bors 2021-08-01 09:15:15 +00:00
commit aadd6189ad
32 changed files with 44 additions and 51 deletions
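
Most of the diffs below apply a small set of recurring clippy suggestions. As a rough orientation only, here is a minimal, self-contained Rust sketch of those before/after patterns; it is not part of the commit, and the lint names in the comments are my best guess at which lints are involved.

// Illustrative sketch (not from the commit) of the recurring fix patterns;
// lint names in the comments are assumptions, not taken from the commit message.
fn main() {
    // vec_init_then_push: build the Vec in one `vec![...]` expression
    // instead of `let mut v = vec![]; v.push(..);`.
    let args = vec!["template".to_string()];

    // filter_map_identity: `.filter_map(|x| x)` over Options is just `.flatten()`.
    let present: Vec<i32> = vec![Some(1), None, Some(3)].into_iter().flatten().collect();

    // single_char_pattern: prefer a `char` pattern over a one-character `&str`.
    let features: Vec<&str> = "+sse,+avx".split(',').collect();

    // useless_format: `format!` with no arguments is just `.to_string()`.
    let reason = "box cannot be represented as a single pointer".to_string();

    // extend_with_drain (perf): move elements with `append` rather than
    // `extend(other.drain(..))`.
    let mut kinds = vec![1, 2];
    let mut outlives = vec![3, 4];
    kinds.append(&mut outlives);

    println!("{:?} {:?} {:?} {} {:?}", args, present, features, reason, kinds);
}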

@@ -2189,8 +2189,7 @@ impl<'a> State<'a> {
             Options(InlineAsmOptions),
         }
-        let mut args = vec![];
-        args.push(AsmArg::Template(InlineAsmTemplatePiece::to_string(&asm.template)));
+        let mut args = vec![AsmArg::Template(InlineAsmTemplatePiece::to_string(&asm.template))];
         args.extend(asm.operands.iter().map(|(o, _)| AsmArg::Operand(o)));
         if !asm.options.is_empty() {
             args.push(AsmArg::Options(asm.options));

@@ -365,7 +365,7 @@ pub fn llvm_global_features(sess: &Session) -> Vec<String> {
                 features_string
             };
-            features.extend(features_string.split(",").map(String::from));
+            features.extend(features_string.split(',').map(String::from));
         }
         Some(_) | None => {}
     };
@@ -374,7 +374,7 @@ pub fn llvm_global_features(sess: &Session) -> Vec<String> {
         if s.is_empty() {
             return None;
         }
-        let feature = if s.starts_with("+") || s.starts_with("-") {
+        let feature = if s.starts_with('+') || s.starts_with('-') {
             &s[1..]
         } else {
             return Some(s.to_string());

@@ -834,7 +834,7 @@ fn ident_name_compatibility_hack(
         .flat_map(|c| c.as_os_str().to_str())
         .find(|c| c.starts_with("js-sys"))
     {
-        let mut version = c.trim_start_matches("js-sys-").split(".");
+        let mut version = c.trim_start_matches("js-sys-").split('.');
         if version.next() == Some("0")
             && version.next() == Some("3")
             && version

@@ -476,7 +476,7 @@ impl<T> PerNS<Option<T>> {
     /// Returns an iterator over the items which are `Some`.
     pub fn present_items(self) -> impl Iterator<Item = T> {
-        IntoIter::new([self.type_ns, self.value_ns, self.macro_ns]).filter_map(|it| it)
+        IntoIter::new([self.type_ns, self.value_ns, self.macro_ns]).flatten()
     }
 }

@@ -1357,8 +1357,8 @@ impl<'a> State<'a> {
             Options(ast::InlineAsmOptions),
         }
-        let mut args = vec![];
-        args.push(AsmArg::Template(ast::InlineAsmTemplatePiece::to_string(&asm.template)));
+        let mut args =
+            vec![AsmArg::Template(ast::InlineAsmTemplatePiece::to_string(&asm.template))];
         args.extend(asm.operands.iter().map(|(o, _)| AsmArg::Operand(o)));
         if !asm.options.is_empty() {
             args.push(AsmArg::Options(asm.options));

@@ -576,7 +576,7 @@ pub fn is_known_lint_tool(m_item: Symbol, sess: &Session, attrs: &[ast::Attribut
     // NOTE: does no error handling; error handling is done by rustc_resolve.
     sess.filter_by_name(attrs, sym::register_tool)
         .filter_map(|attr| attr.meta_item_list())
-        .flat_map(std::convert::identity)
+        .flatten()
         .filter_map(|nested_meta| nested_meta.ident())
         .map(|ident| ident.name)
         .any(|name| name == m_item)

@@ -906,7 +906,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
                 } else {
                     return FfiUnsafe {
                         ty,
-                        reason: format!("box cannot be represented as a single pointer"),
+                        reason: "box cannot be represented as a single pointer".to_string(),
                         help: None,
                     };
                 }

@@ -135,7 +135,7 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec<syn::Error>) {
     let mut check_dup = |span: Span, str: &str, errors: &mut Errors| {
         if let Some(prev_span) = keys.get(str) {
             errors.error(span, format!("Symbol `{}` is duplicated", str));
-            errors.error(*prev_span, format!("location of previous definition"));
+            errors.error(*prev_span, "location of previous definition".to_string());
         } else {
             keys.insert(str.to_string(), span);
         }

@@ -385,7 +385,7 @@ impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for [mir::abstract_const::N
 impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List<ty::BoundVariableKind> {
     fn decode(decoder: &mut D) -> Result<&'tcx Self, D::Error> {
         let len = decoder.read_usize()?;
-        Ok(decoder.tcx().mk_bound_variable_kinds((0..len).map(|_| Decodable::decode(decoder)))?)
+        decoder.tcx().mk_bound_variable_kinds((0..len).map(|_| Decodable::decode(decoder)))
     }
 }

@@ -320,7 +320,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                     .map(|n| format!("`{}`", n))
                     .unwrap_or_else(|| "the mutable reference".to_string()),
             ),
-            format!("&mut *"),
+            "&mut *".to_string(),
             Applicability::MachineApplicable,
         );
     }

@@ -731,7 +731,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
                 if suggestions.peek().is_some() {
                     err.span_suggestions(
                         path_segment.ident.span,
-                        &format!("use mutable method"),
+                        "use mutable method",
                         suggestions,
                         Applicability::MaybeIncorrect,
                     );

@@ -46,7 +46,7 @@ pub fn merge_codegen_units<'tcx>(
         // Record that `second_smallest` now contains all the stuff that was in
         // `smallest` before.
         let mut consumed_cgu_names = cgu_contents.remove(&smallest.name()).unwrap();
-        cgu_contents.get_mut(&second_smallest.name()).unwrap().extend(consumed_cgu_names.drain(..));
+        cgu_contents.get_mut(&second_smallest.name()).unwrap().append(&mut consumed_cgu_names);
         debug!(
             "CodegenUnit {} merged into CodegenUnit {}",

@@ -255,7 +255,7 @@ impl NonConstOp for CellBorrow {
             );
             err.span_label(
                 span,
-                format!("this borrow of an interior mutable value may end up in the final value"),
+                "this borrow of an interior mutable value may end up in the final value",
             );
             if let hir::ConstContext::Static(_) = ccx.const_kind() {
                 err.help(

@@ -344,7 +344,7 @@ impl DebugCounters {
                 return if counter_format.id {
                     format!("{}#{}", block_label, id.index())
                 } else {
-                    format!("{}", block_label)
+                    block_label.to_string()
                 };
             }
         }
@@ -369,7 +369,7 @@ impl DebugCounters {
                 }
                 return format!("({})", self.format_counter_kind(counter_kind));
             }
-            return format!("{}", self.format_counter_kind(counter_kind));
+            return self.format_counter_kind(counter_kind).to_string();
         }
     }
     format!("#{}", operand.index().to_string())

@@ -526,8 +526,8 @@ impl TraverseCoverageGraphWithLoops {
     pub fn new(basic_coverage_blocks: &CoverageGraph) -> Self {
         let start_bcb = basic_coverage_blocks.start_node();
         let backedges = find_loop_backedges(basic_coverage_blocks);
-        let mut context_stack = Vec::new();
-        context_stack.push(TraversalContext { loop_backedges: None, worklist: vec![start_bcb] });
+        let context_stack =
+            vec![TraversalContext { loop_backedges: None, worklist: vec![start_bcb] }];
         // `context_stack` starts with a `TraversalContext` for the main function context (beginning
         // with the `start` BasicCoverageBlock of the function). New worklists are pushed to the top
         // of the stack as loops are entered, and popped off of the stack when a loop's worklist is

@@ -614,8 +614,8 @@ impl Inliner<'tcx> {
                 .vars_and_temps_iter()
                 .map(|local| callee_body.local_decls[local].clone()),
         );
-        caller_body.source_scopes.extend(callee_body.source_scopes.drain(..));
-        caller_body.var_debug_info.extend(callee_body.var_debug_info.drain(..));
+        caller_body.source_scopes.extend(&mut callee_body.source_scopes.drain(..));
+        caller_body.var_debug_info.append(&mut callee_body.var_debug_info);
         caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));
         caller_body[callsite.block].terminator = Some(Terminator {

@@ -147,8 +147,8 @@ fn validate_simd_shuffle(tcx: TyCtxt<'tcx>, args: &[Operand<'tcx>], span: Span)
     match &args[2] {
         Operand::Constant(_) => {} // all good
         _ => {
-            let msg = format!("last argument of `simd_shuffle` is required to be a `const` item");
-            tcx.sess.span_err(span, &msg);
+            let msg = "last argument of `simd_shuffle` is required to be a `const` item";
+            tcx.sess.span_err(span, msg);
         }
     }
 }

@@ -479,7 +479,7 @@ impl Visitor<'tcx> for ExtraComments<'tcx> {
                         uv.promoted
                     ),
                     ty::ConstKind::Value(val) => format!("Value({:?})", val),
-                    ty::ConstKind::Error(_) => format!("Error"),
+                    ty::ConstKind::Error(_) => "Error".to_string(),
                 };
                 self.push(&format!("+ val: {}", val));
             }

@@ -1107,8 +1107,7 @@ impl<'a> Parser<'a> {
                 e
             })?;
-            let enum_definition =
-                EnumDef { variants: variants.into_iter().filter_map(|v| v).collect() };
+            let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
             Ok((id, ItemKind::Enum(enum_definition, generics)))
         }

@@ -855,7 +855,7 @@ impl CheckAttrVisitor<'tcx> {
                 hir_id,
                 meta.span(),
                 |lint| {
-                    lint.build(&format!("invalid `doc` attribute")).emit();
+                    lint.build(&"invalid `doc` attribute").emit();
                 },
             );
             is_valid = false;

@@ -229,7 +229,7 @@ fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) {
     if let Some(main_def) = tcx.resolutions(()).main_def {
         if main_def.opt_fn_def_id().is_none() {
             // There is something at `crate::main`, but it is not a function definition.
-            err.span_label(main_def.span, &format!("non-function item at `crate::main` is found"));
+            err.span_label(main_def.span, "non-function item at `crate::main` is found");
         }
     }

@@ -620,8 +620,8 @@ fn incremental_verify_ich<CTX, K, V: Debug>(
     };
     tcx.sess().struct_err(&format!("internal compiler error: encountered incremental compilation error with {:?}", dep_node))
         .help(&format!("This is a known issue with the compiler. Run {} to allow your project to compile", run_cmd))
-        .note(&format!("Please follow the instructions below to create a bug report with the provided information"))
-        .note(&format!("See <https://github.com/rust-lang/rust/issues/84970> for more information"))
+        .note(&"Please follow the instructions below to create a bug report with the provided information")
+        .note(&"See <https://github.com/rust-lang/rust/issues/84970> for more information")
         .emit();
     panic!("Found unstable fingerprints for {:?}: {:?}", dep_node, result);
 }

@@ -1061,7 +1061,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
             }
             err.span_suggestion(
                 span,
-                &format!("use this syntax instead"),
+                &"use this syntax instead",
                 format!("{path_str}"),
                 Applicability::MaybeIncorrect,
             );

@@ -1867,7 +1867,7 @@ fn parse_extern_dep_specs(
             )
         });

-        let locparts: Vec<_> = loc.split(":").collect();
+        let locparts: Vec<_> = loc.split(':').collect();
         let spec = match &locparts[..] {
             ["raw", ..] => {
                 // Don't want `:` split string

@@ -684,7 +684,7 @@ mod parse {
             Some(v) => v,
         };

-        *slot = Some(match v.trim_end_matches("s") {
+        *slot = Some(match v.trim_end_matches('s') {
             "statement" | "stmt" => MirSpanview::Statement,
             "terminator" | "term" => MirSpanview::Terminator,
             "block" | "basicblock" => MirSpanview::Block,

@@ -2018,7 +2018,7 @@ impl Target {
         if base.is_builtin {
             // This can cause unfortunate ICEs later down the line.
-            return Err(format!("may not set is_builtin for targets not built-in"));
+            return Err("may not set is_builtin for targets not built-in".to_string());
         }
         // Each field should have been read using `Json::remove_key` so any keys remaining are unused.
         let remaining_keys = obj.as_object().ok_or("Expected JSON object for target")?.keys();

@@ -124,11 +124,10 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
             self.impl_similar_to(trait_ref, obligation).unwrap_or_else(|| trait_ref.def_id());
         let trait_ref = trait_ref.skip_binder();

-        let mut flags = vec![];
-        flags.push((
+        let mut flags = vec![(
             sym::ItemContext,
             self.describe_enclosure(obligation.cause.body_id).map(|s| s.to_owned()),
-        ));
+        )];

         match obligation.cause.code {
             ObligationCauseCode::BuiltinDerivedObligation(..)

@@ -290,13 +290,9 @@ fn suggest_restriction(
     } else {
         // Trivial case: `T` needs an extra bound: `T: Bound`.
         let (sp, suggestion) = match (
-            generics
-                .params
-                .iter()
-                .filter(|p| {
-                    !matches!(p.kind, hir::GenericParamKind::Type { synthetic: Some(_), .. })
-                })
-                .next(),
+            generics.params.iter().find(|p| {
+                !matches!(p.kind, hir::GenericParamKind::Type { synthetic: Some(_), .. })
+            }),
             super_traits,
         ) {
             (_, None) => predicate_constraint(

@@ -90,8 +90,8 @@ fn dropck_outlives<'tcx>(
                     // "outlives" represent types/regions that may be touched
                     // by a destructor.
-                    result.kinds.extend(constraints.outlives.drain(..));
-                    result.overflows.extend(constraints.overflows.drain(..));
+                    result.kinds.append(&mut constraints.outlives);
+                    result.overflows.append(&mut constraints.overflows);

                     // If we have even one overflow, we should stop trying to evaluate further --
                     // chances are, the subsequent overflows for this evaluation won't provide useful

@@ -357,7 +357,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) {
                 (expr_text, true)
             } else {
-                (format!("(..)"), false)
+                ("(..)".to_string(), false)
             };
         let adjusted_text = if let Some(probe::AutorefOrPtrAdjustment::ToConstPtr) =

@@ -791,7 +791,7 @@ fn fn_sig_suggestion<'tcx>(
             })
         })
         .chain(std::iter::once(if sig.c_variadic { Some("...".to_string()) } else { None }))
-        .filter_map(|arg| arg)
+        .flatten()
         .collect::<Vec<String>>()
         .join(", ");
     let output = sig.output();

@@ -488,7 +488,7 @@ crate fn href(did: DefId, cx: &Context<'_>) -> Result<(String, ItemType, Vec<Str
     let cache = &cx.cache();
     let relative_to = &cx.current;
     fn to_module_fqp(shortty: ItemType, fqp: &[String]) -> &[String] {
-        if shortty == ItemType::Module { &fqp[..] } else { &fqp[..fqp.len() - 1] }
+        if shortty == ItemType::Module { fqp } else { &fqp[..fqp.len() - 1] }
     }
     if !did.is_local() && !cache.access_levels.is_public(did) && !cache.document_private {
@@ -509,7 +509,7 @@ crate fn href(did: DefId, cx: &Context<'_>) -> Result<(String, ItemType, Vec<Str
         match cache.extern_locations[&did.krate] {
             ExternalLocation::Remote(ref s) => {
                 let s = s.trim_end_matches('/');
-                let mut s = vec![&s[..]];
+                let mut s = vec![s];
                 s.extend(module_fqp[..].iter().map(String::as_str));
                 s
             }