avoid cloning and then iterating

KaDiWa 2022-08-13 15:50:01 +02:00
parent 75b7e52e92
commit 4eebcb9910
12 changed files with 31 additions and 31 deletions
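
The pattern removed throughout is cloning a whole collection just to iterate over it; borrowing and cloning (or copying) per element avoids the intermediate allocation. A minimal stand-alone sketch of the two shapes, with illustrative names that are not taken from this commit:

    fn main() {
        let names: Vec<String> = vec!["a".into(), "b".into()];

        // Before: clone the whole Vec, then consume the clone.
        let mut all: Vec<String> = Vec::new();
        all.extend(names.clone().into_iter()); // allocates a second Vec up front

        // After: borrow the Vec and clone each element only as it is consumed.
        let mut all2: Vec<String> = Vec::new();
        all2.extend(names.iter().cloned()); // no intermediate Vec

        assert_eq!(all, all2);
    }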


@@ -385,8 +385,7 @@ fn find_type_parameters(
     // Place bound generic params on a stack, to extract them when a type is encountered.
     fn visit_poly_trait_ref(&mut self, trait_ref: &'a ast::PolyTraitRef) {
         let stack_len = self.bound_generic_params_stack.len();
-        self.bound_generic_params_stack
-            .extend(trait_ref.bound_generic_params.clone().into_iter());
+        self.bound_generic_params_stack.extend(trait_ref.bound_generic_params.iter().cloned());
         visit::walk_poly_trait_ref(self, trait_ref);


@@ -116,7 +116,7 @@ impl CreateTokenStream for LazyTokenStreamImpl {
         if !self.replace_ranges.is_empty() {
             let mut tokens: Vec<_> = tokens.collect();
-            let mut replace_ranges = self.replace_ranges.clone();
+            let mut replace_ranges = self.replace_ranges.to_vec();
             replace_ranges.sort_by_key(|(range, _)| range.start);
             #[cfg(debug_assertions)]
@@ -146,7 +146,7 @@ impl CreateTokenStream for LazyTokenStreamImpl {
             // start position, we ensure that any replace range which encloses
             // another replace range will capture the *replaced* tokens for the inner
             // range, not the original tokens.
-            for (range, new_tokens) in replace_ranges.iter().rev() {
+            for (range, new_tokens) in replace_ranges.into_iter().rev() {
                 assert!(!range.is_empty(), "Cannot replace an empty range: {:?}", range);
                 // Replace ranges are only allowed to decrease the number of tokens.
                 assert!(
@@ -165,7 +165,7 @@ impl CreateTokenStream for LazyTokenStreamImpl {
                 tokens.splice(
                     (range.start as usize)..(range.end as usize),
-                    new_tokens.clone().into_iter().chain(filler),
+                    new_tokens.into_iter().chain(filler),
                 );
             }
             make_token_stream(tokens.into_iter(), self.break_last_token)
@@ -321,7 +321,7 @@ impl<'a> Parser<'a> {
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                 .iter()
                 .cloned()
-                .chain(inner_attr_replace_ranges.clone().into_iter())
+                .chain(inner_attr_replace_ranges.iter().cloned())
                 .map(|(range, tokens)| {
                     ((range.start - start_calls)..(range.end - start_calls), tokens)
                 })


@@ -387,7 +387,7 @@ impl<'a> Parser<'a> {
     /// This is to avoid losing unclosed delims errors `create_snapshot_for_diagnostic` clears.
     pub(super) fn restore_snapshot(&mut self, snapshot: SnapshotParser<'a>) {
         *self = snapshot.parser;
-        self.unclosed_delims.extend(snapshot.unclosed_delims.clone());
+        self.unclosed_delims.extend(snapshot.unclosed_delims);
     }
     pub fn unclosed_delims(&self) -> &[UnmatchedBrace] {


@@ -2300,7 +2300,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
                 err.multipart_suggestion_verbose(
                     message,
                     std::iter::once((span, intro_sugg))
-                        .chain(spans_suggs.clone())
+                        .chain(spans_suggs.iter().cloned())
                         .collect(),
                     Applicability::MaybeIncorrect,
                 );


@@ -341,7 +341,7 @@ impl<'tcx> AutoTraitFinder<'tcx> {
                }
            }
-           let obligations = impl_source.clone().nested_obligations().into_iter();
+           let obligations = impl_source.borrow_nested_obligations().iter().cloned();
            if !self.evaluate_nested_obligations(
                ty,


@@ -298,9 +298,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             // show that order to the user as a possible order for the parameters
             let mut param_types_present = defs
                 .params
-                .clone()
-                .into_iter()
-                .map(|param| (param.kind.to_ord(), param))
+                .iter()
+                .map(|param| (param.kind.to_ord(), param.clone()))
                 .collect::<Vec<(ParamKindOrd, GenericParamDef)>>();
             param_types_present.sort_by_key(|(ord, _)| *ord);
             let (mut param_types_present, ordered_params): (


@@ -1217,7 +1217,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            // Combine all the reasons of why the root variable should be captured as a result of
            // auto trait implementation issues
-           auto_trait_migration_reasons.extend(capture_trait_reasons.clone());
+           auto_trait_migration_reasons.extend(capture_trait_reasons.iter().copied());
            diagnostics_info.push(MigrationLintNote {
                captures_info,


@@ -525,8 +525,8 @@ where
            GenericBound::TraitBound(ref mut p, _) => {
                // Insert regions into the for_generics hash map first, to ensure
                // that we don't end up with duplicate bounds (e.g., for<'b, 'b>)
-               for_generics.extend(p.generic_params.clone());
-               p.generic_params = for_generics.into_iter().collect();
+               for_generics.extend(p.generic_params.drain(..));
+               p.generic_params.extend(for_generics);
                self.is_fn_trait(&p.trait_)
            }
            _ => false,
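
The change above merges the two collections by moving elements instead of cloning them: the bound's params are drained into the dedup set, and the now-empty Vec is refilled from that set. A minimal sketch of that move-based merge using plain std types (names and types here are stand-ins, not the rustdoc ones):

    use std::collections::HashSet;

    fn main() {
        let mut for_generics: HashSet<String> = HashSet::from(["'a".to_string()]);
        let mut generic_params: Vec<String> = vec!["'a".to_string(), "'b".to_string()];

        // Move the params into the set (deduplicating) and leave the Vec empty...
        for_generics.extend(generic_params.drain(..));
        // ...then move the deduplicated entries back into the Vec; nothing is cloned.
        generic_params.extend(for_generics);

        assert_eq!(generic_params.len(), 2); // "'a" and "'b", each once
    }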


@@ -544,10 +544,15 @@ fn get_fn_inputs_and_outputs<'tcx>(
        (true, _) => (Some(impl_self), &func.generics),
        (_, true) => (Some(impl_self), impl_generics),
        (false, false) => {
-           let mut params = func.generics.params.clone();
-           params.extend(impl_generics.params.clone());
-           let mut where_predicates = func.generics.where_predicates.clone();
-           where_predicates.extend(impl_generics.where_predicates.clone());
+           let params =
+               func.generics.params.iter().chain(&impl_generics.params).cloned().collect();
+           let where_predicates = func
+               .generics
+               .where_predicates
+               .iter()
+               .chain(&impl_generics.where_predicates)
+               .cloned()
+               .collect();
            combined_generics = clean::Generics { params, where_predicates };
            (Some(impl_self), &combined_generics)
        }


@@ -106,7 +106,9 @@ impl<'tcx> JsonRenderer<'tcx> {
                // only need to synthesize items for external traits
                if !id.is_local() {
                    let trait_item = &trait_item.trait_;
-                   trait_item.items.clone().into_iter().for_each(|i| self.item(i).unwrap());
+                   for item in &trait_item.items {
+                       self.item(item.clone()).unwrap();
+                   }
                    let item_id = from_item_id(id.into(), self.tcx);
                    Some((
                        item_id.clone(),
@@ -274,10 +276,9 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
            paths: self
                .cache
                .paths
-               .clone()
-               .into_iter()
-               .chain(self.cache.external_paths.clone().into_iter())
-               .map(|(k, (path, kind))| {
+               .iter()
+               .chain(&self.cache.external_paths)
+               .map(|(&k, &(ref path, kind))| {
                    (
                        from_item_id(k.into(), self.tcx),
                        types::ItemSummary {
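
In the second hunk both maps are borrowed and chained; iterating a `&HashMap` yields `(&K, &V)` pairs, which is why the closure now destructures references rather than owned tuples. A small sketch of the same shape with ordinary std maps (illustrative names only):

    use std::collections::HashMap;

    fn main() {
        let local: HashMap<u32, String> = HashMap::from([(1, "core".to_string())]);
        let external: HashMap<u32, String> = HashMap::from([(2, "serde".to_string())]);

        // Chaining the borrowed maps yields (&u32, &String) items, so the closure
        // copies the key out of its reference and borrows the value.
        let summaries: HashMap<u32, usize> =
            local.iter().chain(&external).map(|(&k, path)| (k, path.len())).collect();

        assert_eq!(summaries.len(), 2);
    }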


@@ -1702,7 +1702,7 @@ impl<'test> TestCx<'test> {
    fn compose_and_run_compiler(&self, mut rustc: Command, input: Option<String>) -> ProcRes {
        let aux_dir = self.build_all_auxiliary(&mut rustc);
-       self.props.unset_rustc_env.clone().iter().fold(&mut rustc, |rustc, v| rustc.env_remove(v));
+       self.props.unset_rustc_env.iter().fold(&mut rustc, Command::env_remove);
        rustc.envs(self.props.rustc_env.clone());
        self.compose_and_run(
            rustc,


@@ -121,12 +121,8 @@ impl RawEmitter {
        for chunk in compressed_words.chunks(chunk_length) {
            chunks.insert(chunk);
        }
-       let chunk_map = chunks
-           .clone()
-           .into_iter()
-           .enumerate()
-           .map(|(idx, chunk)| (chunk, idx))
-           .collect::<HashMap<_, _>>();
+       let chunk_map =
+           chunks.iter().enumerate().map(|(idx, &chunk)| (chunk, idx)).collect::<HashMap<_, _>>();
        let mut chunk_indices = Vec::new();
        for chunk in compressed_words.chunks(chunk_length) {
            chunk_indices.push(chunk_map[chunk]);
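
Here the chunk-to-index map is built by borrowing `chunks` rather than cloning the whole set first. A stand-alone sketch of that enumerate-and-invert step, assuming a set of copyable values:

    use std::collections::{BTreeSet, HashMap};

    fn main() {
        let chunks: BTreeSet<u64> = [3, 1, 4, 5].into_iter().collect();

        // Enumerate the borrowed set and invert (index, value) into (value, index);
        // `&chunk` copies each u64 out of its reference instead of cloning the set.
        let chunk_map: HashMap<u64, usize> =
            chunks.iter().enumerate().map(|(idx, &chunk)| (chunk, idx)).collect();

        assert_eq!(chunk_map.len(), chunks.len());
    }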