
Rollup merge of #55358 - sinkuu:redundant_clone2, r=estebank

Remove redundant clone (2)
commit 626b49666f by kennytm, 2018-10-26 18:25:12 +08:00
35 changed files with 56 additions and 65 deletions
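
Every hunk below follows the same shape: a value that was being `.clone()`d at its last use is now moved instead, so the extra allocation or reference-count bump disappears. A minimal sketch of the pattern, with made-up names (not code from this commit):

```rust
fn consume(v: Vec<String>) -> usize {
    v.len()
}

fn main() {
    let names = vec!["a".to_string(), "b".to_string()];

    // Before: `names` was cloned even though it is never used again,
    // so the copy was pure overhead.
    //     let n = consume(names.clone());

    // After: this is the last use of `names`, so ownership can be
    // handed over directly.
    let n = consume(names);
    println!("{}", n);
}
```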

@@ -1447,8 +1447,8 @@ impl Step for Extended {
             tarballs.extend(rls_installer.clone());
             tarballs.extend(clippy_installer.clone());
             tarballs.extend(rustfmt_installer.clone());
-            tarballs.extend(llvm_tools_installer.clone());
-            tarballs.extend(lldb_installer.clone());
+            tarballs.extend(llvm_tools_installer);
+            tarballs.extend(lldb_installer);
             tarballs.push(analysis_installer);
             tarballs.push(std_installer);
             if builder.config.docs {

@@ -1052,7 +1052,7 @@ impl Step for Compiletest {
         let hostflags = flags.clone();
         cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
-        let mut targetflags = flags.clone();
+        let mut targetflags = flags;
         targetflags.push(format!(
             "-Lnative={}",
             builder.test_helpers_out(target).display()

@@ -458,7 +458,7 @@ where
             );
             debug!("projection_must_outlive: unique declared bound appears in trait ref");
             self.delegate
-                .push_sub_region_constraint(origin.clone(), region, unique_bound);
+                .push_sub_region_constraint(origin, region, unique_bound);
             return;
         }

@@ -749,7 +749,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
                 a // LUB(a,a) = a
             }
-            _ => self.combine_vars(tcx, Lub, a, b, origin.clone()),
+            _ => self.combine_vars(tcx, Lub, a, b, origin),
         }
     }
@@ -771,7 +771,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
                 a // GLB(a,a) = a
             }
-            _ => self.combine_vars(tcx, Glb, a, b, origin.clone()),
+            _ => self.combine_vars(tcx, Glb, a, b, origin),
         }
     }

@@ -239,7 +239,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
         let msg = format!("type mismatch resolving `{}`", predicate);
         let error_id = (DiagnosticMessageId::ErrorId(271),
-                        Some(obligation.cause.span), msg.clone());
+                        Some(obligation.cause.span), msg);
         let fresh = self.tcx.sess.one_time_diagnostics.borrow_mut().insert(error_id);
         if fresh {
             let mut diag = struct_span_err!(
@@ -379,7 +379,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
             }
         }
         if let Some(t) = self.get_parent_trait_ref(&obligation.cause.code) {
-            flags.push(("parent_trait".to_owned(), Some(t.to_string())));
+            flags.push(("parent_trait".to_owned(), Some(t)));
         }
         if let Some(k) = obligation.cause.span.compiler_desugaring_kind() {

@@ -594,7 +594,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
             // But for now, let's classify this as an overflow:
             let recursion_limit = *selcx.tcx().sess.recursion_limit.get();
-            let obligation = Obligation::with_depth(cause.clone(),
+            let obligation = Obligation::with_depth(cause,
                                                     recursion_limit,
                                                     param_env,
                                                     projection_ty);

@@ -1200,7 +1200,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             cstore,
             global_arenas: &arenas.global,
             global_interners: interners,
-            dep_graph: dep_graph.clone(),
+            dep_graph,
             types: common_types,
             trait_map,
             export_map: resolutions.export_map.into_iter().map(|(k, v)| {
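
Besides dropping the clone, this hunk switches to field-init shorthand: the local `dep_graph` is not used again after the struct literal, so it can be moved into the field (for rustc's `DepGraph` the clone was probably only a reference-count bump, but it was still unnecessary). A toy version of the same change, with hypothetical types:

```rust
#[derive(Debug)]
struct Graph {
    edges: Vec<(u32, u32)>,
}

#[derive(Debug)]
struct Context {
    dep_graph: Graph,
    node_count: usize,
}

fn main() {
    let dep_graph = Graph { edges: vec![(0, 1), (1, 2)] };
    let node_count = 3;

    // `dep_graph` is not needed after this point, so the field shorthand
    // moves it in; `dep_graph: dep_graph.clone()` would copy the Vec.
    let cx = Context { dep_graph, node_count };
    println!("{:?}", cx);
}
```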

@@ -695,7 +695,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
         let mut err = self.cannot_act_on_moved_value(use_span,
                                                      verb,
                                                      msg,
-                                                     Some(nl.to_string()),
+                                                     Some(nl),
                                                      Origin::Ast);
         let need_note = match lp.ty.sty {
             ty::Closure(id, _) => {

@@ -347,7 +347,7 @@ impl<'a, 'tcx> MoveData<'tcx> {
            lp = base_lp.clone();
        }
-        self.add_move_helper(tcx, orig_lp.clone(), id, kind);
+        self.add_move_helper(tcx, orig_lp, id, kind);
    }
    fn add_move_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
@@ -359,7 +359,7 @@ impl<'a, 'tcx> MoveData<'tcx> {
               id,
               kind);
-        let path_index = self.move_path(tcx, lp.clone());
+        let path_index = self.move_path(tcx, lp);
        let move_index = MoveIndex(self.moves.borrow().len());
        let next_move = self.path_first_move(path_index);
@@ -402,7 +402,7 @@ impl<'a, 'tcx> MoveData<'tcx> {
            }
        }
-        self.add_assignment_helper(tcx, lp.clone(), assign_id, span);
+        self.add_assignment_helper(tcx, lp, assign_id, span);
    }
    fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,

@@ -784,7 +784,7 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
        !tcx.sess.opts.output_types.should_codegen() {
         let ongoing_codegen = write::start_async_codegen(
             tcx,
-            time_graph.clone(),
+            time_graph,
             metadata,
             rx,
             1);

@@ -138,7 +138,7 @@ pub fn filename_for_input(sess: &Session,
             let suffix = &sess.target.target.options.exe_suffix;
             let out_filename = outputs.path(OutputType::Exe);
             if suffix.is_empty() {
-                out_filename.to_path_buf()
+                out_filename
             } else {
                 out_filename.with_extension(&suffix[1..])
             }
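
`outputs.path(OutputType::Exe)` already yields an owned `PathBuf`, so the `.to_path_buf()` in the `if` branch only made a copy via the `Path` deref. A standalone sketch of the fixed function shape, with invented inputs:

```rust
use std::path::PathBuf;

fn exe_filename(out_filename: PathBuf, suffix: &str) -> PathBuf {
    if suffix.is_empty() {
        // Already an owned PathBuf: return it as-is instead of
        // copying it with `.to_path_buf()`.
        out_filename
    } else {
        out_filename.with_extension(&suffix[1..])
    }
}

fn main() {
    println!("{:?}", exe_filename(PathBuf::from("target/foo"), ".exe"));
    println!("{:?}", exe_filename(PathBuf::from("target/foo"), ""));
}
```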

@@ -1615,7 +1615,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
                 ProjectionElem::Index(..)
                 | ProjectionElem::ConstantIndex { .. }
                 | ProjectionElem::Subslice { .. } => {
-                    self.describe_field(&proj.base, field).to_string()
+                    self.describe_field(&proj.base, field)
                 }
             },
         }

@@ -265,7 +265,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             block,
             Statement {
                 source_info,
-                kind: StatementKind::FakeRead(FakeReadCause::ForLet, place.clone()),
+                kind: StatementKind::FakeRead(FakeReadCause::ForLet, place),
             },
         );
@@ -314,7 +314,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             Statement {
                 source_info: ty_source_info,
                 kind: StatementKind::AscribeUserType(
-                    place.clone(),
+                    place,
                     ty::Variance::Invariant,
                     box ascription_user_ty,
                 ),

@@ -324,7 +324,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
                 let ref_ty = self.hir.tcx().mk_ref(region, tam);
                 // let lhs_ref_place = &lhs;
-                let ref_rvalue = Rvalue::Ref(region, BorrowKind::Shared, place.clone());
+                let ref_rvalue = Rvalue::Ref(region, BorrowKind::Shared, place);
                 let lhs_ref_place = self.temp(ref_ty, test.span);
                 self.cfg.push_assign(block, source_info, &lhs_ref_place, ref_rvalue);
                 let val = Operand::Move(lhs_ref_place);

@@ -644,7 +644,7 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
         }).collect()
     });
-    let mut builder = Builder::new(hir.clone(),
+    let mut builder = Builder::new(hir,
                                    span,
                                    arguments.len(),
                                    safety,
@@ -714,7 +714,7 @@ fn construct_const<'a, 'gcx, 'tcx>(
     let ty = hir.tables().expr_ty_adjusted(ast_expr);
     let owner_id = tcx.hir.body_owner(body_id);
     let span = tcx.hir.span(owner_id);
-    let mut builder = Builder::new(hir.clone(), span, 0, Safety::Safe, ty, ty_span,vec![]);
+    let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty, ty_span,vec![]);
     let mut block = START_BLOCK;
     let expr = builder.hir.mirror(ast_expr);

@@ -547,7 +547,7 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> {
         // `dest[i] = Clone::clone(src[beg])`;
         // Goto #3 if ok, #5 if unwinding happens.
         let dest_field = dest.clone().index(beg);
-        let src_field = src.clone().index(beg);
+        let src_field = src.index(beg);
         self.make_clone_call(dest_field, src_field, ty, BasicBlock::new(3),
                              BasicBlock::new(5));

@@ -753,11 +753,11 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
                 self.place.clone()
             )));
             drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
-                CastKind::Misc, Operand::Move(tmp.clone()), iter_ty
+                CastKind::Misc, Operand::Move(tmp), iter_ty
             )));
             drop_block_stmts.push(self.assign(&length_or_end,
                 Rvalue::BinaryOp(BinOp::Offset,
-                    Operand::Copy(cur.clone()), Operand::Move(length.clone())
+                    Operand::Copy(cur), Operand::Move(length)
             )));
         } else {
             // index = 0 (length already pushed)

@@ -218,7 +218,7 @@ impl<'a, 'cl> Resolver<'a, 'cl> {
         };
         this.add_import_directive(
             base.into_iter().collect(),
-            subclass.clone(),
+            subclass,
             source.ident.span,
             id,
             root_use_tree.span,

@@ -105,7 +105,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> {
             tcx: save_ctxt.tcx,
             save_ctxt,
             dumper,
-            span: span_utils.clone(),
+            span: span_utils,
             cur_scope: CRATE_NODE_ID,
             // mac_defs: FxHashSet::default(),
             macro_calls: FxHashSet::default(),

@@ -1132,7 +1132,7 @@ impl ToJson for Target {
         macro_rules! target_val {
             ($attr:ident) => ( {
                 let name = (stringify!($attr)).replace("_", "-");
-                d.insert(name.to_string(), self.$attr.to_json());
+                d.insert(name, self.$attr.to_json());
             } );
             ($attr:ident, $key_name:expr) => ( {
                 let name = $key_name;
@@ -1144,7 +1144,7 @@ impl ToJson for Target {
             ($attr:ident) => ( {
                 let name = (stringify!($attr)).replace("_", "-");
                 if default.$attr != self.options.$attr {
-                    d.insert(name.to_string(), self.options.$attr.to_json());
+                    d.insert(name, self.options.$attr.to_json());
                 }
             } );
             ($attr:ident, $key_name:expr) => ( {
@@ -1160,7 +1160,7 @@ impl ToJson for Target {
                         .iter()
                         .map(|(k, v)| (k.desc().to_owned(), v.clone()))
                         .collect::<BTreeMap<_, _>>();
-                    d.insert(name.to_string(), obj.to_json());
+                    d.insert(name, obj.to_json());
                 }
             } );
             (env - $attr:ident) => ( {
@@ -1170,7 +1170,7 @@ impl ToJson for Target {
                         .iter()
                         .map(|&(ref k, ref v)| k.clone() + "=" + &v)
                         .collect::<Vec<_>>();
-                    d.insert(name.to_string(), obj.to_json());
+                    d.insert(name, obj.to_json());
                 }
             } );
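
In these macros `name` is already an owned `String` (either the result of `.replace("_", "-")` or the `$key_name` expression), so the `.to_string()` before `d.insert(...)` re-allocated the key for nothing. The same point on a plain map, leaving the JSON types out:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut d: BTreeMap<String, String> = BTreeMap::new();

    // `replace` returns a fresh String, so it can serve as the map key
    // directly; `name.to_string()` would have copied it again.
    let name = "data_layout".replace("_", "-");
    d.insert(name, "e-m:e-i64:64".to_string());

    println!("{:?}", d);
}
```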

@@ -61,7 +61,7 @@ impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> {
                 let suggested_limit = *tcx.sess.recursion_limit.get() * 2;
                 let msg = format!("reached the recursion limit while auto-dereferencing {:?}",
                                   self.cur_ty);
-                let error_id = (DiagnosticMessageId::ErrorId(55), Some(self.span), msg.clone());
+                let error_id = (DiagnosticMessageId::ErrorId(55), Some(self.span), msg);
                 let fresh = tcx.sess.one_time_diagnostics.borrow_mut().insert(error_id);
                 if fresh {
                     struct_span_err!(tcx.sess,

@@ -325,7 +325,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                 diag.span_suggestion_with_applicability(
                     impl_err_span,
                     "consider change the type to match the mutability in trait",
-                    trait_err_str.to_string(),
+                    trait_err_str,
                     Applicability::MachineApplicable,
                 );
             }

@@ -1157,7 +1157,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
                 // Convert the bounds into obligations.
                 let impl_obligations = traits::predicates_for_generics(
-                    cause.clone(), self.param_env, &impl_bounds);
+                    cause, self.param_env, &impl_bounds);
                 debug!("impl_obligations={:?}", impl_obligations);
                 impl_obligations.into_iter()
@@ -1175,7 +1175,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
             TraitCandidate(trait_ref) => {
                 let predicate = trait_ref.to_predicate();
                 let obligation =
-                    traits::Obligation::new(cause.clone(), self.param_env, predicate);
+                    traits::Obligation::new(cause, self.param_env, predicate);
                 if !self.predicate_may_hold(&obligation) {
                     if self.probe(|_| self.select_trait_candidate(trait_ref).is_err()) {
                         // This candidate's primary obligation doesn't even

@@ -4752,25 +4752,17 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
         } else if !self.check_for_cast(err, expr, found, expected) {
             let methods = self.get_conversion_methods(expr.span, expected, found);
             if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) {
-                let suggestions = iter::repeat(expr_text).zip(methods.iter())
+                let suggestions = iter::repeat(&expr_text).zip(methods.iter())
                     .filter_map(|(receiver, method)| {
                         let method_call = format!(".{}()", method.ident);
                         if receiver.ends_with(&method_call) {
                             None // do not suggest code that is already there (#53348)
                         } else {
-                            /*
-                            methods defined in `method_call_list` will overwrite
-                            `.clone()` in copy of `receiver`
-                            */
                             let method_call_list = [".to_vec()", ".to_string()"];
                             if receiver.ends_with(".clone()")
                                 && method_call_list.contains(&method_call.as_str()) {
-                                // created copy of `receiver` because we don't want other
-                                // suggestion to get affected
-                                let mut new_receiver = receiver.clone();
-                                let max_len = new_receiver.rfind(".").unwrap();
-                                new_receiver.truncate(max_len);
-                                Some(format!("{}{}", new_receiver, method_call))
+                                let max_len = receiver.rfind(".").unwrap();
+                                Some(format!("{}{}", &receiver[..max_len], method_call))
                             }
                             else {
                                 Some(format!("{}{}", receiver, method_call))
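
The largest hunk reworks the conversion-method suggestions in two ways: the snippet is repeated by reference (`iter::repeat(&expr_text)`) instead of cloning a `String` per candidate method, and the clone-then-truncate dance is replaced by slicing the receiver up to its last `.`. The slicing half in isolation, with invented inputs:

```rust
fn suggest(receiver: &str, method_call: &str) -> String {
    // Before: clone `receiver`, truncate the copy at the last `.`, then format.
    // After: slice the original up to the last `.`; no intermediate String.
    let max_len = receiver.rfind('.').unwrap();
    format!("{}{}", &receiver[..max_len], method_call)
}

fn main() {
    // e.g. rewrite `x.clone()` as `x.to_string()` rather than
    // suggesting `x.clone().to_string()`.
    assert_eq!(suggest("x.clone()", ".to_string()"), "x.to_string()");
    println!("ok");
}
```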

@@ -84,7 +84,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
             .into_iter()
             .chain(self.get_auto_trait_impl_for(
                 def_id,
-                name.clone(),
+                name,
                 generics.clone(),
                 def_ctor,
                 tcx.require_lang_item(lang_items::SyncTraitLangItem),

@@ -67,7 +67,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> BlanketImplFinder <'a, 'tcx, 'rcx, 'cstore> {
         }
         let ty = self.cx.tcx.type_of(def_id);
         let generics = self.cx.tcx.generics_of(def_id);
-        let real_name = name.clone().map(|name| Ident::from_str(&name));
+        let real_name = name.map(|name| Ident::from_str(&name));
         let param_env = self.cx.tcx.param_env(def_id);
         for &trait_def_id in self.cx.all_traits.iter() {
             if !self.cx.renderinfo.borrow().access_levels.is_doc_reachable(trait_def_id) ||
@@ -112,7 +112,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> BlanketImplFinder <'a, 'tcx, 'rcx, 'cstore> {
                 );
                 let may_apply = match infcx.evaluate_obligation(
                     &traits::Obligation::new(
-                        cause.clone(),
+                        cause,
                         param_env,
                         trait_ref.to_predicate(),
                     ),

@@ -3611,7 +3611,7 @@ impl ToSource for syntax_pos::Span {
     fn to_src(&self, cx: &DocContext) -> String {
         debug!("converting span {:?} to snippet", self.clean(cx));
         let sn = match cx.sess().source_map().span_to_snippet(*self) {
-            Ok(x) => x.to_string(),
+            Ok(x) => x,
             Err(_) => String::new()
         };
         debug!("got snippet {}", sn);

@@ -521,7 +521,7 @@ pub fn run(mut krate: clean::Crate,
             external_html: external_html.clone(),
             krate: krate.name.clone(),
         },
-        css_file_extension: css_file_extension.clone(),
+        css_file_extension,
         created_dirs: Default::default(),
         sort_modules_alphabetically,
         themes,
@@ -1343,7 +1343,7 @@ impl DocFolder for Cache {
                     self.search_index.push(IndexItem {
                         ty: item.type_(),
                         name: s.to_string(),
-                        path: path.join("::").to_string(),
+                        path: path.join("::"),
                         desc: plain_summary_line(item.doc_value()),
                         parent,
                         parent_idx: None,
@@ -2284,7 +2284,7 @@ fn document_short(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, link
             format!("{} [Read more]({})",
                     &plain_summary_line(Some(s)), naive_assoc_href(item, link))
         } else {
-            plain_summary_line(Some(s)).to_string()
+            plain_summary_line(Some(s))
         };
         render_markdown(w, cx, &markdown, item.links(), prefix)?;
     } else if !prefix.is_empty() {
@@ -2436,7 +2436,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context,
     // (which is the position in the vector).
     indices.dedup_by_key(|i| (items[*i].def_id,
                               if items[*i].name.as_ref().is_some() {
-                                  Some(full_path(cx, &items[*i]).clone())
+                                  Some(full_path(cx, &items[*i]))
                               } else {
                                   None
                               },

@@ -460,7 +460,7 @@ fn main_args(args: &[String]) -> isize {
     let externs = match parse_externs(&matches) {
         Ok(ex) => ex,
         Err(err) => {
-            diag.struct_err(&err.to_string()).emit();
+            diag.struct_err(&err).emit();
             return 1;
         }
     };

@@ -208,7 +208,7 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
     // needs to be the result of SourceMap::span_to_unmapped_path
     let input = config::Input::Str {
         name: filename.to_owned(),
-        input: test.to_owned(),
+        input: test,
     };
     let outputs = OutputTypes::new(&[(OutputType::Exe, None)]);
@@ -350,7 +350,7 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
         let newpath = {
             let path = env::var_os(var).unwrap_or(OsString::new());
             let mut path = env::split_paths(&path).collect::<Vec<_>>();
-            path.insert(0, libdir.clone());
+            path.insert(0, libdir);
             env::join_paths(path).unwrap()
         };
         cmd.env(var, &newpath);

@@ -142,8 +142,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                 // Replace all the tokens for the corresponding positions in the macro, to maintain
                 // proper positions in error reporting, while maintaining the macro_backtrace.
                 if rhs_spans.len() == tts.len() {
-                    tts = tts.map_enumerated(|i, tt| {
-                        let mut tt = tt.clone();
+                    tts = tts.map_enumerated(|i, mut tt| {
                         let mut sp = rhs_spans[i];
                         sp = sp.with_ctxt(tt.span().ctxt());
                         tt.set_span(sp);
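
The closure handed to `map_enumerated` already receives each token tree by value, so binding the parameter as `mut tt` gives a mutable value to work on and the explicit `tt.clone()` goes away. The same idea on an ordinary iterator (made-up data; `map_enumerated` itself is internal to the macro expander):

```rust
fn main() {
    let values = [10u32, 20, 30];

    // Binding the closure parameter as `mut v` yields a mutable copy of a
    // value we already own, so there is nothing to clone in the body.
    let adjusted: Vec<u32> = values
        .iter()
        .copied()
        .enumerate()
        .map(|(i, mut v)| {
            v += i as u32; // mutate the owned value in place
            v
        })
        .collect();

    assert_eq!(adjusted, vec![10, 21, 32]);
    println!("{:?}", adjusted);
}
```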

@@ -219,9 +219,9 @@ impl Add for LockstepIterSize {
             LockstepIterSize::Unconstrained => other,
             LockstepIterSize::Contradiction(_) => self,
             LockstepIterSize::Constraint(l_len, ref l_id) => match other {
-                LockstepIterSize::Unconstrained => self.clone(),
+                LockstepIterSize::Unconstrained => self,
                 LockstepIterSize::Contradiction(_) => other,
-                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self.clone(),
+                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
                 LockstepIterSize::Constraint(r_len, r_id) => {
                     let msg = format!("inconsistent lockstep iteration: \
                                        '{}' has {} items, but '{}' has {}",
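
`Add::add` takes `self` by value, so the arms that keep the left-hand size can return `self` directly; the removed `.clone()` calls duplicated a value the method already owned. A stripped-down version of the same impl (the constraint's identifier is omitted to keep it short):

```rust
use std::ops::Add;

#[derive(Debug, PartialEq)]
enum IterSize {
    Unconstrained,
    Constraint(usize),
    Contradiction,
}

impl Add for IterSize {
    type Output = IterSize;

    fn add(self, other: IterSize) -> IterSize {
        match self {
            IterSize::Unconstrained => other,
            IterSize::Contradiction => self,
            IterSize::Constraint(l_len) => match other {
                // `add` owns `self`, so it can be returned as-is
                // instead of being cloned.
                IterSize::Unconstrained => self,
                IterSize::Contradiction => other,
                IterSize::Constraint(r_len) if l_len == r_len => self,
                IterSize::Constraint(_) => IterSize::Contradiction,
            },
        }
    }
}

fn main() {
    assert_eq!(IterSize::Constraint(3) + IterSize::Unconstrained, IterSize::Constraint(3));
    assert_eq!(IterSize::Constraint(3) + IterSize::Constraint(4), IterSize::Contradiction);
    println!("ok");
}
```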

@@ -346,7 +346,7 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
     test_runner.span = sp;
-    let test_main_path_expr = ecx.expr_path(test_runner.clone());
+    let test_main_path_expr = ecx.expr_path(test_runner);
     let call_test_main = ecx.expr_call(sp, test_main_path_expr,
                                        vec![mk_tests_slice(cx)]);
     let call_test_main = ecx.stmt_expr(call_test_main);

@@ -178,7 +178,7 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<
             cx.expr_match(span, new, vec![eq_arm, neq_arm])
         },
-        equals_expr.clone(),
+        equals_expr,
         Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| {
             if self_args.len() != 2 {
                 cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")

@@ -75,7 +75,7 @@ impl MultiItemModifier for ProcMacroDerive {
         // Mark attributes as known, and used.
         MarkAttrs(&self.attrs).visit_item(&item);
-        let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item.clone()));
+        let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item));
         let res = __internal::set_sess(ecx, || {
             let inner = self.inner;
             panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input)))