Opt for .cloned() over .map(|x| x.clone()) etc.
commit 2f586b9687
parent 5705d48e28
39 changed files with 67 additions and 81 deletions
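
The change is mechanical throughout: wherever an iterator over references was copied element by element with .map(|x| x.clone()) or .map(|x| *x), the Iterator::cloned() adapter says the same thing directly, and Option::cloned() covers the Option<&T> returned by map lookups such as HashMap::get. The sketch below illustrates the idiom; it is written against current std, not taken from the commit, and the names (words, by_hand, looked_up, ...) are only illustrative.

    use std::collections::HashMap;

    fn main() {
        let words = vec!["alpha".to_string(), "beta".to_string()];

        // Before: clone each element by hand.
        let by_hand: Vec<String> = words.iter().map(|w| w.clone()).collect();

        // After: Iterator::cloned() expresses the same copy-out directly.
        let with_cloned: Vec<String> = words.iter().cloned().collect();
        assert_eq!(by_hand, with_cloned);

        // Option::cloned() does the same for the Option<&V> a map lookup returns,
        // replacing .map(|v| *v) / .map(|v| v.clone()).
        let mut map = HashMap::new();
        map.insert("key", 1u64);
        let looked_up: Option<u64> = map.get("key").cloned();
        assert_eq!(looked_up, Some(1));
    }

Most hunks below are exactly this one-line substitution; a few also simplify the surrounding code, folding a match into if let or dropping a now-redundant intermediate collection.
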
@@ -21,6 +21,7 @@
 #![feature(test)]
 #![feature(unicode)]
 #![feature(env)]
+#![feature(core)]
 
 #![deny(warnings)]
 
@@ -1133,7 +1133,7 @@ fn compile_test_(config: &Config, props: &TestProps,
     // FIXME (#9639): This needs to handle non-utf8 paths
     let mut link_args = vec!("-L".to_string(),
                              aux_dir.as_str().unwrap().to_string());
-    link_args.extend(extra_args.iter().map(|s| s.clone()));
+    link_args.extend(extra_args.iter().cloned());
     let args = make_compile_args(config,
                                  props,
                                  link_args,
@@ -2306,7 +2306,7 @@ mod tests {
     #[test]
     fn test_from_bools() {
         let bools = vec![true, false, true, true];
-        let bitv: Bitv = bools.iter().map(|n| *n).collect();
+        let bitv: Bitv = bools.iter().cloned().collect();
         assert_eq!(format!("{:?}", bitv), "1011");
     }
 

@@ -2319,12 +2319,12 @@ mod tests {
     #[test]
     fn test_bitv_iterator() {
         let bools = vec![true, false, true, true];
-        let bitv: Bitv = bools.iter().map(|n| *n).collect();
+        let bitv: Bitv = bools.iter().cloned().collect();
 
         assert_eq!(bitv.iter().collect::<Vec<bool>>(), bools);
 
         let long: Vec<_> = (0i32..10000).map(|i| i % 2 == 0).collect();
-        let bitv: Bitv = long.iter().map(|n| *n).collect();
+        let bitv: Bitv = long.iter().cloned().collect();
         assert_eq!(bitv.iter().collect::<Vec<bool>>(), long)
     }
 
@@ -938,7 +938,7 @@ impl<A: Ord> Ord for DList<A> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<A: Clone> Clone for DList<A> {
     fn clone(&self) -> DList<A> {
-        self.iter().map(|x| x.clone()).collect()
+        self.iter().cloned().collect()
     }
 }
 

@@ -1056,7 +1056,7 @@ mod tests {
 
     #[cfg(test)]
     fn list_from<T: Clone>(v: &[T]) -> DList<T> {
-        v.iter().map(|x| (*x).clone()).collect()
+        v.iter().cloned().collect()
     }
 
     #[test]
@@ -350,7 +350,7 @@ pub trait IteratorExt: Iterator + Sized {
     ///
     /// ```
     /// let xs = [100, 200, 300];
-    /// let mut it = xs.iter().map(|x| *x).peekable();
+    /// let mut it = xs.iter().cloned().peekable();
     /// assert_eq!(*it.peek().unwrap(), 100);
     /// assert_eq!(it.next().unwrap(), 100);
     /// assert_eq!(it.next().unwrap(), 200);

@@ -713,7 +713,7 @@ fn test_random_access_inspect() {
 fn test_random_access_map() {
     let xs = [1, 2, 3, 4, 5];
 
-    let mut it = xs.iter().map(|x| *x);
+    let mut it = xs.iter().cloned();
     assert_eq!(xs.len(), it.indexable());
     for (i, elt) in xs.iter().enumerate() {
         assert_eq!(Some(*elt), it.idx(i));
@@ -139,8 +139,7 @@ impl CStore {
     pub fn get_used_crate_source(&self, cnum: ast::CrateNum)
                                      -> Option<CrateSource> {
         self.used_crate_sources.borrow_mut()
-            .iter().find(|source| source.cnum == cnum)
-            .map(|source| source.clone())
+            .iter().find(|source| source.cnum == cnum).cloned()
     }
 
     pub fn reset(&self) {

@@ -218,7 +217,7 @@ impl CStore {
 
     pub fn find_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId)
                                      -> Option<ast::CrateNum> {
-        self.extern_mod_crate_map.borrow().get(&emod_id).map(|x| *x)
+        self.extern_mod_crate_map.borrow().get(&emod_id).cloned()
     }
 }
 
@@ -76,7 +76,7 @@ impl<'a> fmt::Debug for Matrix<'a> {
         pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0)
     }).collect();
 
-    let total_width = column_widths.iter().map(|n| *n).sum() + column_count * 3 + 1;
+    let total_width = column_widths.iter().cloned().sum() + column_count * 3 + 1;
     let br = repeat('+').take(total_width).collect::<String>();
     try!(write!(f, "{}\n", br));
     for row in pretty_printed_matrix {

@@ -501,7 +501,7 @@ fn lit_to_const(lit: &ast::Lit, ty_hint: Option<Ty>) -> const_val {
     match lit.node {
         ast::LitStr(ref s, _) => const_str((*s).clone()),
         ast::LitBinary(ref data) => {
-            const_binary(Rc::new(data.iter().map(|x| *x).collect()))
+            const_binary(data.clone())
         }
         ast::LitByte(n) => const_uint(n as u64),
         ast::LitChar(n) => const_uint(n as u64),

@@ -158,7 +158,7 @@ fn calculate_type(sess: &session::Session,
 
     // Collect what we've got so far in the return vector.
     let mut ret = (1..sess.cstore.next_crate_num()).map(|i| {
        match formats.get(&i).map(|v| *v) {
-        match formats.get(&i).map(|v| *v) {
+        match formats.get(&i).cloned() {
             v @ Some(cstore::RequireDynamic) => v,
             _ => None,
         }

@@ -924,7 +924,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
 
     fn rebuild(&self)
                -> (ast::FnDecl, Option<ast::ExplicitSelf_>, ast::Generics) {
-        let mut expl_self_opt = self.expl_self_opt.map(|x| x.clone());
+        let mut expl_self_opt = self.expl_self_opt.cloned();
         let mut inputs = self.fn_decl.inputs.clone();
         let mut output = self.fn_decl.output.clone();
         let mut ty_params = self.generics.ty_params.clone();
@@ -147,18 +147,12 @@ struct LanguageItemCollector<'a> {
 
 impl<'a, 'v> Visitor<'v> for LanguageItemCollector<'a> {
     fn visit_item(&mut self, item: &ast::Item) {
-        match extract(&item.attrs) {
-            Some(value) => {
-                let item_index = self.item_refs.get(&value[]).map(|x| *x);
+        if let Some(value) = extract(&item.attrs) {
+            let item_index = self.item_refs.get(&value[]).cloned();
 
-                match item_index {
-                    Some(item_index) => {
+            if let Some(item_index) = item_index {
                 self.collect_item(item_index, local_def(item.id), item.span)
             }
-                    None => {}
-                }
-            }
-            None => {}
         }
 
         visit::walk_item(self, item);
@@ -407,7 +407,7 @@ impl RegionMaps {
 
     pub fn opt_encl_scope(&self, id: CodeExtent) -> Option<CodeExtent> {
         //! Returns the narrowest scope that encloses `id`, if any.
-        self.scope_map.borrow().get(&id).map(|x| *x)
+        self.scope_map.borrow().get(&id).cloned()
     }
 
     #[allow(dead_code)] // used in middle::cfg

@@ -562,7 +562,7 @@ pub fn early_bound_lifetimes<'a>(generics: &'a ast::Generics) -> Vec<ast::Lifeti
 
     generics.lifetimes.iter()
         .filter(|l| referenced_idents.iter().any(|&i| i == l.lifetime.name))
-        .map(|l| (*l).clone())
+        .cloned()
         .collect()
 }
 

@@ -738,7 +738,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
     {
         let cache = self.pick_candidate_cache();
         let hashmap = cache.hashmap.borrow();
-        hashmap.get(&cache_fresh_trait_pred.0.trait_ref).map(|c| (*c).clone())
+        hashmap.get(&cache_fresh_trait_pred.0.trait_ref).cloned()
     }
 
     fn insert_candidate_cache(&mut self,

@@ -2868,7 +2868,7 @@ pub fn mk_ctor_fn<'tcx>(cx: &ctxt<'tcx>,
                         def_id: ast::DefId,
                         input_tys: &[Ty<'tcx>],
                         output: Ty<'tcx>) -> Ty<'tcx> {
-    let input_args = input_tys.iter().map(|ty| *ty).collect();
+    let input_args = input_tys.iter().cloned().collect();
     mk_bare_fn(cx,
                Some(def_id),
                cx.mk_bare_fn(BareFnTy {

@@ -3837,7 +3837,7 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>)
                                    -> Representability {
         match ty.sty {
             ty_tup(ref ts) => {
-                find_nonrepresentable(cx, sp, seen, ts.iter().map(|ty| *ty))
+                find_nonrepresentable(cx, sp, seen, ts.iter().cloned())
             }
             // Fixed-length vectors.
             // FIXME(#11924) Behavior undecided for zero-length vectors.

@@ -4965,7 +4965,7 @@ pub fn note_and_explain_type_err(cx: &ctxt, err: &type_err) {
 }
 
 pub fn provided_source(cx: &ctxt, id: ast::DefId) -> Option<ast::DefId> {
-    cx.provided_method_sources.borrow().get(&id).map(|x| *x)
+    cx.provided_method_sources.borrow().get(&id).cloned()
 }
 
 pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)

@@ -214,7 +214,7 @@ pub fn memoized<T, U, S, F>(cache: &RefCell<HashMap<T, U, S>>, arg: T, f: F) ->
     F: FnOnce(T) -> U,
 {
     let key = arg.clone();
-    let result = cache.borrow().get(&key).map(|result| result.clone());
+    let result = cache.borrow().get(&key).cloned();
     match result {
         Some(result) => result,
         None => {
@@ -40,10 +40,7 @@ pub fn get_rpath_flags<F, G>(config: RPathConfig<F, G>) -> Vec<String> where
     debug!("preparing the RPATH!");
 
     let libs = config.used_crates.clone();
-    let libs = libs.into_iter().filter_map(|(_, l)| {
-        l.map(|p| p.clone())
-    }).collect::<Vec<_>>();
-
+    let libs = libs.into_iter().filter_map(|(_, l)| l).collect::<Vec<_>>();
     let rpaths = get_rpaths(config, &libs[]);
     flags.push_all(&rpaths_to_flags(&rpaths[])[]);
     flags
@@ -254,7 +254,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
                        output_ty: Ty<'tcx>)
                        -> Ty<'tcx>
     {
-        let input_args = input_tys.iter().map(|ty| *ty).collect();
+        let input_args = input_tys.iter().cloned().collect();
         ty::mk_bare_fn(self.infcx.tcx,
                        None,
                        self.infcx.tcx.mk_bare_fn(ty::BareFnTy {
@@ -1920,18 +1920,15 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
                          -> ResolveResult<(Rc<Module>, LastPrivate)> {
         fn search_parent_externals(needle: Name, module: &Rc<Module>)
                                    -> Option<Rc<Module>> {
-            module.external_module_children.borrow()
-                                            .get(&needle).cloned()
-                                            .map(|_| module.clone())
-                                            .or_else(|| {
-                                                match module.parent_link.clone() {
-                                                    ModuleParentLink(parent, _) => {
-                                                        search_parent_externals(needle,
-                                                                                &parent.upgrade().unwrap())
+            match module.external_module_children.borrow().get(&needle) {
+                Some(_) => Some(module.clone()),
+                None => match module.parent_link {
+                    ModuleParentLink(ref parent, _) => {
+                        search_parent_externals(needle, &parent.upgrade().unwrap())
                     }
                     _ => None
                 }
-            })
+            }
         }
 
         let mut search_module = module_;
@@ -3211,7 +3211,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>)
         reachable.push("rust_eh_personality_catch".to_string());
 
         if codegen_units > 1 {
-            internalize_symbols(&shared_ccx, &reachable.iter().map(|x| x.clone()).collect());
+            internalize_symbols(&shared_ccx, &reachable.iter().cloned().collect());
         }
 
         let metadata_module = ModuleTranslation {

@@ -1197,7 +1197,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             let trait_ref =
                 bcx.tcx().object_cast_map.borrow()
                                          .get(&expr.id)
-                                         .map(|t| (*t).clone())
+                                         .cloned()
                                          .unwrap();
             let trait_ref = bcx.monomorphize(&trait_ref);
             let datum = unpack_datum!(bcx, trans(bcx, &**val));

@@ -67,7 +67,7 @@ pub fn untuple_arguments_if_necessary<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                                 abi: abi::Abi)
                                                 -> Vec<Ty<'tcx>> {
     if abi != abi::RustCall {
-        return inputs.iter().map(|x| (*x).clone()).collect()
+        return inputs.iter().cloned().collect()
     }
 
     if inputs.len() == 0 {

@@ -3220,7 +3220,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
         for field in ast_fields {
             let mut expected_field_type = tcx.types.err;
 
-            let pair = class_field_map.get(&field.ident.node.name).map(|x| *x);
+            let pair = class_field_map.get(&field.ident.node.name).cloned();
             match pair {
                 None => {
                     fcx.type_error_message(

@@ -3852,7 +3852,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
       }
       ast::ExprStruct(ref path, ref fields, ref base_expr) => {
         // Resolve the path.
-        let def = tcx.def_map.borrow().get(&id).map(|i| *i);
+        let def = tcx.def_map.borrow().get(&id).cloned();
         let struct_id = match def {
             Some(def::DefVariant(enum_id, variant_id, true)) => {
                 check_struct_enum_variant(fcx, id, expr.span, enum_id,

@@ -293,7 +293,7 @@ pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult {
         let mut a: Vec<clean::Attribute> = i.attrs.iter().filter(|&a| match a {
             &clean::NameValue(ref x, _) if "doc" == *x => false,
             _ => true
-        }).map(|x| x.clone()).collect();
+        }).cloned().collect();
         if docstr.len() > 0 {
             a.push(clean::NameValue("doc".to_string(), docstr));
         }

@@ -333,7 +333,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
             name: name,
             items: items.clone(),
             generics: gen.clone(),
-            bounds: b.iter().map(|x| (*x).clone()).collect(),
+            bounds: b.iter().cloned().collect(),
             id: item.id,
             attrs: item.attrs.clone(),
             whence: item.span,

@@ -918,7 +918,7 @@ mod tests {
     #[cfg(unix)]
     fn join_paths_unix() {
         fn test_eq(input: &[&str], output: &str) -> bool {
-            &*join_paths(input.iter().map(|s| *s)).unwrap() ==
+            &*join_paths(input.iter().cloned()).unwrap() ==
                 OsStr::from_str(output)
         }
 
@@ -927,14 +927,14 @@ mod tests {
                         "/bin:/usr/bin:/usr/local/bin"));
         assert!(test_eq(&["", "/bin", "", "", "/usr/bin", ""],
                         ":/bin:::/usr/bin:"));
-        assert!(join_paths(["/te:st"].iter().map(|s| *s)).is_err());
+        assert!(join_paths(["/te:st"].iter().cloned()).is_err());
     }
 
     #[test]
     #[cfg(windows)]
     fn join_paths_windows() {
         fn test_eq(input: &[&str], output: &str) -> bool {
-            &*join_paths(input.iter().map(|s| *s)).unwrap() ==
+            &*join_paths(input.iter().cloned()).unwrap() ==
                 OsStr::from_str(output)
         }
 
@@ -945,6 +945,6 @@ mod tests {
                         r";c:\windows;;;c:\;"));
         assert!(test_eq(&[r"c:\te;st", r"c:\"],
                         r#""c:\te;st";c:\"#));
-        assert!(join_paths([r#"c:\te"st"#].iter().map(|s| *s)).is_err());
+        assert!(join_paths([r#"c:\te"st"#].iter().cloned()).is_err());
     }
 }

@@ -251,7 +251,7 @@ impl<'ast> Map<'ast> {
     }
 
     fn find_entry(&self, id: NodeId) -> Option<MapEntry<'ast>> {
-        self.map.borrow().get(id as usize).map(|e| *e)
+        self.map.borrow().get(id as usize).cloned()
     }
 
     pub fn krate(&self) -> &'ast Crate {

@@ -21,6 +21,6 @@ impl Registry {
     }
 
     pub fn find_description(&self, code: &str) -> Option<&'static str> {
-        self.descriptions.get(code).map(|desc| *desc)
+        self.descriptions.get(code).cloned()
     }
 }

@@ -639,7 +639,7 @@ impl<'a> ExtCtxt<'a> {
     pub fn mod_path(&self) -> Vec<ast::Ident> {
         let mut v = Vec::new();
         v.push(token::str_to_ident(&self.ecfg.crate_name[]));
-        v.extend(self.mod_path.iter().map(|a| *a));
+        v.extend(self.mod_path.iter().cloned());
         return v;
     }
     pub fn bt_push(&mut self, ei: ExpnInfo) {

@@ -367,7 +367,7 @@ impl<'a> TraitDef<'a> {
                 "allow" | "warn" | "deny" | "forbid" => true,
                 _ => false,
             }
-        }).map(|a| a.clone()));
+        }).cloned());
         push(P(ast::Item {
             attrs: attrs,
             ..(*newitem).clone()

@@ -410,7 +410,7 @@ impl<'a> TraitDef<'a> {
         let mut ty_params = ty_params.into_vec();
 
         // Copy the lifetimes
-        lifetimes.extend(generics.lifetimes.iter().map(|l| (*l).clone()));
+        lifetimes.extend(generics.lifetimes.iter().cloned());
 
         // Create the type parameters.
         ty_params.extend(generics.ty_params.iter().map(|ty_param| {

@@ -445,14 +445,14 @@ impl<'a> TraitDef<'a> {
                         span: self.span,
                         bound_lifetimes: wb.bound_lifetimes.clone(),
                         bounded_ty: wb.bounded_ty.clone(),
-                        bounds: OwnedSlice::from_vec(wb.bounds.iter().map(|b| b.clone()).collect())
+                        bounds: OwnedSlice::from_vec(wb.bounds.iter().cloned().collect())
                     })
                 }
                 ast::WherePredicate::RegionPredicate(ref rb) => {
                     ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate {
                         span: self.span,
                         lifetime: rb.lifetime,
-                        bounds: rb.bounds.iter().map(|b| b.clone()).collect()
+                        bounds: rb.bounds.iter().cloned().collect()
                     })
                 }
                 ast::WherePredicate::EqPredicate(ref we) => {

@@ -500,7 +500,7 @@ impl<'a> TraitDef<'a> {
         let opt_trait_ref = Some(trait_ref);
         let ident = ast_util::impl_pretty_name(&opt_trait_ref, &*self_type);
         let mut a = vec![attr];
-        a.extend(self.attributes.iter().map(|a| a.clone()));
+        a.extend(self.attributes.iter().cloned());
         cx.item(
             self.span,
             ident,

@@ -179,7 +179,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             return DummyResult::expr(sp);
         }
         Ok(bytes) => {
-            let bytes = bytes.iter().map(|x| *x).collect();
+            let bytes = bytes.iter().cloned().collect();
             base::MacExpr::new(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes))))
         }
     }
@@ -283,7 +283,7 @@ pub fn parse(sess: &ParseSess,
              -> ParseResult {
     let mut cur_eis = Vec::new();
     cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
-                                               .map(|x| (*x).clone())
+                                               .cloned()
                                                .collect()),
                                      None,
                                      rdr.peek().sp.lo));

@@ -159,7 +159,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                                            None,
                                            None,
                                            arg.iter()
-                                              .map(|x| (*x).clone())
+                                              .cloned()
                                               .collect(),
                                            true);
         match parse(cx.parse_sess(), cx.cfg(), arg_rdr, lhs_tt) {
@@ -61,7 +61,7 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
 
 pub fn strip_doc_comment_decoration(comment: &str) -> String {
     /// remove whitespace-only lines from the start/end of lines
-    fn vertical_trim(lines: Vec<String> ) -> Vec<String> {
+    fn vertical_trim(lines: Vec<String>) -> Vec<String> {
         let mut i = 0;
         let mut j = lines.len();
         // first line of all-stars should be omitted

@@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         while j > i && lines[j - 1].trim().is_empty() {
             j -= 1;
         }
-        return lines[i..j].iter().map(|x| (*x).clone()).collect();
+        lines[i..j].iter().cloned().collect()
     }
 
     /// remove a "[ \t]*\*" block from each line, if possible
@@ -240,9 +240,8 @@ macro_rules! maybe_whole {
 
 fn maybe_append(mut lhs: Vec<Attribute>, rhs: Option<Vec<Attribute>>)
                 -> Vec<Attribute> {
-    match rhs {
-        Some(ref attrs) => lhs.extend(attrs.iter().map(|a| a.clone())),
-        None => {}
+    if let Some(ref attrs) = rhs {
+        lhs.extend(attrs.iter().cloned())
     }
     lhs
 }
@@ -467,7 +466,7 @@ impl<'a> Parser<'a> {
         debug!("commit_expr {:?}", e);
         if let ExprPath(..) = e.node {
             // might be unit-struct construction; check for recoverableinput error.
-            let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
+            let mut expected = edible.iter().cloned().collect::<Vec<_>>();
             expected.push_all(inedible);
             self.check_for_erroneous_unit_struct_expecting(&expected[]);
         }

@@ -485,7 +484,7 @@ impl<'a> Parser<'a> {
         if self.last_token
                .as_ref()
                .map_or(false, |t| t.is_ident() || t.is_path()) {
-            let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>();
+            let mut expected = edible.iter().cloned().collect::<Vec<_>>();
             expected.push_all(&inedible[]);
             self.check_for_erroneous_unit_struct_expecting(
                 &expected[]);
@@ -983,15 +983,14 @@ impl<'a> State<'a> {
         try!(self.word_nbsp("trait"));
         try!(self.print_ident(item.ident));
         try!(self.print_generics(generics));
-        let bounds: Vec<_> = bounds.iter().map(|b| b.clone()).collect();
         let mut real_bounds = Vec::with_capacity(bounds.len());
-        for b in bounds {
-            if let TraitTyParamBound(ref ptr, ast::TraitBoundModifier::Maybe) = b {
+        for b in bounds.iter() {
+            if let TraitTyParamBound(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
                 try!(space(&mut self.s));
                 try!(self.word_space("for ?"));
                 try!(self.print_trait_ref(&ptr.trait_ref));
             } else {
-                real_bounds.push(b);
+                real_bounds.push(b.clone());
             }
         }
         try!(self.print_bounds(":", &real_bounds[]));
@@ -134,7 +134,7 @@ fn run<W: Writer>(writer: &mut W) -> std::old_io::IoResult<()> {
                    ('t', 0.3015094502008)];
 
     try!(make_fasta(writer, ">ONE Homo sapiens alu\n",
-                    alu.as_bytes().iter().cycle().map(|c| *c), n * 2));
+                    alu.as_bytes().iter().cycle().cloned(), n * 2));
     try!(make_fasta(writer, ">TWO IUB ambiguity codes\n",
                     AAGen::new(rng, iub), n * 3));
     try!(make_fasta(writer, ">THREE Homo sapiens frequency\n",

@@ -270,7 +270,7 @@ fn handle_sol(raw_sol: &List<u64>, data: &mut Data) {
     // reverse order, i.e. the board rotated by half a turn.
     data.nb += 2;
     let sol1 = to_vec(raw_sol);
-    let sol2: Vec<u8> = sol1.iter().rev().map(|x| *x).collect();
+    let sol2: Vec<u8> = sol1.iter().rev().cloned().collect();
 
     if data.nb == 2 {
         data.min = sol1.clone();