
Use the new Entry::or_default method where possible.

Eduard-Mihai Burtescu 2018-07-21 22:43:31 +03:00
parent d5b6b95aef
commit 14aed81d9a
36 changed files with 133 additions and 149 deletions
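For readers unfamiliar with the API being adopted here: the `or_default` method on the map `Entry` types is shorthand for `or_insert_with(Default::default)` and requires only that the value type implement `Default`. A minimal sketch of the equivalence the changes below rely on (illustrative only, not code from this commit):

use std::collections::HashMap;

fn main() {
    let mut groups: HashMap<&str, Vec<u32>> = HashMap::new();

    // Old pattern: spell out the default value (or a closure producing it).
    groups.entry("a").or_insert_with(Vec::new).push(1);

    // New pattern: let the Entry API fall back to `Vec::default()` itself.
    groups.entry("b").or_default().push(2);

    assert_eq!(groups["a"], vec![1]);
    assert_eq!(groups["b"], vec![2]);
}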

View file

@@ -176,7 +176,7 @@ pub fn check(build: &mut Build) {
         if target.contains("-none-") {
             if build.no_std(*target).is_none() {
                 let target = build.config.target_config.entry(target.clone())
-                    .or_insert(Default::default());
+                    .or_default();
                 target.no_std = true;
             }
@@ -192,7 +192,7 @@ pub fn check(build: &mut Build) {
         // fall back to the system toolchain in /usr before giving up
         if build.musl_root(*target).is_none() && build.config.build == *target {
             let target = build.config.target_config.entry(target.clone())
-                .or_insert(Default::default());
+                .or_default();
             target.musl_root = Some("/usr".into());
         }
         match build.musl_root(*target) {

View file

@@ -183,7 +183,7 @@ impl Step for ToolBuild {
                 let mut artifacts = builder.tool_artifacts.borrow_mut();
                 let prev_artifacts = artifacts
                     .entry(target)
-                    .or_insert_with(Default::default);
+                    .or_default();
                 if let Some(prev) = prev_artifacts.get(&*id) {
                     if prev.1 != val.1 {
                         duplicates.push((

View file

@@ -2334,7 +2334,7 @@ impl<'a> LoweringContext<'a> {
         // FIXME: This could probably be done with less rightward drift. Also looks like two control
         // paths where report_error is called are also the only paths that advance to after
         // the match statement, so the error reporting could probably just be moved there.
-        let mut add_bounds = NodeMap();
+        let mut add_bounds: NodeMap<Vec<_>> = NodeMap();
         for pred in &generics.where_clause.predicates {
             if let WherePredicate::BoundPredicate(ref bound_pred) = *pred {
                 'next_bound: for bound in &bound_pred.bounds {
@@ -2364,7 +2364,7 @@ impl<'a> LoweringContext<'a> {
                             GenericParamKind::Type { .. } => {
                                 if node_id == param.id {
                                     add_bounds.entry(param.id)
-                                        .or_insert(Vec::new())
+                                        .or_default()
                                         .push(bound.clone());
                                     continue 'next_bound;
                                 }
@@ -2730,7 +2730,7 @@ impl<'a> LoweringContext<'a> {
         if let Some(ref trait_ref) = trait_ref {
             if let Def::Trait(def_id) = trait_ref.path.def {
-                this.trait_impls.entry(def_id).or_insert(vec![]).push(id);
+                this.trait_impls.entry(def_id).or_default().push(id);
             }
         }

View file

@@ -512,7 +512,7 @@ impl LintBuffer {
             msg: msg.to_string(),
             diagnostic
         };
-        let arr = self.map.entry(id).or_insert(Vec::new());
+        let arr = self.map.entry(id).or_default();
         if !arr.contains(&early_lint) {
             arr.push(early_lint);
         }

View file

@@ -391,37 +391,33 @@ fn resolve_lifetimes<'tcx>(
     let named_region_map = krate(tcx);
-    let mut defs = FxHashMap();
+    let mut rl = ResolveLifetimes {
+        defs: FxHashMap(),
+        late_bound: FxHashMap(),
+        object_lifetime_defaults: FxHashMap(),
+    };
     for (k, v) in named_region_map.defs {
         let hir_id = tcx.hir.node_to_hir_id(k);
-        let map = defs.entry(hir_id.owner_local_def_id())
-            .or_insert_with(|| Lrc::new(FxHashMap()));
+        let map = rl.defs.entry(hir_id.owner_local_def_id()).or_default();
         Lrc::get_mut(map).unwrap().insert(hir_id.local_id, v);
     }
-    let mut late_bound = FxHashMap();
     for k in named_region_map.late_bound {
         let hir_id = tcx.hir.node_to_hir_id(k);
-        let map = late_bound
-            .entry(hir_id.owner_local_def_id())
-            .or_insert_with(|| Lrc::new(FxHashSet()));
+        let map = rl.late_bound.entry(hir_id.owner_local_def_id()).or_default();
         Lrc::get_mut(map).unwrap().insert(hir_id.local_id);
     }
-    let mut object_lifetime_defaults = FxHashMap();
     for (k, v) in named_region_map.object_lifetime_defaults {
         let hir_id = tcx.hir.node_to_hir_id(k);
-        let map = object_lifetime_defaults
+        let map = rl.object_lifetime_defaults
             .entry(hir_id.owner_local_def_id())
-            .or_insert_with(|| Lrc::new(FxHashMap()));
+            .or_default();
         Lrc::get_mut(map)
             .unwrap()
             .insert(hir_id.local_id, Lrc::new(v));
     }
-    Lrc::new(ResolveLifetimes {
-        defs,
-        late_bound,
-        object_lifetime_defaults,
-    })
+    Lrc::new(rl)
 }
 fn krate<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> NamedRegionMap {

View file

@@ -2174,7 +2174,7 @@ pub fn build_session_options_and_crate_config(
         );
     }
-    let mut externs = BTreeMap::new();
+    let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new();
     for arg in &matches.opt_strs("extern") {
         let mut parts = arg.splitn(2, '=');
         let name = match parts.next() {
@@ -2191,7 +2191,7 @@ pub fn build_session_options_and_crate_config(
         externs
             .entry(name.to_string())
-            .or_insert_with(BTreeSet::new)
+            .or_default()
             .insert(location.to_string());
     }

View file

@@ -513,26 +513,26 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
                     {
                         let deps1 = vid_map
                             .entry(RegionTarget::RegionVid(r1))
-                            .or_insert_with(|| Default::default());
+                            .or_default();
                         deps1.larger.insert(RegionTarget::RegionVid(r2));
                     }
                     let deps2 = vid_map
                         .entry(RegionTarget::RegionVid(r2))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps2.smaller.insert(RegionTarget::RegionVid(r1));
                 }
                 &Constraint::RegSubVar(region, vid) => {
                     {
                         let deps1 = vid_map
                             .entry(RegionTarget::Region(region))
-                            .or_insert_with(|| Default::default());
+                            .or_default();
                         deps1.larger.insert(RegionTarget::RegionVid(vid));
                     }
                     let deps2 = vid_map
                         .entry(RegionTarget::RegionVid(vid))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps2.smaller.insert(RegionTarget::Region(region));
                 }
                 &Constraint::VarSubReg(vid, region) => {
@@ -542,13 +542,13 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
                     {
                         let deps1 = vid_map
                             .entry(RegionTarget::Region(r1))
-                            .or_insert_with(|| Default::default());
+                            .or_default();
                         deps1.larger.insert(RegionTarget::Region(r2));
                     }
                     let deps2 = vid_map
                         .entry(RegionTarget::Region(r2))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps2.smaller.insert(RegionTarget::Region(r1));
                 }
             }

View file

@@ -57,7 +57,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
             index: Option<usize>, // None if this is an old error
         }
-        let mut error_map : FxHashMap<_, _> =
+        let mut error_map : FxHashMap<_, Vec<_>> =
             self.reported_trait_errors.borrow().iter().map(|(&span, predicates)| {
                 (span, predicates.iter().map(|predicate| ErrorDescriptor {
                     predicate: predicate.clone(),
@@ -66,14 +66,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
             }).collect();
         for (index, error) in errors.iter().enumerate() {
-            error_map.entry(error.obligation.cause.span).or_insert(Vec::new()).push(
+            error_map.entry(error.obligation.cause.span).or_default().push(
                 ErrorDescriptor {
                     predicate: error.obligation.predicate.clone(),
                     index: Some(index)
                 });
             self.reported_trait_errors.borrow_mut()
-                .entry(error.obligation.cause.span).or_insert(Vec::new())
+                .entry(error.obligation.cause.span).or_default()
                 .push(error.obligation.predicate.clone());
         }

View file

@@ -49,7 +49,7 @@ pub struct Graph {
 /// Children of a given impl, grouped into blanket/non-blanket varieties as is
 /// done in `TraitDef`.
-#[derive(RustcEncodable, RustcDecodable)]
+#[derive(Default, RustcEncodable, RustcDecodable)]
 struct Children {
     // Impls of a trait (or specializations of a given impl). To allow for
     // quicker lookup, the impls are indexed by a simplified version of their
@@ -81,13 +81,6 @@ enum Inserted {
 }
 impl<'a, 'gcx, 'tcx> Children {
-    fn new() -> Children {
-        Children {
-            nonblanket_impls: FxHashMap(),
-            blanket_impls: vec![],
-        }
-    }
     /// Insert an impl into this set of children without comparing to any existing impls
     fn insert_blindly(&mut self,
                       tcx: TyCtxt<'a, 'gcx, 'tcx>,
@@ -95,7 +88,7 @@ impl<'a, 'gcx, 'tcx> Children {
         let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
         if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) {
             debug!("insert_blindly: impl_def_id={:?} sty={:?}", impl_def_id, sty);
-            self.nonblanket_impls.entry(sty).or_insert(vec![]).push(impl_def_id)
+            self.nonblanket_impls.entry(sty).or_default().push(impl_def_id)
         } else {
             debug!("insert_blindly: impl_def_id={:?} sty=None", impl_def_id);
             self.blanket_impls.push(impl_def_id)
@@ -230,7 +223,7 @@ impl<'a, 'gcx, 'tcx> Children {
     }
     fn filtered(&mut self, sty: SimplifiedType) -> Box<dyn Iterator<Item = DefId> + '_> {
-        let nonblanket = self.nonblanket_impls.entry(sty).or_insert(vec![]).iter();
+        let nonblanket = self.nonblanket_impls.entry(sty).or_default().iter();
         Box::new(self.blanket_impls.iter().chain(nonblanket).cloned())
     }
 }
@@ -268,7 +261,7 @@ impl<'a, 'gcx, 'tcx> Graph {
                    trait_ref, impl_def_id, trait_def_id);
             self.parent.insert(impl_def_id, trait_def_id);
-            self.children.entry(trait_def_id).or_insert(Children::new())
+            self.children.entry(trait_def_id).or_default()
                 .insert_blindly(tcx, impl_def_id);
             return Ok(None);
         }
@@ -281,7 +274,7 @@ impl<'a, 'gcx, 'tcx> Graph {
         loop {
             use self::Inserted::*;
-            let insert_result = self.children.entry(parent).or_insert(Children::new())
+            let insert_result = self.children.entry(parent).or_default()
                 .insert(tcx, impl_def_id, simplified)?;
             match insert_result {
@@ -318,9 +311,8 @@ impl<'a, 'gcx, 'tcx> Graph {
                     self.parent.insert(impl_def_id, parent);
                     // Add G as N's child.
-                    let mut grand_children = Children::new();
-                    grand_children.insert_blindly(tcx, grand_child_to_be);
-                    self.children.insert(impl_def_id, grand_children);
+                    self.children.entry(impl_def_id).or_default()
+                        .insert_blindly(tcx, grand_child_to_be);
                     break;
                 }
                 ShouldRecurseOn(new_parent) => {
@@ -343,7 +335,7 @@ impl<'a, 'gcx, 'tcx> Graph {
                                    was already present.");
         }
-        self.children.entry(parent).or_insert(Children::new()).insert_blindly(tcx, child);
+        self.children.entry(parent).or_default().insert_blindly(tcx, child);
     }
     /// The parent of a given impl, which is the def id of the trait when the

View file

@@ -1132,11 +1132,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             None
         };
-        let mut trait_map = FxHashMap();
+        let mut trait_map: FxHashMap<_, Lrc<FxHashMap<_, _>>> = FxHashMap();
         for (k, v) in resolutions.trait_map {
             let hir_id = hir.node_to_hir_id(k);
-            let map = trait_map.entry(hir_id.owner)
-                .or_insert_with(|| Lrc::new(FxHashMap()));
+            let map = trait_map.entry(hir_id.owner).or_default();
             Lrc::get_mut(map).unwrap()
                 .insert(hir_id.local_id,
                         Lrc::new(StableVec::new(v)));

View file

@@ -228,7 +228,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> {
         match self.sty {
             TyAdt(def, substs) => {
                 {
-                    let substs_set = visited.entry(def.did).or_insert(FxHashSet::default());
+                    let substs_set = visited.entry(def.did).or_default();
                     if !substs_set.insert(substs) {
                         // We are already calculating the inhabitedness of this type.
                         // The type must contain a reference to itself. Break the

View file

@@ -41,6 +41,7 @@ pub struct TraitDef {
     pub def_path_hash: DefPathHash,
 }
+#[derive(Default)]
 pub struct TraitImpls {
     blanket_impls: Vec<DefId>,
     /// Impls indexed by their simplified self-type, for fast lookup.
@@ -143,47 +144,43 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
 pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                                 trait_id: DefId)
                                                 -> Lrc<TraitImpls> {
-    let mut remote_impls = Vec::new();
-    // Traits defined in the current crate can't have impls in upstream
-    // crates, so we don't bother querying the cstore.
-    if !trait_id.is_local() {
-        for &cnum in tcx.crates().iter() {
-            let impls = tcx.implementations_of_trait((cnum, trait_id));
-            remote_impls.extend(impls.iter().cloned());
-        }
-    }
-    let mut blanket_impls = Vec::new();
-    let mut non_blanket_impls = FxHashMap();
-    let local_impls = tcx.hir
-        .trait_impls(trait_id)
-        .into_iter()
-        .map(|&node_id| tcx.hir.local_def_id(node_id));
-    for impl_def_id in local_impls.chain(remote_impls.into_iter()) {
-        let impl_self_ty = tcx.type_of(impl_def_id);
-        if impl_def_id.is_local() && impl_self_ty.references_error() {
-            continue
-        }
-        if let Some(simplified_self_ty) =
-            fast_reject::simplify_type(tcx, impl_self_ty, false)
-        {
-            non_blanket_impls
-                .entry(simplified_self_ty)
-                .or_insert(vec![])
-                .push(impl_def_id);
-        } else {
-            blanket_impls.push(impl_def_id);
-        }
-    }
-    Lrc::new(TraitImpls {
-        blanket_impls: blanket_impls,
-        non_blanket_impls: non_blanket_impls,
-    })
+    let mut impls = TraitImpls::default();
+    {
+        let mut add_impl = |impl_def_id| {
+            let impl_self_ty = tcx.type_of(impl_def_id);
+            if impl_def_id.is_local() && impl_self_ty.references_error() {
+                return;
+            }
+            if let Some(simplified_self_ty) =
+                fast_reject::simplify_type(tcx, impl_self_ty, false)
+            {
+                impls.non_blanket_impls
+                    .entry(simplified_self_ty)
+                    .or_default()
+                    .push(impl_def_id);
+            } else {
+                impls.blanket_impls.push(impl_def_id);
+            }
+        };
+        // Traits defined in the current crate can't have impls in upstream
+        // crates, so we don't bother querying the cstore.
+        if !trait_id.is_local() {
+            for &cnum in tcx.crates().iter() {
+                for &def_id in tcx.implementations_of_trait((cnum, trait_id)).iter() {
+                    add_impl(def_id);
+                }
+            }
+        }
+        for &node_id in tcx.hir.trait_impls(trait_id) {
+            add_impl(tcx.hir.local_def_id(node_id));
+        }
+    }
+    Lrc::new(impls)
 }
 impl<'a> HashStable<StableHashingContext<'a>> for TraitImpls {

View file

@@ -44,7 +44,7 @@ struct UnusedMutCx<'a, 'tcx: 'a> {
 impl<'a, 'tcx> UnusedMutCx<'a, 'tcx> {
     fn check_unused_mut_pat(&self, pats: &[P<hir::Pat>]) {
         let tcx = self.bccx.tcx;
-        let mut mutables = FxHashMap();
+        let mut mutables: FxHashMap<_, Vec<_>> = FxHashMap();
         for p in pats {
             p.each_binding(|_, hir_id, span, ident| {
                 // Skip anything that looks like `_foo`
@@ -60,7 +60,7 @@ impl<'a, 'tcx> UnusedMutCx<'a, 'tcx> {
                     _ => return,
                 }
-                mutables.entry(ident.name).or_insert(Vec::new()).push((hir_id, span));
+                mutables.entry(ident.name).or_default().push((hir_id, span));
             } else {
                 tcx.sess.delay_span_bug(span, "missing binding mode");
             }

View file

@@ -181,7 +181,7 @@ fn build_local_id_to_index(body: Option<&hir::Body>,
     cfg.graph.each_node(|node_idx, node| {
         if let cfg::CFGNodeData::AST(id) = node.data {
-            index.entry(id).or_insert(vec![]).push(node_idx);
+            index.entry(id).or_default().push(node_idx);
         }
         true
     });
@@ -209,7 +209,7 @@ fn build_local_id_to_index(body: Option<&hir::Body>,
         }
         fn visit_pat(&mut self, p: &hir::Pat) {
-            self.index.entry(p.hir_id.local_id).or_insert(vec![]).push(self.entry);
+            self.index.entry(p.hir_id.local_id).or_default().push(self.entry);
             intravisit::walk_pat(self, p)
         }
     }

View file

@@ -299,7 +299,7 @@ fn upstream_monomorphizations_provider<'a, 'tcx>(
     let cnums = tcx.all_crate_nums(LOCAL_CRATE);
-    let mut instances = DefIdMap();
+    let mut instances: DefIdMap<FxHashMap<_, _>> = DefIdMap();
     let cnum_stable_ids: IndexVec<CrateNum, Fingerprint> = {
         let mut cnum_stable_ids = IndexVec::from_elem_n(Fingerprint::ZERO,
@@ -318,8 +318,7 @@ fn upstream_monomorphizations_provider<'a, 'tcx>(
     for &cnum in cnums.iter() {
         for &(ref exported_symbol, _) in tcx.exported_symbols(cnum).iter() {
             if let &ExportedSymbol::Generic(def_id, substs) = exported_symbol {
-                let substs_map = instances.entry(def_id)
-                    .or_insert_with(|| FxHashMap());
+                let substs_map = instances.entry(def_id).or_default();
                 match substs_map.entry(substs) {
                     Occupied(mut e) => {

View file

@@ -1020,12 +1020,12 @@ fn collect_and_partition_mono_items<'a, 'tcx>(
     }).collect();
     if tcx.sess.opts.debugging_opts.print_mono_items.is_some() {
-        let mut item_to_cgus = FxHashMap();
+        let mut item_to_cgus: FxHashMap<_, Vec<_>> = FxHashMap();
         for cgu in &codegen_units {
             for (&mono_item, &linkage) in cgu.items() {
                 item_to_cgus.entry(mono_item)
-                    .or_insert(Vec::new())
+                    .or_default()
                     .push((cgu.name().clone(), linkage));
             }
         }

View file

@@ -33,12 +33,12 @@ impl TestGraph {
         for &(source, target) in edges {
             graph.num_nodes = max(graph.num_nodes, source + 1);
             graph.num_nodes = max(graph.num_nodes, target + 1);
-            graph.successors.entry(source).or_insert(vec![]).push(target);
-            graph.predecessors.entry(target).or_insert(vec![]).push(source);
+            graph.successors.entry(source).or_default().push(target);
+            graph.predecessors.entry(target).or_default().push(source);
         }
         for node in 0..graph.num_nodes {
-            graph.successors.entry(node).or_insert(vec![]);
-            graph.predecessors.entry(node).or_insert(vec![]);
+            graph.successors.entry(node).or_default();
+            graph.predecessors.entry(node).or_default();
         }
         graph
     }

View file

@@ -1788,7 +1788,7 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'a, 'tcx> {
         if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) {
             self.impls
                 .entry(trait_ref.def_id)
-                .or_insert(vec![])
+                .or_default()
                 .push(impl_id.index);
         }
     }

View file

@@ -451,7 +451,10 @@ impl<'a> Context<'a> {
         let rlib_prefix = format!("lib{}{}", self.crate_name, extra_prefix);
         let staticlib_prefix = format!("{}{}{}", staticpair.0, self.crate_name, extra_prefix);
-        let mut candidates = FxHashMap();
+        let mut candidates: FxHashMap<
+            _,
+            (FxHashMap<_, _>, FxHashMap<_, _>, FxHashMap<_, _>),
+        > = FxHashMap();
         let mut staticlibs = vec![];
         // First, find all possible candidate rlibs and dylibs purely based on
@@ -493,8 +496,7 @@ impl<'a> Context<'a> {
             info!("lib candidate: {}", path.display());
             let hash_str = hash.to_string();
-            let slot = candidates.entry(hash_str)
-                .or_insert_with(|| (FxHashMap(), FxHashMap(), FxHashMap()));
+            let slot = candidates.entry(hash_str).or_default();
             let (ref mut rlibs, ref mut rmetas, ref mut dylibs) = *slot;
             fs::canonicalize(path)
                 .map(|p| {

View file

@@ -248,7 +248,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {
                 self.activation_map
                     .entry(location)
-                    .or_insert(Vec::new())
+                    .or_default()
                     .push(borrow_index);
                 TwoPhaseActivation::ActivatedAt(location)
             }

View file

@@ -80,7 +80,7 @@ fn precompute_borrows_out_of_scope<'tcx>(
                 debug!("borrow {:?} gets killed at {:?}", borrow_index, location);
                 borrows_out_of_scope_at_location
                     .entry(location)
-                    .or_insert(vec![])
+                    .or_default()
                     .push(borrow_index);
                 continue;
             }

View file

@@ -696,7 +696,7 @@ fn internalize_symbols<'a, 'tcx>(_tcx: TyCtxt<'a, 'tcx, 'tcx>,
     inlining_map.iter_accesses(|accessor, accessees| {
         for accessee in accessees {
             accessor_map.entry(*accessee)
-                .or_insert(Vec::new())
+                .or_default()
                 .push(accessor);
         }
     });

View file

@@ -528,7 +528,7 @@ pub fn write_mir_intro<'a, 'gcx, 'tcx>(
         if let Some(parent) = scope_data.parent_scope {
             scope_tree
                 .entry(parent)
-                .or_insert(vec![])
+                .or_default()
                 .push(SourceScope::new(index));
         } else {
             // Only the argument scope has no parent, because it's the root.

View file

@@ -65,7 +65,7 @@ impl<'a, 'b, 'd> UnusedImportCheckVisitor<'a, 'b, 'd> {
                 // Check later.
                 return;
             }
-            self.unused_imports.entry(item_id).or_insert_with(NodeMap).insert(id, span);
+            self.unused_imports.entry(item_id).or_default().insert(id, span);
         } else {
             // This trait import is definitely used, in a way other than
            // method resolution.
@@ -112,7 +112,7 @@ impl<'a, 'b, 'cl> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b, 'cl> {
                 if items.len() == 0 {
                     self.unused_imports
                         .entry(self.base_id)
-                        .or_insert_with(NodeMap)
+                        .or_default()
                         .insert(id, span);
                 }
             } else {

View file

@@ -1819,7 +1819,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {
     fn add_to_glob_map(&mut self, id: NodeId, ident: Ident) {
         if self.make_glob_map {
-            self.glob_map.entry(id).or_insert_with(FxHashSet).insert(ident.name);
+            self.glob_map.entry(id).or_default().insert(ident.name);
         }
     }
@@ -3703,14 +3703,14 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {
                         let seen = self.freevars_seen
                                        .entry(function_id)
-                                       .or_insert_with(|| NodeMap());
+                                       .or_default();
                         if let Some(&index) = seen.get(&node_id) {
                             def = Def::Upvar(node_id, index, function_id);
                             continue;
                         }
                         let vec = self.freevars
                                       .entry(function_id)
-                                      .or_insert_with(|| vec![]);
+                                      .or_default();
                         let depth = vec.len();
                         def = Def::Upvar(node_id, depth, function_id);

View file

@@ -2005,7 +2005,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                                       closure_def_id: DefId,
                                       r: DeferredCallResolution<'gcx, 'tcx>) {
         let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
-        deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
+        deferred_call_resolutions.entry(closure_def_id).or_default().push(r);
     }
     fn remove_deferred_call_resolutions(&self,

View file

@@ -242,7 +242,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                     .borrow_mut()
                     .adjustments_mut()
                     .entry(rhs_expr.hir_id)
-                    .or_insert(vec![])
+                    .or_default()
                     .push(autoref);
             }
         }

View file

@@ -304,7 +304,7 @@ impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
                 let impl_def_id = self.tcx.hir.local_def_id(item.id);
                 let mut rc_vec = self.impls_map.inherent_impls
                                      .entry(def_id)
-                                     .or_insert_with(|| Lrc::new(vec![]));
+                                     .or_default();
                 // At this point, there should not be any clones of the
                 // `Lrc`, so we can still safely push into it in place:

View file

@@ -267,7 +267,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
         // all intermediate RegionVids. At the end, all constraints should
         // be between Regions (aka region variables). This gives us the information
         // we need to create the Generics.
-        let mut finished = FxHashMap();
+        let mut finished: FxHashMap<_, Vec<_>> = FxHashMap();
         let mut vid_map: FxHashMap<RegionTarget, RegionDeps> = FxHashMap();
@@ -281,25 +281,25 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                     {
                         let deps1 = vid_map
                             .entry(RegionTarget::RegionVid(r1))
-                            .or_insert_with(|| Default::default());
+                            .or_default();
                         deps1.larger.insert(RegionTarget::RegionVid(r2));
                     }
                     let deps2 = vid_map
                         .entry(RegionTarget::RegionVid(r2))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps2.smaller.insert(RegionTarget::RegionVid(r1));
                 }
                 &Constraint::RegSubVar(region, vid) => {
                     let deps = vid_map
                         .entry(RegionTarget::RegionVid(vid))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps.smaller.insert(RegionTarget::Region(region));
                 }
                 &Constraint::VarSubReg(vid, region) => {
                     let deps = vid_map
                         .entry(RegionTarget::RegionVid(vid))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps.larger.insert(RegionTarget::Region(region));
                 }
                 &Constraint::RegSubReg(r1, r2) => {
@@ -308,7 +308,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                     if self.region_name(r1) != self.region_name(r2) {
                         finished
                             .entry(self.region_name(r2).expect("no region_name found"))
-                            .or_insert_with(|| Vec::new())
+                            .or_default()
                             .push(r1);
                     }
                 }
@@ -343,7 +343,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                     if self.region_name(r1) != self.region_name(r2) {
                         finished
                             .entry(self.region_name(r2).expect("no region name found"))
-                            .or_insert_with(|| Vec::new())
+                            .or_default()
                             .push(r1) // Larger, smaller
                     }
                 }
@@ -577,8 +577,8 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
         } = full_generics.clean(self.cx);
         let mut has_sized = FxHashSet();
-        let mut ty_to_bounds = FxHashMap();
-        let mut lifetime_to_bounds = FxHashMap();
+        let mut ty_to_bounds: FxHashMap<_, FxHashSet<_>> = FxHashMap();
+        let mut lifetime_to_bounds: FxHashMap<_, FxHashSet<_>> = FxHashMap();
         let mut ty_to_traits: FxHashMap<Type, FxHashSet<Type>> = FxHashMap();
         let mut ty_to_fn: FxHashMap<Type, (Option<PolyTrait>, Option<Type>)> = FxHashMap();
@@ -647,11 +647,11 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                             ty_to_bounds
                                 .entry(ty.clone())
-                                .or_insert_with(|| FxHashSet());
+                                .or_default();
                         } else {
                             ty_to_bounds
                                 .entry(ty.clone())
-                                .or_insert_with(|| FxHashSet())
+                                .or_default()
                                 .insert(b.clone());
                         }
                     }
@@ -659,7 +659,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                 WherePredicate::RegionPredicate { lifetime, bounds } => {
                     lifetime_to_bounds
                         .entry(lifetime)
-                        .or_insert_with(|| FxHashSet())
+                        .or_default()
                         .extend(bounds);
                 }
                 WherePredicate::EqPredicate { lhs, rhs } => {
@@ -722,7 +722,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                             let bounds = ty_to_bounds
                                 .entry(*ty.clone())
-                                .or_insert_with(|| FxHashSet());
+                                .or_default();
                             bounds.insert(GenericBound::TraitBound(
                                 PolyTrait {
@@ -752,7 +752,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                             // loop
                             ty_to_traits
                                 .entry(*ty.clone())
-                                .or_insert_with(|| FxHashSet())
+                                .or_default()
                                 .insert(*trait_.clone());
                         }
                         _ => panic!("Unexpected trait {:?} for {:?}", trait_, did),

View file

@@ -34,7 +34,7 @@ use core::DocContext;
 pub fn where_clauses(cx: &DocContext, clauses: Vec<WP>) -> Vec<WP> {
     // First, partition the where clause into its separate components
-    let mut params = BTreeMap::new();
+    let mut params: BTreeMap<_, Vec<_>> = BTreeMap::new();
     let mut lifetimes = Vec::new();
     let mut equalities = Vec::new();
     let mut tybounds = Vec::new();
@@ -43,7 +43,7 @@ pub fn where_clauses(cx: &DocContext, clauses: Vec<WP>) -> Vec<WP> {
         match clause {
             WP::BoundPredicate { ty, bounds } => {
                 match ty {
-                    clean::Generic(s) => params.entry(s).or_insert(Vec::new())
+                    clean::Generic(s) => params.entry(s).or_default()
                                                .extend(bounds),
                     t => tybounds.push((t, ty_bounds(bounds))),
                 }

View file

@@ -1245,7 +1245,7 @@ impl DocFolder for Cache {
         // Collect all the implementors of traits.
         if let clean::ImplItem(ref i) = item.inner {
             if let Some(did) = i.trait_.def_id() {
-                self.implementors.entry(did).or_insert(vec![]).push(Impl {
+                self.implementors.entry(did).or_default().push(Impl {
                     impl_item: item.clone(),
                 });
             }
@@ -1440,7 +1440,7 @@ impl DocFolder for Cache {
                     unreachable!()
                 };
                 for did in dids {
-                    self.impls.entry(did).or_insert(vec![]).push(Impl {
+                    self.impls.entry(did).or_default().push(Impl {
                         impl_item: item.clone(),
                     });
                 }
@@ -1971,7 +1971,7 @@ impl Context {
     fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<NameDoc>> {
         // BTreeMap instead of HashMap to get a sorted output
-        let mut map = BTreeMap::new();
+        let mut map: BTreeMap<_, Vec<_>> = BTreeMap::new();
         for item in &m.items {
             if item.is_stripped() { continue }
@@ -1981,7 +1981,7 @@ impl Context {
                 Some(ref s) => s.to_string(),
             };
             let short = short.to_string();
-            map.entry(short).or_insert(vec![])
+            map.entry(short).or_default()
                 .push((myname, Some(plain_summary_line(item.doc_value()))));
         }

View file

@@ -599,7 +599,7 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
 /// returns a map mapping crate names to their paths or else an
 /// error message.
 fn parse_externs(matches: &getopts::Matches) -> Result<Externs, String> {
-    let mut externs = BTreeMap::new();
+    let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new();
     for arg in &matches.opt_strs("extern") {
         let mut parts = arg.splitn(2, '=');
         let name = parts.next().ok_or("--extern value must not be empty".to_string())?;
@@ -607,7 +607,7 @@ fn parse_externs(matches: &getopts::Matches) -> Result<Externs, String> {
             .ok_or("--extern value must be of the format `foo=bar`"
                 .to_string())?;
         let name = name.to_string();
-        externs.entry(name).or_insert_with(BTreeSet::new).insert(location.to_string());
+        externs.entry(name).or_default().insert(location.to_string());
     }
     Ok(Externs::new(externs))
 }

View file

@@ -327,7 +327,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         // Unresolved macros produce dummy outputs as a recovery measure.
         invocations.reverse();
         let mut expanded_fragments = Vec::new();
-        let mut derives = HashMap::new();
+        let mut derives: HashMap<Mark, Vec<_>> = HashMap::new();
         let mut undetermined_invocations = Vec::new();
         let (mut progress, mut force) = (false, !self.monotonic);
         loop {
@@ -388,7 +388,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs });
                     let item_with_markers =
                         add_derived_markers(&mut self.cx, item.span(), &traits, item.clone());
-                    let derives = derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new);
+                    let derives = derives.entry(invoc.expansion_data.mark).or_default();
                     for path in &traits {
                         let mark = Mark::fresh(self.cx.current_expansion.mark);

View file

@@ -88,8 +88,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {
 fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
     let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
-    let values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
-    values.push(message);
+    cx.expansions.entry(sp).or_default().push(message);
 }
 /// Given `lhses` and `rhses`, this is the new macro we create

View file

@@ -355,10 +355,10 @@ fn check_crate_duplicate(resolve: &Resolve, bad: &mut bool) {
         // "cargo", // FIXME(#53005)
         "rustc-ap-syntax",
     ];
-    let mut name_to_id = HashMap::new();
+    let mut name_to_id: HashMap<_, Vec<_>> = HashMap::new();
     for node in resolve.nodes.iter() {
         name_to_id.entry(node.id.split_whitespace().next().unwrap())
-            .or_insert(Vec::new())
+            .or_default()
             .push(&node.id);
     }

View file

@@ -20,7 +20,7 @@ use std::path::Path;
 pub fn check(path: &Path, bad: &mut bool) {
     let mut contents = String::new();
-    let mut map = HashMap::new();
+    let mut map: HashMap<_, Vec<_>> = HashMap::new();
     super::walk(path,
                 &mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
                 &mut |file| {
@@ -61,7 +61,7 @@ pub fn check(path: &Path, bad: &mut bool) {
                 Ok(n) => n,
                 Err(..) => continue,
             };
-            map.entry(code).or_insert(Vec::new())
+            map.entry(code).or_default()
                 .push((file.to_owned(), num + 1, line.to_owned()));
             break
         }