Use the new Entry::or_default method where possible.
parent d5b6b95aef
commit 14aed81d9a

36 changed files with 133 additions and 149 deletions
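For context, `Entry::or_default` (stabilized in Rust 1.28) inserts the value type's `Default::default()` when a map key is vacant and returns a mutable reference to it. A minimal standalone sketch of the pattern this commit applies, using an illustrative map rather than code from the diff:

    use std::collections::HashMap;

    fn main() {
        let mut impls: HashMap<&str, Vec<u32>> = HashMap::new();

        // Spellings replaced throughout this diff:
        impls.entry("Trait").or_insert(Vec::new()).push(1);
        impls.entry("Trait").or_insert_with(Vec::new).push(2);

        // Equivalent, shorter form; requires the value type to implement Default.
        impls.entry("Trait").or_default().push(3);

        assert_eq!(impls["Trait"], vec![1, 2, 3]);
    }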
@@ -176,7 +176,7 @@ pub fn check(build: &mut Build) {
         if target.contains("-none-") {
             if build.no_std(*target).is_none() {
                 let target = build.config.target_config.entry(target.clone())
-                    .or_insert(Default::default());
+                    .or_default();

                 target.no_std = true;
             }
@@ -192,7 +192,7 @@ pub fn check(build: &mut Build) {
             // fall back to the system toolchain in /usr before giving up
             if build.musl_root(*target).is_none() && build.config.build == *target {
                 let target = build.config.target_config.entry(target.clone())
-                    .or_insert(Default::default());
+                    .or_default();
                 target.musl_root = Some("/usr".into());
             }
             match build.musl_root(*target) {
@@ -183,7 +183,7 @@ impl Step for ToolBuild {
                 let mut artifacts = builder.tool_artifacts.borrow_mut();
                 let prev_artifacts = artifacts
                     .entry(target)
-                    .or_insert_with(Default::default);
+                    .or_default();
                 if let Some(prev) = prev_artifacts.get(&*id) {
                     if prev.1 != val.1 {
                         duplicates.push((
@@ -2334,7 +2334,7 @@ impl<'a> LoweringContext<'a> {
         // FIXME: This could probably be done with less rightward drift. Also looks like two control
         // paths where report_error is called are also the only paths that advance to after
         // the match statement, so the error reporting could probably just be moved there.
-        let mut add_bounds = NodeMap();
+        let mut add_bounds: NodeMap<Vec<_>> = NodeMap();
         for pred in &generics.where_clause.predicates {
             if let WherePredicate::BoundPredicate(ref bound_pred) = *pred {
                 'next_bound: for bound in &bound_pred.bounds {
@@ -2364,7 +2364,7 @@ impl<'a> LoweringContext<'a> {
                         GenericParamKind::Type { .. } => {
                             if node_id == param.id {
                                 add_bounds.entry(param.id)
-                                    .or_insert(Vec::new())
+                                    .or_default()
                                     .push(bound.clone());
                                 continue 'next_bound;
                             }
@@ -2730,7 +2730,7 @@ impl<'a> LoweringContext<'a> {

             if let Some(ref trait_ref) = trait_ref {
                 if let Def::Trait(def_id) = trait_ref.path.def {
-                    this.trait_impls.entry(def_id).or_insert(vec![]).push(id);
+                    this.trait_impls.entry(def_id).or_default().push(id);
                 }
             }

@@ -512,7 +512,7 @@ impl LintBuffer {
             msg: msg.to_string(),
             diagnostic
         };
-        let arr = self.map.entry(id).or_insert(Vec::new());
+        let arr = self.map.entry(id).or_default();
         if !arr.contains(&early_lint) {
             arr.push(early_lint);
         }
@@ -391,37 +391,33 @@ fn resolve_lifetimes<'tcx>(

     let named_region_map = krate(tcx);

-    let mut defs = FxHashMap();
+    let mut rl = ResolveLifetimes {
+        defs: FxHashMap(),
+        late_bound: FxHashMap(),
+        object_lifetime_defaults: FxHashMap(),
+    };
+
     for (k, v) in named_region_map.defs {
         let hir_id = tcx.hir.node_to_hir_id(k);
-        let map = defs.entry(hir_id.owner_local_def_id())
-            .or_insert_with(|| Lrc::new(FxHashMap()));
+        let map = rl.defs.entry(hir_id.owner_local_def_id()).or_default();
         Lrc::get_mut(map).unwrap().insert(hir_id.local_id, v);
     }
-    let mut late_bound = FxHashMap();
     for k in named_region_map.late_bound {
         let hir_id = tcx.hir.node_to_hir_id(k);
-        let map = late_bound
-            .entry(hir_id.owner_local_def_id())
-            .or_insert_with(|| Lrc::new(FxHashSet()));
+        let map = rl.late_bound.entry(hir_id.owner_local_def_id()).or_default();
         Lrc::get_mut(map).unwrap().insert(hir_id.local_id);
     }
-    let mut object_lifetime_defaults = FxHashMap();
     for (k, v) in named_region_map.object_lifetime_defaults {
         let hir_id = tcx.hir.node_to_hir_id(k);
-        let map = object_lifetime_defaults
+        let map = rl.object_lifetime_defaults
             .entry(hir_id.owner_local_def_id())
-            .or_insert_with(|| Lrc::new(FxHashMap()));
+            .or_default();
         Lrc::get_mut(map)
             .unwrap()
             .insert(hir_id.local_id, Lrc::new(v));
     }

-    Lrc::new(ResolveLifetimes {
-        defs,
-        late_bound,
-        object_lifetime_defaults,
-    })
+    Lrc::new(rl)
 }

 fn krate<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> NamedRegionMap {
@@ -2174,7 +2174,7 @@ pub fn build_session_options_and_crate_config(
         );
     }

-    let mut externs = BTreeMap::new();
+    let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new();
     for arg in &matches.opt_strs("extern") {
         let mut parts = arg.splitn(2, '=');
         let name = match parts.next() {
@@ -2191,7 +2191,7 @@ pub fn build_session_options_and_crate_config(

         externs
             .entry(name.to_string())
-            .or_insert_with(BTreeSet::new)
+            .or_default()
             .insert(location.to_string());
     }

@@ -513,26 +513,26 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
                 {
                     let deps1 = vid_map
                         .entry(RegionTarget::RegionVid(r1))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps1.larger.insert(RegionTarget::RegionVid(r2));
                 }

                 let deps2 = vid_map
                     .entry(RegionTarget::RegionVid(r2))
-                    .or_insert_with(|| Default::default());
+                    .or_default();
                 deps2.smaller.insert(RegionTarget::RegionVid(r1));
             }
             &Constraint::RegSubVar(region, vid) => {
                 {
                     let deps1 = vid_map
                         .entry(RegionTarget::Region(region))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps1.larger.insert(RegionTarget::RegionVid(vid));
                 }

                 let deps2 = vid_map
                     .entry(RegionTarget::RegionVid(vid))
-                    .or_insert_with(|| Default::default());
+                    .or_default();
                 deps2.smaller.insert(RegionTarget::Region(region));
             }
             &Constraint::VarSubReg(vid, region) => {
@@ -542,13 +542,13 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
                 {
                     let deps1 = vid_map
                         .entry(RegionTarget::Region(r1))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps1.larger.insert(RegionTarget::Region(r2));
                 }

                 let deps2 = vid_map
                     .entry(RegionTarget::Region(r2))
-                    .or_insert_with(|| Default::default());
+                    .or_default();
                 deps2.smaller.insert(RegionTarget::Region(r1));
             }
         }
@@ -57,7 +57,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
             index: Option<usize>, // None if this is an old error
         }

-        let mut error_map : FxHashMap<_, _> =
+        let mut error_map : FxHashMap<_, Vec<_>> =
             self.reported_trait_errors.borrow().iter().map(|(&span, predicates)| {
                 (span, predicates.iter().map(|predicate| ErrorDescriptor {
                     predicate: predicate.clone(),
@@ -66,14 +66,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
             }).collect();

         for (index, error) in errors.iter().enumerate() {
-            error_map.entry(error.obligation.cause.span).or_insert(Vec::new()).push(
+            error_map.entry(error.obligation.cause.span).or_default().push(
                 ErrorDescriptor {
                     predicate: error.obligation.predicate.clone(),
                     index: Some(index)
                 });

             self.reported_trait_errors.borrow_mut()
-                .entry(error.obligation.cause.span).or_insert(Vec::new())
+                .entry(error.obligation.cause.span).or_default()
                 .push(error.obligation.predicate.clone());
         }

@@ -49,7 +49,7 @@ pub struct Graph {

 /// Children of a given impl, grouped into blanket/non-blanket varieties as is
 /// done in `TraitDef`.
-#[derive(RustcEncodable, RustcDecodable)]
+#[derive(Default, RustcEncodable, RustcDecodable)]
 struct Children {
     // Impls of a trait (or specializations of a given impl). To allow for
     // quicker lookup, the impls are indexed by a simplified version of their
@@ -81,13 +81,6 @@ enum Inserted {
 }

 impl<'a, 'gcx, 'tcx> Children {
-    fn new() -> Children {
-        Children {
-            nonblanket_impls: FxHashMap(),
-            blanket_impls: vec![],
-        }
-    }
-
     /// Insert an impl into this set of children without comparing to any existing impls
     fn insert_blindly(&mut self,
                       tcx: TyCtxt<'a, 'gcx, 'tcx>,
@@ -95,7 +88,7 @@ impl<'a, 'gcx, 'tcx> Children {
         let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
         if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) {
             debug!("insert_blindly: impl_def_id={:?} sty={:?}", impl_def_id, sty);
-            self.nonblanket_impls.entry(sty).or_insert(vec![]).push(impl_def_id)
+            self.nonblanket_impls.entry(sty).or_default().push(impl_def_id)
         } else {
             debug!("insert_blindly: impl_def_id={:?} sty=None", impl_def_id);
             self.blanket_impls.push(impl_def_id)
@@ -230,7 +223,7 @@ impl<'a, 'gcx, 'tcx> Children {
     }

     fn filtered(&mut self, sty: SimplifiedType) -> Box<dyn Iterator<Item = DefId> + '_> {
-        let nonblanket = self.nonblanket_impls.entry(sty).or_insert(vec![]).iter();
+        let nonblanket = self.nonblanket_impls.entry(sty).or_default().iter();
         Box::new(self.blanket_impls.iter().chain(nonblanket).cloned())
     }
 }
@@ -268,7 +261,7 @@ impl<'a, 'gcx, 'tcx> Graph {
                    trait_ref, impl_def_id, trait_def_id);

             self.parent.insert(impl_def_id, trait_def_id);
-            self.children.entry(trait_def_id).or_insert(Children::new())
+            self.children.entry(trait_def_id).or_default()
                 .insert_blindly(tcx, impl_def_id);
             return Ok(None);
         }
@@ -281,7 +274,7 @@ impl<'a, 'gcx, 'tcx> Graph {
         loop {
             use self::Inserted::*;

-            let insert_result = self.children.entry(parent).or_insert(Children::new())
+            let insert_result = self.children.entry(parent).or_default()
                 .insert(tcx, impl_def_id, simplified)?;

             match insert_result {
@@ -318,9 +311,8 @@ impl<'a, 'gcx, 'tcx> Graph {
                     self.parent.insert(impl_def_id, parent);

                     // Add G as N's child.
-                    let mut grand_children = Children::new();
-                    grand_children.insert_blindly(tcx, grand_child_to_be);
-                    self.children.insert(impl_def_id, grand_children);
+                    self.children.entry(impl_def_id).or_default()
+                        .insert_blindly(tcx, grand_child_to_be);
                     break;
                 }
                 ShouldRecurseOn(new_parent) => {
@@ -343,7 +335,7 @@ impl<'a, 'gcx, 'tcx> Graph {
                       was already present.");
         }

-        self.children.entry(parent).or_insert(Children::new()).insert_blindly(tcx, child);
+        self.children.entry(parent).or_default().insert_blindly(tcx, child);
     }

     /// The parent of a given impl, which is the def id of the trait when the
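Note on the `#[derive(Default)]` additions (`Children` in the hunks above, `TraitImpls` further down): `Entry::or_default` is only available when the map's value type implements `Default`, so replacing `or_insert(Children::new())` with `or_default()` requires deriving it. A standalone sketch of that requirement, with illustrative types rather than the compiler's own:

    use std::collections::HashMap;

    #[derive(Default)]
    struct Children {
        blanket_impls: Vec<u32>,
    }

    fn main() {
        let mut children: HashMap<&str, Children> = HashMap::new();
        // or_default() compiles because Children: Default via the derive.
        children.entry("Trait").or_default().blanket_impls.push(7);
        assert_eq!(children["Trait"].blanket_impls, vec![7]);
    }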
@@ -1132,11 +1132,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             None
         };

-        let mut trait_map = FxHashMap();
+        let mut trait_map: FxHashMap<_, Lrc<FxHashMap<_, _>>> = FxHashMap();
         for (k, v) in resolutions.trait_map {
             let hir_id = hir.node_to_hir_id(k);
-            let map = trait_map.entry(hir_id.owner)
-                .or_insert_with(|| Lrc::new(FxHashMap()));
+            let map = trait_map.entry(hir_id.owner).or_default();
             Lrc::get_mut(map).unwrap()
                 .insert(hir_id.local_id,
                         Lrc::new(StableVec::new(v)));
@@ -228,7 +228,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> {
         match self.sty {
             TyAdt(def, substs) => {
                 {
-                    let substs_set = visited.entry(def.did).or_insert(FxHashSet::default());
+                    let substs_set = visited.entry(def.did).or_default();
                     if !substs_set.insert(substs) {
                         // We are already calculating the inhabitedness of this type.
                         // The type must contain a reference to itself. Break the
@@ -41,6 +41,7 @@ pub struct TraitDef {
     pub def_path_hash: DefPathHash,
 }

+#[derive(Default)]
 pub struct TraitImpls {
     blanket_impls: Vec<DefId>,
     /// Impls indexed by their simplified self-type, for fast lookup.
@@ -143,47 +144,43 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
 pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                                 trait_id: DefId)
                                                 -> Lrc<TraitImpls> {
-    let mut remote_impls = Vec::new();
+    let mut impls = TraitImpls::default();

-    // Traits defined in the current crate can't have impls in upstream
-    // crates, so we don't bother querying the cstore.
-    if !trait_id.is_local() {
-        for &cnum in tcx.crates().iter() {
-            let impls = tcx.implementations_of_trait((cnum, trait_id));
-            remote_impls.extend(impls.iter().cloned());
-        }
-    }
-
-    let mut blanket_impls = Vec::new();
-    let mut non_blanket_impls = FxHashMap();
-
-    let local_impls = tcx.hir
-                         .trait_impls(trait_id)
-                         .into_iter()
-                         .map(|&node_id| tcx.hir.local_def_id(node_id));
-
-    for impl_def_id in local_impls.chain(remote_impls.into_iter()) {
+    {
+        let mut add_impl = |impl_def_id| {
             let impl_self_ty = tcx.type_of(impl_def_id);
             if impl_def_id.is_local() && impl_self_ty.references_error() {
-                continue
+                return;
             }

             if let Some(simplified_self_ty) =
                 fast_reject::simplify_type(tcx, impl_self_ty, false)
             {
-                non_blanket_impls
+                impls.non_blanket_impls
                     .entry(simplified_self_ty)
-                    .or_insert(vec![])
+                    .or_default()
                     .push(impl_def_id);
             } else {
-                blanket_impls.push(impl_def_id);
+                impls.blanket_impls.push(impl_def_id);
             }
         };

+        // Traits defined in the current crate can't have impls in upstream
+        // crates, so we don't bother querying the cstore.
+        if !trait_id.is_local() {
+            for &cnum in tcx.crates().iter() {
+                for &def_id in tcx.implementations_of_trait((cnum, trait_id)).iter() {
+                    add_impl(def_id);
+                }
+            }
+        }
+
-    Lrc::new(TraitImpls {
-        blanket_impls: blanket_impls,
-        non_blanket_impls: non_blanket_impls,
-    })
+        for &node_id in tcx.hir.trait_impls(trait_id) {
+            add_impl(tcx.hir.local_def_id(node_id));
+        }
+    }

+    Lrc::new(impls)
 }

 impl<'a> HashStable<StableHashingContext<'a>> for TraitImpls {
@@ -44,7 +44,7 @@ struct UnusedMutCx<'a, 'tcx: 'a> {
 impl<'a, 'tcx> UnusedMutCx<'a, 'tcx> {
     fn check_unused_mut_pat(&self, pats: &[P<hir::Pat>]) {
         let tcx = self.bccx.tcx;
-        let mut mutables = FxHashMap();
+        let mut mutables: FxHashMap<_, Vec<_>> = FxHashMap();
         for p in pats {
             p.each_binding(|_, hir_id, span, ident| {
                 // Skip anything that looks like `_foo`
@@ -60,7 +60,7 @@ impl<'a, 'tcx> UnusedMutCx<'a, 'tcx> {
                     _ => return,
                 }

-                mutables.entry(ident.name).or_insert(Vec::new()).push((hir_id, span));
+                mutables.entry(ident.name).or_default().push((hir_id, span));
             } else {
                 tcx.sess.delay_span_bug(span, "missing binding mode");
             }
@@ -181,7 +181,7 @@ fn build_local_id_to_index(body: Option<&hir::Body>,

     cfg.graph.each_node(|node_idx, node| {
         if let cfg::CFGNodeData::AST(id) = node.data {
-            index.entry(id).or_insert(vec![]).push(node_idx);
+            index.entry(id).or_default().push(node_idx);
         }
         true
     });
@@ -209,7 +209,7 @@ fn build_local_id_to_index(body: Option<&hir::Body>,
         }

         fn visit_pat(&mut self, p: &hir::Pat) {
-            self.index.entry(p.hir_id.local_id).or_insert(vec![]).push(self.entry);
+            self.index.entry(p.hir_id.local_id).or_default().push(self.entry);
             intravisit::walk_pat(self, p)
         }
     }
@@ -299,7 +299,7 @@ fn upstream_monomorphizations_provider<'a, 'tcx>(

     let cnums = tcx.all_crate_nums(LOCAL_CRATE);

-    let mut instances = DefIdMap();
+    let mut instances: DefIdMap<FxHashMap<_, _>> = DefIdMap();

     let cnum_stable_ids: IndexVec<CrateNum, Fingerprint> = {
         let mut cnum_stable_ids = IndexVec::from_elem_n(Fingerprint::ZERO,
@@ -318,8 +318,7 @@ fn upstream_monomorphizations_provider<'a, 'tcx>(
     for &cnum in cnums.iter() {
         for &(ref exported_symbol, _) in tcx.exported_symbols(cnum).iter() {
             if let &ExportedSymbol::Generic(def_id, substs) = exported_symbol {
-                let substs_map = instances.entry(def_id)
-                                          .or_insert_with(|| FxHashMap());
+                let substs_map = instances.entry(def_id).or_default();

                 match substs_map.entry(substs) {
                     Occupied(mut e) => {
@@ -1020,12 +1020,12 @@ fn collect_and_partition_mono_items<'a, 'tcx>(
     }).collect();

     if tcx.sess.opts.debugging_opts.print_mono_items.is_some() {
-        let mut item_to_cgus = FxHashMap();
+        let mut item_to_cgus: FxHashMap<_, Vec<_>> = FxHashMap();

         for cgu in &codegen_units {
             for (&mono_item, &linkage) in cgu.items() {
                 item_to_cgus.entry(mono_item)
-                            .or_insert(Vec::new())
+                            .or_default()
                             .push((cgu.name().clone(), linkage));
             }
         }
@@ -33,12 +33,12 @@ impl TestGraph {
         for &(source, target) in edges {
             graph.num_nodes = max(graph.num_nodes, source + 1);
             graph.num_nodes = max(graph.num_nodes, target + 1);
-            graph.successors.entry(source).or_insert(vec![]).push(target);
-            graph.predecessors.entry(target).or_insert(vec![]).push(source);
+            graph.successors.entry(source).or_default().push(target);
+            graph.predecessors.entry(target).or_default().push(source);
         }
         for node in 0..graph.num_nodes {
-            graph.successors.entry(node).or_insert(vec![]);
-            graph.predecessors.entry(node).or_insert(vec![]);
+            graph.successors.entry(node).or_default();
+            graph.predecessors.entry(node).or_default();
         }
         graph
     }
@@ -1788,7 +1788,7 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'a, 'tcx> {
         if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) {
             self.impls
                 .entry(trait_ref.def_id)
-                .or_insert(vec![])
+                .or_default()
                 .push(impl_id.index);
         }
     }
@@ -451,7 +451,10 @@ impl<'a> Context<'a> {
         let rlib_prefix = format!("lib{}{}", self.crate_name, extra_prefix);
         let staticlib_prefix = format!("{}{}{}", staticpair.0, self.crate_name, extra_prefix);

-        let mut candidates = FxHashMap();
+        let mut candidates: FxHashMap<
+            _,
+            (FxHashMap<_, _>, FxHashMap<_, _>, FxHashMap<_, _>),
+        > = FxHashMap();
         let mut staticlibs = vec![];

         // First, find all possible candidate rlibs and dylibs purely based on
@@ -493,8 +496,7 @@ impl<'a> Context<'a> {
             info!("lib candidate: {}", path.display());

             let hash_str = hash.to_string();
-            let slot = candidates.entry(hash_str)
-                .or_insert_with(|| (FxHashMap(), FxHashMap(), FxHashMap()));
+            let slot = candidates.entry(hash_str).or_default();
             let (ref mut rlibs, ref mut rmetas, ref mut dylibs) = *slot;
             fs::canonicalize(path)
                 .map(|p| {
@@ -248,7 +248,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {

                 self.activation_map
                     .entry(location)
-                    .or_insert(Vec::new())
+                    .or_default()
                     .push(borrow_index);
                 TwoPhaseActivation::ActivatedAt(location)
             }
@@ -80,7 +80,7 @@ fn precompute_borrows_out_of_scope<'tcx>(
                 debug!("borrow {:?} gets killed at {:?}", borrow_index, location);
                 borrows_out_of_scope_at_location
                     .entry(location)
-                    .or_insert(vec![])
+                    .or_default()
                     .push(borrow_index);
                 continue;
             }
@@ -696,7 +696,7 @@ fn internalize_symbols<'a, 'tcx>(_tcx: TyCtxt<'a, 'tcx, 'tcx>,
     inlining_map.iter_accesses(|accessor, accessees| {
         for accessee in accessees {
             accessor_map.entry(*accessee)
-                .or_insert(Vec::new())
+                .or_default()
                 .push(accessor);
         }
     });
@@ -528,7 +528,7 @@ pub fn write_mir_intro<'a, 'gcx, 'tcx>(
         if let Some(parent) = scope_data.parent_scope {
             scope_tree
                 .entry(parent)
-                .or_insert(vec![])
+                .or_default()
                 .push(SourceScope::new(index));
         } else {
             // Only the argument scope has no parent, because it's the root.
@@ -65,7 +65,7 @@ impl<'a, 'b, 'd> UnusedImportCheckVisitor<'a, 'b, 'd> {
                 // Check later.
                 return;
             }
-            self.unused_imports.entry(item_id).or_insert_with(NodeMap).insert(id, span);
+            self.unused_imports.entry(item_id).or_default().insert(id, span);
         } else {
             // This trait import is definitely used, in a way other than
             // method resolution.
@@ -112,7 +112,7 @@ impl<'a, 'b, 'cl> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b, 'cl> {
                 if items.len() == 0 {
                     self.unused_imports
                         .entry(self.base_id)
-                        .or_insert_with(NodeMap)
+                        .or_default()
                         .insert(id, span);
                 }
             } else {
@@ -1819,7 +1819,7 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {

     fn add_to_glob_map(&mut self, id: NodeId, ident: Ident) {
         if self.make_glob_map {
-            self.glob_map.entry(id).or_insert_with(FxHashSet).insert(ident.name);
+            self.glob_map.entry(id).or_default().insert(ident.name);
         }
     }

@@ -3703,14 +3703,14 @@ impl<'a, 'crateloader: 'a> Resolver<'a, 'crateloader> {

                     let seen = self.freevars_seen
                                    .entry(function_id)
-                                   .or_insert_with(|| NodeMap());
+                                   .or_default();
                     if let Some(&index) = seen.get(&node_id) {
                         def = Def::Upvar(node_id, index, function_id);
                         continue;
                     }
                     let vec = self.freevars
                                   .entry(function_id)
-                                  .or_insert_with(|| vec![]);
+                                  .or_default();
                     let depth = vec.len();
                     def = Def::Upvar(node_id, depth, function_id);

@@ -2005,7 +2005,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                                          closure_def_id: DefId,
                                          r: DeferredCallResolution<'gcx, 'tcx>) {
         let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
-        deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
+        deferred_call_resolutions.entry(closure_def_id).or_default().push(r);
     }

     fn remove_deferred_call_resolutions(&self,
@@ -242,7 +242,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                     .borrow_mut()
                     .adjustments_mut()
                     .entry(rhs_expr.hir_id)
-                    .or_insert(vec![])
+                    .or_default()
                     .push(autoref);
             }
         }
@@ -304,7 +304,7 @@ impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
         let impl_def_id = self.tcx.hir.local_def_id(item.id);
         let mut rc_vec = self.impls_map.inherent_impls
                              .entry(def_id)
-                             .or_insert_with(|| Lrc::new(vec![]));
+                             .or_default();

         // At this point, there should not be any clones of the
         // `Lrc`, so we can still safely push into it in place:
@@ -267,7 +267,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
         // all intermediate RegionVids. At the end, all constraints should
         // be between Regions (aka region variables). This gives us the information
         // we need to create the Generics.
-        let mut finished = FxHashMap();
+        let mut finished: FxHashMap<_, Vec<_>> = FxHashMap();

         let mut vid_map: FxHashMap<RegionTarget, RegionDeps> = FxHashMap();

@@ -281,25 +281,25 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                     {
                         let deps1 = vid_map
                             .entry(RegionTarget::RegionVid(r1))
-                            .or_insert_with(|| Default::default());
+                            .or_default();
                         deps1.larger.insert(RegionTarget::RegionVid(r2));
                     }

                     let deps2 = vid_map
                         .entry(RegionTarget::RegionVid(r2))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps2.smaller.insert(RegionTarget::RegionVid(r1));
                 }
                 &Constraint::RegSubVar(region, vid) => {
                     let deps = vid_map
                         .entry(RegionTarget::RegionVid(vid))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps.smaller.insert(RegionTarget::Region(region));
                 }
                 &Constraint::VarSubReg(vid, region) => {
                     let deps = vid_map
                         .entry(RegionTarget::RegionVid(vid))
-                        .or_insert_with(|| Default::default());
+                        .or_default();
                     deps.larger.insert(RegionTarget::Region(region));
                 }
                 &Constraint::RegSubReg(r1, r2) => {
@@ -308,7 +308,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                     if self.region_name(r1) != self.region_name(r2) {
                         finished
                             .entry(self.region_name(r2).expect("no region_name found"))
-                            .or_insert_with(|| Vec::new())
+                            .or_default()
                             .push(r1);
                     }
                 }
@@ -343,7 +343,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                     if self.region_name(r1) != self.region_name(r2) {
                         finished
                             .entry(self.region_name(r2).expect("no region name found"))
-                            .or_insert_with(|| Vec::new())
+                            .or_default()
                             .push(r1) // Larger, smaller
                     }
                 }
@@ -577,8 +577,8 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
         } = full_generics.clean(self.cx);

         let mut has_sized = FxHashSet();
-        let mut ty_to_bounds = FxHashMap();
-        let mut lifetime_to_bounds = FxHashMap();
+        let mut ty_to_bounds: FxHashMap<_, FxHashSet<_>> = FxHashMap();
+        let mut lifetime_to_bounds: FxHashMap<_, FxHashSet<_>> = FxHashMap();
         let mut ty_to_traits: FxHashMap<Type, FxHashSet<Type>> = FxHashMap();

         let mut ty_to_fn: FxHashMap<Type, (Option<PolyTrait>, Option<Type>)> = FxHashMap();
@@ -647,11 +647,11 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {

                         ty_to_bounds
                             .entry(ty.clone())
-                            .or_insert_with(|| FxHashSet());
+                            .or_default();
                     } else {
                         ty_to_bounds
                             .entry(ty.clone())
-                            .or_insert_with(|| FxHashSet())
+                            .or_default()
                             .insert(b.clone());
                     }
                 }
@@ -659,7 +659,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                 WherePredicate::RegionPredicate { lifetime, bounds } => {
                     lifetime_to_bounds
                         .entry(lifetime)
-                        .or_insert_with(|| FxHashSet())
+                        .or_default()
                         .extend(bounds);
                 }
                 WherePredicate::EqPredicate { lhs, rhs } => {
@@ -722,7 +722,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {

                                 let bounds = ty_to_bounds
                                     .entry(*ty.clone())
-                                    .or_insert_with(|| FxHashSet());
+                                    .or_default();

                                 bounds.insert(GenericBound::TraitBound(
                                     PolyTrait {
@@ -752,7 +752,7 @@ impl<'a, 'tcx, 'rcx, 'cstore> AutoTraitFinder<'a, 'tcx, 'rcx, 'cstore> {
                                 // loop
                                 ty_to_traits
                                     .entry(*ty.clone())
-                                    .or_insert_with(|| FxHashSet())
+                                    .or_default()
                                     .insert(*trait_.clone());
                             }
                             _ => panic!("Unexpected trait {:?} for {:?}", trait_, did),
@@ -34,7 +34,7 @@ use core::DocContext;

 pub fn where_clauses(cx: &DocContext, clauses: Vec<WP>) -> Vec<WP> {
     // First, partition the where clause into its separate components
-    let mut params = BTreeMap::new();
+    let mut params: BTreeMap<_, Vec<_>> = BTreeMap::new();
     let mut lifetimes = Vec::new();
     let mut equalities = Vec::new();
     let mut tybounds = Vec::new();
@@ -43,7 +43,7 @@ pub fn where_clauses(cx: &DocContext, clauses: Vec<WP>) -> Vec<WP> {
         match clause {
             WP::BoundPredicate { ty, bounds } => {
                 match ty {
-                    clean::Generic(s) => params.entry(s).or_insert(Vec::new())
+                    clean::Generic(s) => params.entry(s).or_default()
                                                .extend(bounds),
                     t => tybounds.push((t, ty_bounds(bounds))),
                 }
@@ -1245,7 +1245,7 @@ impl DocFolder for Cache {
         // Collect all the implementors of traits.
         if let clean::ImplItem(ref i) = item.inner {
             if let Some(did) = i.trait_.def_id() {
-                self.implementors.entry(did).or_insert(vec![]).push(Impl {
+                self.implementors.entry(did).or_default().push(Impl {
                     impl_item: item.clone(),
                 });
             }
@@ -1440,7 +1440,7 @@ impl DocFolder for Cache {
                 unreachable!()
             };
             for did in dids {
-                self.impls.entry(did).or_insert(vec![]).push(Impl {
+                self.impls.entry(did).or_default().push(Impl {
                     impl_item: item.clone(),
                 });
             }
@@ -1971,7 +1971,7 @@ impl Context {

     fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<NameDoc>> {
         // BTreeMap instead of HashMap to get a sorted output
-        let mut map = BTreeMap::new();
+        let mut map: BTreeMap<_, Vec<_>> = BTreeMap::new();
         for item in &m.items {
             if item.is_stripped() { continue }

@@ -1981,7 +1981,7 @@ impl Context {
                 Some(ref s) => s.to_string(),
             };
             let short = short.to_string();
-            map.entry(short).or_insert(vec![])
+            map.entry(short).or_default()
                 .push((myname, Some(plain_summary_line(item.doc_value()))));
         }

@@ -599,7 +599,7 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
 /// returns a map mapping crate names to their paths or else an
 /// error message.
 fn parse_externs(matches: &getopts::Matches) -> Result<Externs, String> {
-    let mut externs = BTreeMap::new();
+    let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new();
     for arg in &matches.opt_strs("extern") {
         let mut parts = arg.splitn(2, '=');
         let name = parts.next().ok_or("--extern value must not be empty".to_string())?;
@@ -607,7 +607,7 @@ fn parse_externs(matches: &getopts::Matches) -> Result<Externs, String> {
             .ok_or("--extern value must be of the format `foo=bar`"
                 .to_string())?;
         let name = name.to_string();
-        externs.entry(name).or_insert_with(BTreeSet::new).insert(location.to_string());
+        externs.entry(name).or_default().insert(location.to_string());
     }
     Ok(Externs::new(externs))
 }
@@ -327,7 +327,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         // Unresolved macros produce dummy outputs as a recovery measure.
         invocations.reverse();
         let mut expanded_fragments = Vec::new();
-        let mut derives = HashMap::new();
+        let mut derives: HashMap<Mark, Vec<_>> = HashMap::new();
         let mut undetermined_invocations = Vec::new();
         let (mut progress, mut force) = (false, !self.monotonic);
         loop {
@@ -388,7 +388,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs });
                 let item_with_markers =
                     add_derived_markers(&mut self.cx, item.span(), &traits, item.clone());
-                let derives = derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new);
+                let derives = derives.entry(invoc.expansion_data.mark).or_default();

                 for path in &traits {
                     let mark = Mark::fresh(self.cx.current_expansion.mark);
@@ -88,8 +88,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {

 fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
     let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
-    let values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
-    values.push(message);
+    cx.expansions.entry(sp).or_default().push(message);
 }

 /// Given `lhses` and `rhses`, this is the new macro we create
@@ -355,10 +355,10 @@ fn check_crate_duplicate(resolve: &Resolve, bad: &mut bool) {
         // "cargo", // FIXME(#53005)
         "rustc-ap-syntax",
     ];
-    let mut name_to_id = HashMap::new();
+    let mut name_to_id: HashMap<_, Vec<_>> = HashMap::new();
     for node in resolve.nodes.iter() {
         name_to_id.entry(node.id.split_whitespace().next().unwrap())
-            .or_insert(Vec::new())
+            .or_default()
             .push(&node.id);
     }

@@ -20,7 +20,7 @@ use std::path::Path;

 pub fn check(path: &Path, bad: &mut bool) {
     let mut contents = String::new();
-    let mut map = HashMap::new();
+    let mut map: HashMap<_, Vec<_>> = HashMap::new();
     super::walk(path,
                 &mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
                 &mut |file| {
@@ -61,7 +61,7 @@ pub fn check(path: &Path, bad: &mut bool) {
             Ok(n) => n,
             Err(..) => continue,
         };
-        map.entry(code).or_insert(Vec::new())
+        map.entry(code).or_default()
            .push((file.to_owned(), num + 1, line.to_owned()));
         break
     }