Auto merge of #89016 - lcnr:non_blanket_impls, r=nikomatsakis,michaelwoerister
fix non_blanket_impls iteration order

We sometimes iterate over all `non_blanket_impls`; it is not clear whether the previously non-deterministic order is observable outside of error messages (i.e. as incremental-compilation bugs). This should fix the underlying issue of #86986.

Second attempt of #88718.

r? `@nikomatsakis`
commit bf642323d6
8 changed files with 43 additions and 118 deletions
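The core of the change is swapping the `FxHashMap` behind `non_blanket_impls` for an `FxIndexMap`, which iterates in insertion order rather than hash order. Below is a minimal sketch of that difference, assuming the `indexmap` crate (which, to my understanding, backs rustc's `FxIndexMap`); the string keys and numeric impl ids are made-up placeholders for `SimplifiedType` keys and `DefId`s, not rustc code:

```rust
// Sketch only: compares iteration order of a hash map vs. an insertion-ordered map.
use indexmap::IndexMap;
use std::collections::HashMap;

fn main() {
    let mut hashed: HashMap<&str, Vec<u32>> = HashMap::new();
    let mut indexed: IndexMap<&str, Vec<u32>> = IndexMap::new();

    for (self_ty, impl_id) in [("Vec<T>", 1), ("Box<T>", 2), ("&T", 3)] {
        hashed.entry(self_ty).or_insert_with(Vec::new).push(impl_id);
        indexed.entry(self_ty).or_insert_with(Vec::new).push(impl_id);
    }

    // Hash-map iteration order depends on the hash values (and, for std's
    // HashMap, a random seed), so it is not something to rely on.
    println!("{:?}", hashed.keys().collect::<Vec<_>>());

    // IndexMap iterates in insertion order: always ["Vec<T>", "Box<T>", "&T"] here.
    println!("{:?}", indexed.keys().collect::<Vec<_>>());
}
```

Presumably this is also what lets the hand-written `HashStable` impls and the `hash_stable_trait_impls` helper be dropped in favour of `#[derive(HashStable)]` in the hunks below.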
@@ -277,7 +277,7 @@ fn attempt_static(tcx: TyCtxt<'_>) -> Option<DependencyList> {
     let all_crates_available_as_rlib = tcx
         .crates(())
         .iter()
-        .cloned()
+        .copied()
         .filter_map(|cnum| {
             if tcx.dep_kind(cnum).macros_only() {
                 return None;
@@ -291,10 +291,11 @@ fn attempt_static(tcx: TyCtxt<'_>) -> Option<DependencyList> {

     // All crates are available in an rlib format, so we're just going to link
     // everything in explicitly so long as it's actually required.
-    let last_crate = tcx.crates(()).len();
-    let mut ret = (1..last_crate + 1)
-        .map(|cnum| {
-            if tcx.dep_kind(CrateNum::new(cnum)) == CrateDepKind::Explicit {
+    let mut ret = tcx
+        .crates(())
+        .iter()
+        .map(|&cnum| {
+            if tcx.dep_kind(cnum) == CrateDepKind::Explicit {
                 Linkage::Static
             } else {
                 Linkage::NotLinked
@@ -304,17 +304,7 @@ pub fn provide(providers: &mut Providers) {
     // traversal, but not globally minimal across all crates.
     let bfs_queue = &mut VecDeque::new();

-    // Preferring shortest paths alone does not guarantee a
-    // deterministic result; so sort by crate num to avoid
-    // hashtable iteration non-determinism. This only makes
-    // things as deterministic as crate-nums assignment is,
-    // which is to say, its not deterministic in general. But
-    // we believe that libstd is consistently assigned crate
-    // num 1, so it should be enough to resolve #46112.
-    let mut crates: Vec<CrateNum> = (*tcx.crates(())).to_owned();
-    crates.sort();
-
-    for &cnum in crates.iter() {
+    for &cnum in tcx.crates(()) {
         // Ignore crates without a corresponding local `extern crate` item.
         if tcx.missing_extern_crate_item(cnum) {
             continue;
@@ -323,35 +313,31 @@ pub fn provide(providers: &mut Providers) {
         bfs_queue.push_back(DefId { krate: cnum, index: CRATE_DEF_INDEX });
     }

-    // (restrict scope of mutable-borrow of `visible_parent_map`)
-    {
-        let visible_parent_map = &mut visible_parent_map;
-        let mut add_child = |bfs_queue: &mut VecDeque<_>, child: &Export, parent: DefId| {
-            if child.vis != ty::Visibility::Public {
-                return;
-            }
-
-            if let Some(child) = child.res.opt_def_id() {
-                match visible_parent_map.entry(child) {
-                    Entry::Occupied(mut entry) => {
-                        // If `child` is defined in crate `cnum`, ensure
-                        // that it is mapped to a parent in `cnum`.
-                        if child.is_local() && entry.get().is_local() {
-                            entry.insert(parent);
-                        }
-                    }
-                    Entry::Vacant(entry) => {
-                        entry.insert(parent);
-                        bfs_queue.push_back(child);
-                    }
-                }
-            }
-        };
-
-        while let Some(def) = bfs_queue.pop_front() {
-            for child in tcx.item_children(def).iter() {
-                add_child(bfs_queue, child, def);
-            }
-        }
-    }
+    let mut add_child = |bfs_queue: &mut VecDeque<_>, child: &Export, parent: DefId| {
+        if child.vis != ty::Visibility::Public {
+            return;
+        }
+
+        if let Some(child) = child.res.opt_def_id() {
+            match visible_parent_map.entry(child) {
+                Entry::Occupied(mut entry) => {
+                    // If `child` is defined in crate `cnum`, ensure
+                    // that it is mapped to a parent in `cnum`.
+                    if child.is_local() && entry.get().is_local() {
+                        entry.insert(parent);
+                    }
+                }
+                Entry::Vacant(entry) => {
+                    entry.insert(parent);
+                    bfs_queue.push_back(child);
+                }
+            }
+        }
+    };
+
+    while let Some(def) = bfs_queue.pop_front() {
+        for child in tcx.item_children(def).iter() {
+            add_child(bfs_queue, child, def);
+        }
+    }

@@ -1708,9 +1708,10 @@ impl EncodeContext<'a, 'tcx> {

     fn encode_crate_deps(&mut self) -> Lazy<[CrateDep]> {
         empty_proc_macro!(self);
-        let crates = self.tcx.crates(());
-
-        let mut deps = crates
+
+        let deps = self
+            .tcx
+            .crates(())
             .iter()
             .map(|&cnum| {
                 let dep = CrateDep {
@@ -1724,8 +1725,6 @@ impl EncodeContext<'a, 'tcx> {
             })
             .collect::<Vec<_>>();

-        deps.sort_by_key(|&(cnum, _)| cnum);
-
         {
             // Sanity-check the crate numbers
             let mut expected_cnum = 1;
@@ -1,9 +1,9 @@
 use crate::ich;
 use crate::middle::cstore::CrateStore;
-use crate::ty::{fast_reject, TyCtxt};
+use crate::ty::TyCtxt;

 use rustc_ast as ast;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::Lrc;
 use rustc_hir as hir;
@@ -14,9 +14,6 @@ use rustc_span::source_map::SourceMap;
 use rustc_span::symbol::Symbol;
 use rustc_span::{BytePos, CachingSourceMapView, SourceFile, Span, SpanData};

-use smallvec::SmallVec;
-use std::cmp::Ord;
-
 fn compute_ignored_attr_names() -> FxHashSet<Symbol> {
     debug_assert!(!ich::IGNORED_ATTRIBUTES.is_empty());
     ich::IGNORED_ATTRIBUTES.iter().copied().collect()
@@ -241,39 +238,3 @@ impl<'a> rustc_span::HashStableContext for StableHashingContext<'a> {
 }

 impl rustc_session::HashStableContext for StableHashingContext<'a> {}
-
-pub fn hash_stable_trait_impls<'a>(
-    hcx: &mut StableHashingContext<'a>,
-    hasher: &mut StableHasher,
-    blanket_impls: &[DefId],
-    non_blanket_impls: &FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
-) {
-    {
-        let mut blanket_impls: SmallVec<[_; 8]> =
-            blanket_impls.iter().map(|&def_id| hcx.def_path_hash(def_id)).collect();
-
-        if blanket_impls.len() > 1 {
-            blanket_impls.sort_unstable();
-        }
-
-        blanket_impls.hash_stable(hcx, hasher);
-    }
-
-    {
-        let mut keys: SmallVec<[_; 8]> =
-            non_blanket_impls.keys().map(|k| (k, k.map_def(|d| hcx.def_path_hash(d)))).collect();
-        keys.sort_unstable_by(|&(_, ref k1), &(_, ref k2)| k1.cmp(k2));
-        keys.len().hash_stable(hcx, hasher);
-        for (key, ref stable_key) in keys {
-            stable_key.hash_stable(hcx, hasher);
-            let mut impls: SmallVec<[_; 8]> =
-                non_blanket_impls[key].iter().map(|&impl_id| hcx.def_path_hash(impl_id)).collect();
-
-            if impls.len() > 1 {
-                impls.sort_unstable();
-            }
-
-            impls.hash_stable(hcx, hasher);
-        }
-    }
-}
@@ -1,8 +1,6 @@
 //! ICH - Incremental Compilation Hash

-pub use self::hcx::{
-    hash_stable_trait_impls, NodeIdHashingMode, StableHashingContext, StableHashingContextProvider,
-};
+pub use self::hcx::{NodeIdHashingMode, StableHashingContext, StableHashingContextProvider};
 use rustc_span::symbol::{sym, Symbol};

 mod hcx;
@@ -1,9 +1,7 @@
-use crate::ich::{self, StableHashingContext};
 use crate::ty::fast_reject::SimplifiedType;
 use crate::ty::fold::TypeFoldable;
 use crate::ty::{self, TyCtxt};
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_errors::ErrorReported;
 use rustc_hir::def_id::{DefId, DefIdMap};
 use rustc_span::symbol::Ident;
@@ -50,19 +48,19 @@ impl Graph {

 /// Children of a given impl, grouped into blanket/non-blanket varieties as is
 /// done in `TraitDef`.
-#[derive(Default, TyEncodable, TyDecodable, Debug)]
+#[derive(Default, TyEncodable, TyDecodable, Debug, HashStable)]
 pub struct Children {
     // Impls of a trait (or specializations of a given impl). To allow for
     // quicker lookup, the impls are indexed by a simplified version of their
     // `Self` type: impls with a simplifiable `Self` are stored in
-    // `nonblanket_impls` keyed by it, while all other impls are stored in
+    // `non_blanket_impls` keyed by it, while all other impls are stored in
     // `blanket_impls`.
     //
     // A similar division is used within `TraitDef`, but the lists there collect
     // together *all* the impls for a trait, and are populated prior to building
     // the specialization graph.
     /// Impls of the trait.
-    pub nonblanket_impls: FxHashMap<SimplifiedType, Vec<DefId>>,
+    pub non_blanket_impls: FxIndexMap<SimplifiedType, Vec<DefId>>,

     /// Blanket impls associated with the trait.
     pub blanket_impls: Vec<DefId>,
@@ -235,11 +233,3 @@ pub fn ancestors(
         })
     }
 }
-
-impl<'a> HashStable<StableHashingContext<'a>> for Children {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        let Children { ref nonblanket_impls, ref blanket_impls } = *self;
-
-        ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, nonblanket_impls);
-    }
-}
@@ -1,4 +1,3 @@
-use crate::ich::{self, StableHashingContext};
 use crate::traits::specialization_graph;
 use crate::ty::fast_reject;
 use crate::ty::fold::TypeFoldable;
@@ -7,8 +6,7 @@ use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_hir::definitions::DefPathHash;

-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_errors::ErrorReported;
 use rustc_macros::HashStable;

@@ -66,11 +64,11 @@ pub enum TraitSpecializationKind {
     AlwaysApplicable,
 }

-#[derive(Default, Debug)]
+#[derive(Default, Debug, HashStable)]
 pub struct TraitImpls {
     blanket_impls: Vec<DefId>,
     /// Impls indexed by their simplified self type, for fast lookup.
-    non_blanket_impls: FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
+    non_blanket_impls: FxIndexMap<fast_reject::SimplifiedType, Vec<DefId>>,
 }

 impl TraitImpls {
@@ -249,11 +247,3 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> TraitImpls {

     impls
 }
-
-impl<'a> HashStable<StableHashingContext<'a>> for TraitImpls {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        let TraitImpls { ref blanket_impls, ref non_blanket_impls } = *self;
-
-        ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, non_blanket_impls);
-    }
-}
@@ -50,7 +50,7 @@ impl ChildrenExt for Children {
         let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
         if let Some(st) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) {
             debug!("insert_blindly: impl_def_id={:?} st={:?}", impl_def_id, st);
-            self.nonblanket_impls.entry(st).or_default().push(impl_def_id)
+            self.non_blanket_impls.entry(st).or_default().push(impl_def_id)
         } else {
             debug!("insert_blindly: impl_def_id={:?} st=None", impl_def_id);
             self.blanket_impls.push(impl_def_id)
@@ -65,7 +65,7 @@ impl ChildrenExt for Children {
         let vec: &mut Vec<DefId>;
         if let Some(st) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) {
             debug!("remove_existing: impl_def_id={:?} st={:?}", impl_def_id, st);
-            vec = self.nonblanket_impls.get_mut(&st).unwrap();
+            vec = self.non_blanket_impls.get_mut(&st).unwrap();
         } else {
             debug!("remove_existing: impl_def_id={:?} st=None", impl_def_id);
             vec = &mut self.blanket_impls;
@@ -218,7 +218,7 @@ impl ChildrenExt for Children {
 }

 fn iter_children(children: &mut Children) -> impl Iterator<Item = DefId> + '_ {
-    let nonblanket = children.nonblanket_impls.iter_mut().flat_map(|(_, v)| v.iter());
+    let nonblanket = children.non_blanket_impls.iter().flat_map(|(_, v)| v.iter());
     children.blanket_impls.iter().chain(nonblanket).cloned()
 }

@@ -226,7 +226,7 @@ fn filtered_children(
     children: &mut Children,
     st: SimplifiedType,
 ) -> impl Iterator<Item = DefId> + '_ {
-    let nonblanket = children.nonblanket_impls.entry(st).or_default().iter();
+    let nonblanket = children.non_blanket_impls.entry(st).or_default().iter();
     children.blanket_impls.iter().chain(nonblanket).cloned()
 }