Rollup merge of #139236 - Zoxc:anon-counter, r=davidtwco
Use a session counter to make anon dep nodes unique

This changes the unique session hash used to ensure unique anon dep nodes per session from a timestamp to a counter. This is nicer for debugging, as it makes the dep graph deterministic.
Commit: 844b7c7935

2 changed files with 16 additions and 8 deletions
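To illustrate why the switch matters, here is a minimal standalone Rust sketch, not rustc code: it contrasts a seed derived from a session counter with one derived from a wall-clock timestamp. The helper names `seed_from_counter` and `seed_from_timestamp`, and the use of std's `DefaultHasher`, are illustrative stand-ins for rustc's `StableHasher`/`Fingerprint` machinery.

    // Standalone sketch (not rustc code): derive an anon-id seed from a session
    // counter instead of a wall-clock timestamp. With the counter, the seed is a
    // pure function of how many sessions came before, so reruns are reproducible.
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    use std::time::{SystemTime, UNIX_EPOCH};

    fn seed_from_counter(session_count: u64) -> u64 {
        let mut hasher = DefaultHasher::new();
        session_count.hash(&mut hasher);
        hasher.finish()
    }

    fn seed_from_timestamp() -> u64 {
        // The old approach: hash the current time, which differs on every run.
        let nanos = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_nanos();
        let mut hasher = DefaultHasher::new();
        nanos.hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        // Deterministic: the same session number always produces the same seed.
        assert_eq!(seed_from_counter(3), seed_from_counter(3));
        println!("counter seed:   {:#018x}", seed_from_counter(3));
        println!("timestamp seed: {:#018x}", seed_from_timestamp());
    }

Because the counter-based seed depends only on the session number, replaying the same sequence of sessions reproduces the same anon dep node fingerprints, which is what makes the resulting dep graph deterministic for debugging.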
@@ -1174,8 +1174,7 @@ pub(super) struct CurrentDepGraph<D: Deps> {
     /// ID from the previous session. In order to side-step this problem, we make
     /// sure that anonymous `NodeId`s allocated in different sessions don't overlap.
     /// This is implemented by mixing a session-key into the ID fingerprint of
-    /// each anon node. The session-key is just a random number generated when
-    /// the `DepGraph` is created.
+    /// each anon node. The session-key is a hash of the number of previous sessions.
     anon_id_seed: Fingerprint,
 
     /// These are simple counters that are for profiling and
@@ -1193,12 +1192,8 @@ impl<D: Deps> CurrentDepGraph<D> {
         record_stats: bool,
         previous: Arc<SerializedDepGraph>,
     ) -> Self {
-        use std::time::{SystemTime, UNIX_EPOCH};
-
-        let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
-        let nanos = duration.as_nanos();
         let mut stable_hasher = StableHasher::new();
-        nanos.hash(&mut stable_hasher);
+        previous.session_count().hash(&mut stable_hasher);
         let anon_id_seed = stable_hasher.finish();
 
         #[cfg(debug_assertions)]
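As a rough picture of how the seed is then used (a hypothetical sketch, not the actual fingerprint mixing in rustc): the per-session seed is hashed together with each anonymous node's data, so equal node data from two different sessions still yields distinct ids. `anon_node_id` and `DefaultHasher` here stand in for the real `Fingerprint`/`StableHasher` logic.

    // Hypothetical sketch (not the actual fingerprint mixing): fold the
    // session-unique seed into each anonymous node's hash so that ids allocated
    // in different sessions cannot collide.
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    fn anon_node_id(anon_id_seed: u64, node_data: &impl Hash) -> u64 {
        let mut hasher = DefaultHasher::new();
        anon_id_seed.hash(&mut hasher); // differs per session (hash of the session count)
        node_data.hash(&mut hasher);    // identical data across sessions...
        hasher.finish()                 // ...still produces different ids
    }

    fn main() {
        let data = "same anon task dependencies";
        // Seeds from two different sessions give distinct ids for the same data.
        assert_ne!(anon_node_id(0x1111, &data), anon_node_id(0x2222, &data));
    }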
@@ -92,6 +92,9 @@ pub struct SerializedDepGraph {
     /// Stores a map from fingerprints to nodes per dep node kind.
     /// This is the reciprocal of `nodes`.
     index: Vec<UnhashMap<PackedFingerprint, SerializedDepNodeIndex>>,
+    /// The number of previous compilation sessions. This is used to generate
+    /// unique anon dep nodes per session.
+    session_count: u64,
 }
 
 impl SerializedDepGraph {
@@ -146,6 +149,11 @@ impl SerializedDepGraph {
     pub fn node_count(&self) -> usize {
         self.nodes.len()
     }
+
+    #[inline]
+    pub fn session_count(&self) -> u64 {
+        self.session_count
+    }
 }
 
 /// A packed representation of an edge's start index and byte width.
@@ -252,6 +260,8 @@ impl SerializedDepGraph {
             .map(|_| UnhashMap::with_capacity_and_hasher(d.read_u32() as usize, Default::default()))
             .collect();
 
+        let session_count = d.read_u64();
+
         for (idx, node) in nodes.iter_enumerated() {
             if index[node.kind.as_usize()].insert(node.hash, idx).is_some() {
                 // Side effect nodes can have duplicates
@@ -273,6 +283,7 @@ impl SerializedDepGraph {
             edge_list_indices,
             edge_list_data,
             index,
+            session_count,
         })
     }
 }
@@ -603,7 +614,7 @@ impl<D: Deps> EncoderState<D> {
             stats: _,
             kind_stats,
             marker: _,
-            previous: _,
+            previous,
         } = self;
 
         let node_count = total_node_count.try_into().unwrap();
@@ -614,6 +625,8 @@ impl<D: Deps> EncoderState<D> {
             count.encode(&mut encoder);
         }
 
+        previous.session_count.checked_add(1).unwrap().encode(&mut encoder);
+
         debug!(?node_count, ?edge_count);
         debug!("position: {:?}", encoder.position());
         IntEncodedWithFixedSize(node_count).encode(&mut encoder);
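The last two hunks persist the counter: the decoder reads `session_count` from the serialized graph, and the encoder writes `previous.session_count + 1` for the next session. A simplified round-trip sketch, assuming plain little-endian I/O rather than rustc's actual encoder and decoder types (`encode_session_count` and `decode_session_count` are hypothetical helpers):

    // Simplified round trip, assuming plain little-endian I/O rather than
    // rustc's actual encoder/decoder types.
    fn encode_session_count(out: &mut Vec<u8>, previous_session_count: u64) {
        // Mirror the diff: store the previous count plus one for the next session,
        // panicking on overflow rather than silently wrapping.
        let next = previous_session_count.checked_add(1).unwrap();
        out.extend_from_slice(&next.to_le_bytes());
    }

    fn decode_session_count(bytes: &[u8]) -> u64 {
        u64::from_le_bytes(bytes[..8].try_into().unwrap())
    }

    fn main() {
        let mut buf = Vec::new();
        encode_session_count(&mut buf, 0);         // first session: previous count is 0
        assert_eq!(decode_session_count(&buf), 1); // the next session reads back 1
    }

Using `checked_add` matches the diff above: exhausting the counter would panic rather than silently wrap and risk reusing an old session key.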