Address review.

Camille GILLOT 2021-03-18 19:26:08 +01:00
parent 65a8681a17
commit fe89f3236c
5 changed files with 45 additions and 55 deletions

View file

@@ -391,8 +391,6 @@ impl DirtyCleanVisitor<'tcx> {
     fn assert_clean(&self, item_span: Span, dep_node: DepNode) {
         debug!("assert_clean({:?})", dep_node);

-        // if the node wasn't previously evaluated and now is (or vice versa),
-        // then the node isn't actually clean or dirty.
         if self.tcx.dep_graph.is_red(&dep_node) {
             let dep_node_str = self.dep_node_str(&dep_node);
             self.tcx
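For context, a minimal sketch of the `is_red` check this assertion relies on, using stand-in types rather than the real rustc definitions: a node that was never re-evaluated in the current session has no color at all, so only an explicit red marking trips the assertion.

// Stand-in types; the real DepGraph keeps a per-node color table.
#[derive(Clone, Copy, PartialEq, Debug)]
enum DepNodeColor {
    Red,
    Green,
}

struct DepNode;

struct DepGraph {
    color: Option<DepNodeColor>, // None = not evaluated this session
}

impl DepGraph {
    fn node_color(&self, _dep_node: &DepNode) -> Option<DepNodeColor> {
        self.color
    }

    // Red only if the node was re-executed this session and its result changed.
    fn is_red(&self, dep_node: &DepNode) -> bool {
        self.node_color(dep_node) == Some(DepNodeColor::Red)
    }
}

fn main() {
    let never_evaluated = DepGraph { color: None };
    let recomputed_same = DepGraph { color: Some(DepNodeColor::Green) };
    let recomputed_changed = DepGraph { color: Some(DepNodeColor::Red) };
    assert!(!never_evaluated.is_red(&DepNode));
    assert!(!recomputed_same.is_red(&DepNode));
    assert!(recomputed_changed.is_red(&DepNode));
}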

View file

@@ -34,10 +34,8 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
     let dep_graph_path = dep_graph_path(sess);
     let staging_dep_graph_path = staging_dep_graph_path(sess);

-    join(
-        || sess.time("assert_dep_graph", || crate::assert_dep_graph(tcx)),
-        || sess.time("check_dirty_clean", || dirty_clean::check_dirty_clean_annotations(tcx)),
-    );
+    sess.time("assert_dep_graph", || crate::assert_dep_graph(tcx));
+    sess.time("check_dirty_clean", || dirty_clean::check_dirty_clean_annotations(tcx));

     if sess.opts.debugging_opts.incremental_info {
         tcx.dep_graph.print_incremental_info()
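The change above drops the `join` and runs the two debugging passes sequentially, each under its own timer. A rough sketch of that shape, with a stand-in `time` helper in place of the real session timer:

use std::time::Instant;

// Stand-in timer: run the closure and report how long it took.
fn time<R>(label: &str, f: impl FnOnce() -> R) -> R {
    let start = Instant::now();
    let result = f();
    eprintln!("{} took {:?}", label, start.elapsed());
    result
}

fn main() {
    // The two checks now run one after the other instead of under `join`.
    time("assert_dep_graph", || { /* assert_dep_graph(tcx) would run here */ });
    time("check_dirty_clean", || { /* check_dirty_clean_annotations(tcx) would run here */ });
}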

View file

@@ -626,11 +626,10 @@ impl<K: DepKind> DepGraph<K> {
         // There may be multiple threads trying to mark the same dep node green concurrently

-        let dep_node_index = {
         // We allocating an entry for the node in the current dependency graph and
         // adding all the appropriate edges imported from the previous graph
-            data.current.intern_dark_green_node(&data.previous, prev_dep_node_index)
-        };
+        let dep_node_index =
+            data.current.promote_node_and_deps_to_current(&data.previous, prev_dep_node_index);

         // ... emitting any stored diagnostic ...
@@ -713,7 +712,7 @@ impl<K: DepKind> DepGraph<K> {
         }
     }

-    // Returns true if the given node has been marked as green during the
+    // Returns true if the given node has been marked as red during the
     // current compilation session. Used in various assertions
     pub fn is_red(&self, dep_node: &DepNode<K>) -> bool {
         self.node_color(dep_node) == Some(DepNodeColor::Red)
@@ -833,17 +832,11 @@ rustc_index::newtype_index! {
 /// will be populated as we run queries or tasks. We never remove nodes from the
 /// graph: they are only added.
 ///
-/// The nodes in it are identified by a `DepNodeIndex`. Internally, this maps to
-/// a `HybridIndex`, which identifies which collection in the `data` field
-/// contains a node's data. Which collection is used for a node depends on
-/// whether the node was present in the `PreviousDepGraph`, and if so, the color
-/// of the node. Each type of node can share more or less data with the previous
-/// graph. When possible, we can store just the index of the node in the
-/// previous graph, rather than duplicating its data in our own collections.
-/// This is important, because these graph structures are some of the largest in
-/// the compiler.
+/// The nodes in it are identified by a `DepNodeIndex`. We avoid keeping the nodes
+/// in memory. This is important, because these graph structures are some of the
+/// largest in the compiler.
 ///
-/// For the same reason, we also avoid storing `DepNode`s more than once as map
+/// For this reason, we avoid storing `DepNode`s more than once as map
 /// keys. The `new_node_to_index` map only contains nodes not in the previous
 /// graph, and we map nodes in the previous graph to indices via a two-step
 /// mapping. `PreviousDepGraph` maps from `DepNode` to `SerializedDepNodeIndex`,
@@ -939,6 +932,15 @@ impl<K: DepKind> CurrentDepGraph<K> {
         }
     }

+    #[cfg(debug_assertions)]
+    fn record_edge(&self, dep_node_index: DepNodeIndex, key: DepNode<K>) {
+        if let Some(forbidden_edge) = &self.forbidden_edge {
+            forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
+        }
+    }
+
+    /// Writes the node to the current dep-graph and allocates a `DepNodeIndex` for it.
+    /// Assumes that this is a node that has no equivalent in the previous dep-graph.
     fn intern_new_node(
         &self,
         key: DepNode<K>,
@@ -951,9 +953,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 let dep_node_index = self.encoder.borrow().send(key, current_fingerprint, edges);
                 entry.insert(dep_node_index);

                 #[cfg(debug_assertions)]
-                if let Some(forbidden_edge) = &self.forbidden_edge {
-                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                }
+                self.record_edge(dep_node_index, key);

                 dep_node_index
             }
         }
@@ -964,20 +964,20 @@ impl<K: DepKind> CurrentDepGraph<K> {
         prev_graph: &PreviousDepGraph<K>,
         key: DepNode<K>,
         edges: EdgesVec,
-        current_fingerprint: Option<Fingerprint>,
+        fingerprint: Option<Fingerprint>,
         print_status: bool,
     ) -> (DepNodeIndex, Option<(SerializedDepNodeIndex, DepNodeColor)>) {
         let print_status = cfg!(debug_assertions) && print_status;

         if let Some(prev_index) = prev_graph.node_to_index_opt(&key) {
             // Determine the color and index of the new `DepNode`.
-            if let Some(current_fingerprint) = current_fingerprint {
-                if current_fingerprint == prev_graph.fingerprint_by_index(prev_index) {
+            if let Some(fingerprint) = fingerprint {
+                if fingerprint == prev_graph.fingerprint_by_index(prev_index) {
                     if print_status {
                         eprintln!("[task::green] {:?}", key);
                     }

-                    // This is a light green node: it existed in the previous compilation,
+                    // This is a green node: it existed in the previous compilation,
                     // its query was re-executed, and it has the same result as before.
                     let mut prev_index_to_index = self.prev_index_to_index.lock();
@@ -985,16 +985,14 @@ impl<K: DepKind> CurrentDepGraph<K> {
                         Some(dep_node_index) => dep_node_index,
                         None => {
                             let dep_node_index =
-                                self.encoder.borrow().send(key, current_fingerprint, edges);
+                                self.encoder.borrow().send(key, fingerprint, edges);
                             prev_index_to_index[prev_index] = Some(dep_node_index);
                             dep_node_index
                         }
                     };

                     #[cfg(debug_assertions)]
-                    if let Some(forbidden_edge) = &self.forbidden_edge {
-                        forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                    }
+                    self.record_edge(dep_node_index, key);

                     (dep_node_index, Some((prev_index, DepNodeColor::Green(dep_node_index))))
                 } else {
                     if print_status {
@@ -1009,16 +1007,14 @@ impl<K: DepKind> CurrentDepGraph<K> {
                         Some(dep_node_index) => dep_node_index,
                         None => {
                             let dep_node_index =
-                                self.encoder.borrow().send(key, current_fingerprint, edges);
+                                self.encoder.borrow().send(key, fingerprint, edges);
                             prev_index_to_index[prev_index] = Some(dep_node_index);
                             dep_node_index
                         }
                     };

                     #[cfg(debug_assertions)]
-                    if let Some(forbidden_edge) = &self.forbidden_edge {
-                        forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                    }
+                    self.record_edge(dep_node_index, key);

                     (dep_node_index, Some((prev_index, DepNodeColor::Red)))
                 }
             } else {
@@ -1043,9 +1039,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 };

                 #[cfg(debug_assertions)]
-                if let Some(forbidden_edge) = &self.forbidden_edge {
-                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                }
+                self.record_edge(dep_node_index, key);

                 (dep_node_index, Some((prev_index, DepNodeColor::Red)))
             }
         } else {
@@ -1053,16 +1047,16 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 eprintln!("[task::new] {:?}", key);
             }

-            let current_fingerprint = current_fingerprint.unwrap_or(Fingerprint::ZERO);
+            let fingerprint = fingerprint.unwrap_or(Fingerprint::ZERO);

             // This is a new node: it didn't exist in the previous compilation session.
-            let dep_node_index = self.intern_new_node(key, edges, current_fingerprint);
+            let dep_node_index = self.intern_new_node(key, edges, fingerprint);

             (dep_node_index, None)
         }
     }

-    fn intern_dark_green_node(
+    fn promote_node_and_deps_to_current(
         &self,
         prev_graph: &PreviousDepGraph<K>,
         prev_index: SerializedDepNodeIndex,
@@ -1086,9 +1080,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 );
                 prev_index_to_index[prev_index] = Some(dep_node_index);
                 #[cfg(debug_assertions)]
-                if let Some(forbidden_edge) = &self.forbidden_edge {
-                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                }
+                self.record_edge(dep_node_index, key);
                 dep_node_index
             }
         }
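Besides renaming `intern_dark_green_node` to `promote_node_and_deps_to_current`, the main mechanical change above is that the repeated `#[cfg(debug_assertions)]` bookkeeping for `forbidden_edge` is folded into a single `record_edge` helper, so each interning path calls it in one line. A self-contained sketch of that pattern, with stand-in types rather than the real `CurrentDepGraph` internals:

use std::collections::HashMap;
use std::sync::Mutex;

type DepNodeIndex = u32;
type DepNode = &'static str; // stand-in for the real node key

struct ForbiddenEdge {
    // Debug-only reverse map from index to node, used when reporting forbidden edges.
    index_to_node: Mutex<HashMap<DepNodeIndex, DepNode>>,
}

struct CurrentDepGraph {
    forbidden_edge: Option<ForbiddenEdge>,
    next_index: Mutex<DepNodeIndex>,
}

impl CurrentDepGraph {
    // The deduplicated helper: compiled only in debug builds.
    #[cfg(debug_assertions)]
    fn record_edge(&self, dep_node_index: DepNodeIndex, key: DepNode) {
        if let Some(forbidden_edge) = &self.forbidden_edge {
            forbidden_edge.index_to_node.lock().unwrap().insert(dep_node_index, key);
        }
    }

    // Every interning path now ends with the same one-line call instead of
    // repeating the `if let Some(forbidden_edge)` block.
    fn intern_new_node(&self, key: DepNode) -> DepNodeIndex {
        let mut next = self.next_index.lock().unwrap();
        let dep_node_index = *next;
        *next += 1;

        #[cfg(debug_assertions)]
        self.record_edge(dep_node_index, key);

        dep_node_index
    }
}

fn main() {
    let graph = CurrentDepGraph {
        forbidden_edge: Some(ForbiddenEdge { index_to_node: Mutex::new(HashMap::new()) }),
        next_index: Mutex::new(0),
    };
    let a = graph.intern_new_node("node_a");
    let b = graph.intern_new_node("node_b");
    assert_eq!((a, b), (0, 1));
}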

View file

@@ -32,7 +32,8 @@ impl<K: DepKind> DepGraphQuery<K> {
         for &target in edges.iter() {
             let target = self.dep_index_to_index[target];

-            // Skip missing edges.
+            // We may miss the edges that are pushed while the `DepGraphQuery` is being accessed.
+            // Skip them to avoid issues.
             if let Some(target) = target {
                 self.graph.add_edge(source, target, ());
             }
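The reworded comment explains why a missing target is skipped silently: nodes can still be pushed while the `DepGraphQuery` snapshot is being assembled, so their indices may not be mapped yet. A tiny sketch of that skip pattern, with plain slices standing in for the real query structures:

// Targets pushed after the index map was built show up as `None`
// and are skipped instead of causing a panic.
fn add_edges(
    dep_index_to_index: &[Option<usize>],
    source: usize,
    edges: &[usize],
    graph: &mut Vec<(usize, usize)>,
) {
    for &target in edges {
        if let Some(target) = dep_index_to_index[target] {
            graph.push((source, target));
        }
    }
}

fn main() {
    // Node 1 was pushed concurrently and has no mapping yet.
    let dep_index_to_index = [Some(0), None, Some(2)];
    let mut graph = Vec::new();
    add_edges(&dep_index_to_index, 0, &[0, 1, 2], &mut graph);
    assert_eq!(graph, vec![(0, 0), (0, 2)]);
}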

View file

@@ -73,7 +73,7 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
 {
     #[instrument(skip(d))]
     fn decode(d: &mut opaque::Decoder<'a>) -> Result<SerializedDepGraph<K>, String> {
-        let position = d.position();
+        let start_position = d.position();

         // The last 16 bytes are the node count and edge count.
         debug!("position: {:?}", d.position());
@@ -85,7 +85,7 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
         debug!(?node_count, ?edge_count);
         debug!("position: {:?}", d.position());

-        d.set_position(position);
+        d.set_position(start_position);
         debug!("position: {:?}", d.position());

         let mut nodes = IndexVec::with_capacity(node_count);
@@ -137,7 +137,7 @@ struct Stat<K: DepKind> {
     edge_counter: u64,
 }

-struct EncodingStatus<K: DepKind> {
+struct EncoderState<K: DepKind> {
     encoder: FileEncoder,
     total_node_count: usize,
     total_edge_count: usize,
@@ -145,7 +145,7 @@ struct EncodingStatus<K: DepKind> {
     stats: Option<FxHashMap<K, Stat<K>>>,
 }

-impl<K: DepKind> EncodingStatus<K> {
+impl<K: DepKind> EncoderState<K> {
     fn new(encoder: FileEncoder, record_stats: bool) -> Self {
         Self {
             encoder,
@@ -186,8 +186,9 @@ impl<K: DepKind> EncodingStatus<K> {
         debug!(?index, ?node);
         let encoder = &mut self.encoder;

-        self.result =
-            std::mem::replace(&mut self.result, Ok(())).and_then(|()| node.encode(encoder));
+        if self.result.is_ok() {
+            self.result = node.encode(encoder);
+        }

         index
     }
@@ -209,7 +210,7 @@ impl<K: DepKind> EncodingStatus<K> {
 }

 pub struct GraphEncoder<K: DepKind> {
-    status: Lock<EncodingStatus<K>>,
+    status: Lock<EncoderState<K>>,
     record_graph: Option<Lock<DepGraphQuery<K>>>,
 }
@@ -225,7 +226,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
         } else {
             None
         };
-        let status = Lock::new(EncodingStatus::new(encoder, record_stats));
+        let status = Lock::new(EncoderState::new(encoder, record_stats));
         GraphEncoder { status, record_graph }
     }
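Besides the `EncodingStatus` → `EncoderState` rename and the clearer `start_position` name, the per-node encode step now guards on `is_ok()` instead of shuffling the stored result through `mem::replace` and `and_then`; in both versions the first error is kept and later nodes are no longer written. A small sketch of that sticky-error pattern, with a toy writer standing in for the real `FileEncoder`:

// Stand-in encoder state: keeps the first error and skips writes after it.
struct EncoderState {
    result: Result<(), String>,
    buffer: Vec<u8>,
}

impl EncoderState {
    fn encode(&mut self, byte: u8) {
        // Only attempt the write while no error has been recorded yet.
        if self.result.is_ok() {
            self.result = self.try_write(byte);
        }
    }

    fn try_write(&mut self, byte: u8) -> Result<(), String> {
        if byte == 0 {
            Err("refusing to write a zero byte".to_string())
        } else {
            self.buffer.push(byte);
            Ok(())
        }
    }
}

fn main() {
    let mut state = EncoderState { result: Ok(()), buffer: Vec::new() };
    for byte in [1u8, 2, 0, 3] {
        state.encode(byte);
    }
    // The `0` write failed; the later `3` was skipped because the error sticks.
    assert_eq!(state.buffer, vec![1, 2]);
    assert!(state.result.is_err());
}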