Unify with_task functions.
Remove with_eval_always_task.
parent f2c8707abb
commit eeb3c8f4b7
2 changed files with 78 additions and 105 deletions
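The commit collapses the dep-graph's two tracked entry points into one: the public `with_task` now handles the "incremental compilation disabled" fast path itself, and `with_task_impl` decides internally whether to track reads by checking `key.kind.is_eval_always()`, so the separate `with_eval_always_task` wrapper (which passed a `create_task` callback returning `None`) can be removed; `hash_result` also changes from `impl FnOnce(..)` to a plain `fn` pointer. The following is a minimal, self-contained sketch of that shape only — `DepGraph`, `DepNode`, and `TaskDeps` here are simplified stand-ins, not rustc's real types:

use std::sync::Mutex;

#[derive(Clone, Copy)]
struct DepNode {
    eval_always: bool, // stand-in for `key.kind.is_eval_always()`
}

#[derive(Default)]
struct TaskDeps {
    reads: Vec<usize>, // stand-in for the SmallVec of read dep-node indices
}

struct DepGraph {
    enabled: bool, // stand-in for `self.data.is_some()` / `is_fully_enabled()`
}

impl DepGraph {
    // The unified public entry point: short-circuit when incremental is off.
    fn with_task<A, R>(&self, key: DepNode, arg: A, task: fn(A) -> R) -> (R, usize) {
        if self.enabled {
            self.with_task_impl(key, arg, task)
        } else {
            // Incremental compilation is off: just run the task untracked.
            (task(arg), usize::MAX)
        }
    }

    // Only called when the graph is enabled; decides internally whether to track reads.
    fn with_task_impl<A, R>(&self, key: DepNode, arg: A, task: fn(A) -> R) -> (R, usize) {
        // Eval-always tasks get no `TaskDeps`, so no reads are recorded for them.
        let task_deps = if key.eval_always { None } else { Some(Mutex::new(TaskDeps::default())) };
        let result = task(arg); // rustc runs this under `K::with_deps(task_deps.as_ref(), ..)`
        let edges = task_deps.map_or_else(Vec::new, |lock| lock.into_inner().unwrap().reads);
        let dep_node_index = edges.len(); // placeholder for actually interning the node
        (result, dep_node_index)
    }
}

fn main() {
    let graph = DepGraph { enabled: true };
    let key = DepNode { eval_always: true };
    let (result, index) = graph.with_task(key, 20, |n: u32| n + 1);
    println!("result = {result}, node index = {index}");
}

The actual diff follows.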
@@ -215,24 +215,17 @@ impl<K: DepKind> DepGraph<K> {
         cx: Ctxt,
         arg: A,
         task: fn(Ctxt, A) -> R,
-        hash_result: impl FnOnce(&mut Ctxt::StableHashingContext, &R) -> Option<Fingerprint>,
+        hash_result: fn(&mut Ctxt::StableHashingContext, &R) -> Option<Fingerprint>,
     ) -> (R, DepNodeIndex) {
-        self.with_task_impl(
-            key,
-            cx,
-            arg,
-            task,
-            |_key| {
-                Some(TaskDeps {
-                    #[cfg(debug_assertions)]
-                    node: Some(_key),
-                    reads: SmallVec::new(),
-                    read_set: Default::default(),
-                    phantom_data: PhantomData,
-                })
-            },
-            hash_result,
-        )
+        if self.is_fully_enabled() {
+            self.with_task_impl(key, cx, arg, task, hash_result)
+        } else {
+            // Incremental compilation is turned off. We just execute the task
+            // without tracking. We still provide a dep-node index that uniquely
+            // identifies the task so that we have a cheap way of referring to
+            // the query for self-profiling.
+            (task(cx, arg), self.next_virtual_depnode_index())
+        }
     }

     fn with_task_impl<Ctxt: HasDepContext<DepKind = K>, A: Debug, R>(
@@ -241,71 +234,74 @@ impl<K: DepKind> DepGraph<K> {
         cx: Ctxt,
         arg: A,
         task: fn(Ctxt, A) -> R,
-        create_task: fn(DepNode<K>) -> Option<TaskDeps<K>>,
-        hash_result: impl FnOnce(&mut Ctxt::StableHashingContext, &R) -> Option<Fingerprint>,
+        hash_result: fn(&mut Ctxt::StableHashingContext, &R) -> Option<Fingerprint>,
     ) -> (R, DepNodeIndex) {
-        if let Some(ref data) = self.data {
-            // If the following assertion triggers, it can have two reasons:
-            // 1. Something is wrong with DepNode creation, either here or
-            //    in `DepGraph::try_mark_green()`.
-            // 2. Two distinct query keys get mapped to the same `DepNode`
-            //    (see for example #48923).
-            assert!(
-                !self.dep_node_exists(&key),
-                "forcing query with already existing `DepNode`\n\
-                 - query-key: {:?}\n\
-                 - dep-node: {:?}",
-                arg,
-                key
-            );
-
-            let dcx = cx.dep_context();
-            let task_deps = create_task(key).map(Lock::new);
-            let result = K::with_deps(task_deps.as_ref(), || task(cx, arg));
-            let edges = task_deps.map_or_else(|| smallvec![], |lock| lock.into_inner().reads);
-
-            let mut hcx = dcx.create_stable_hashing_context();
-            let hashing_timer = dcx.profiler().incr_result_hashing();
-            let current_fingerprint = hash_result(&mut hcx, &result);
-
-            let print_status = cfg!(debug_assertions) && dcx.sess().opts.debugging_opts.dep_tasks;
-
-            // Get timer for profiling `DepNode` interning
-            let node_intern_timer = self
-                .node_intern_event_id
-                .map(|eid| dcx.profiler().generic_activity_with_event_id(eid));
-            // Intern the new `DepNode`.
-            let (dep_node_index, prev_and_color) = data.current.intern_node(
-                dcx.profiler(),
-                &data.previous,
-                key,
-                edges,
-                current_fingerprint,
-                print_status,
-            );
-            drop(node_intern_timer);
-
-            hashing_timer.finish_with_query_invocation_id(dep_node_index.into());
-
-            if let Some((prev_index, color)) = prev_and_color {
-                debug_assert!(
-                    data.colors.get(prev_index).is_none(),
-                    "DepGraph::with_task() - Duplicate DepNodeColor \
-                            insertion for {:?}",
-                    key
-                );
-
-                data.colors.insert(prev_index, color);
-            }
-
-            (result, dep_node_index)
-        } else {
-            // Incremental compilation is turned off. We just execute the task
-            // without tracking. We still provide a dep-node index that uniquely
-            // identifies the task so that we have a cheap way of referring to
-            // the query for self-profiling.
-            (task(cx, arg), self.next_virtual_depnode_index())
-        }
+        // This function is only called when the graph is enabled.
+        let data = self.data.as_ref().unwrap();
+
+        // If the following assertion triggers, it can have two reasons:
+        // 1. Something is wrong with DepNode creation, either here or
+        //    in `DepGraph::try_mark_green()`.
+        // 2. Two distinct query keys get mapped to the same `DepNode`
+        //    (see for example #48923).
+        assert!(
+            !self.dep_node_exists(&key),
+            "forcing query with already existing `DepNode`\n\
+                 - query-key: {:?}\n\
+                 - dep-node: {:?}",
+            arg,
+            key
+        );
+
+        let task_deps = if key.kind.is_eval_always() {
+            None
+        } else {
+            Some(Lock::new(TaskDeps {
+                #[cfg(debug_assertions)]
+                node: Some(key),
+                reads: SmallVec::new(),
+                read_set: Default::default(),
+                phantom_data: PhantomData,
+            }))
+        };
+        let result = K::with_deps(task_deps.as_ref(), || task(cx, arg));
+        let edges = task_deps.map_or_else(|| smallvec![], |lock| lock.into_inner().reads);
+
+        let dcx = cx.dep_context();
+        let mut hcx = dcx.create_stable_hashing_context();
+        let hashing_timer = dcx.profiler().incr_result_hashing();
+        let current_fingerprint = hash_result(&mut hcx, &result);
+
+        let print_status = cfg!(debug_assertions) && dcx.sess().opts.debugging_opts.dep_tasks;
+
+        // Get timer for profiling `DepNode` interning
+        let node_intern_timer =
+            self.node_intern_event_id.map(|eid| dcx.profiler().generic_activity_with_event_id(eid));
+        // Intern the new `DepNode`.
+        let (dep_node_index, prev_and_color) = data.current.intern_node(
+            dcx.profiler(),
+            &data.previous,
+            key,
+            edges,
+            current_fingerprint,
+            print_status,
+        );
+        drop(node_intern_timer);
+
+        hashing_timer.finish_with_query_invocation_id(dep_node_index.into());
+
+        if let Some((prev_index, color)) = prev_and_color {
+            debug_assert!(
+                data.colors.get(prev_index).is_none(),
+                "DepGraph::with_task() - Duplicate DepNodeColor \
+                            insertion for {:?}",
+                key
+            );
+
+            data.colors.insert(prev_index, color);
+        }
+
+        (result, dep_node_index)
     }

     /// Executes something within an "anonymous" task, that is, a task the
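With the body hoisted out of the old `if let Some(ref data) = self.data` branch, `with_task_impl` can simply unwrap the graph data, and eval-always keys get `task_deps == None`, so their edge list comes out empty when the node is interned. A tiny self-contained illustration of that `map_or_else` idiom, using `std::sync::Mutex` as a stand-in for rustc's `Lock` and plain `Vec` for the `SmallVec` of reads:

use std::sync::Mutex;

struct TaskDeps {
    reads: Vec<u32>, // stand-in for the SmallVec of read dep-node indices
}

// Eval-always tasks run with `task_deps == None`: nothing was tracked while the
// task executed, so the node is interned with an empty edge list.
fn collect_edges(task_deps: Option<Mutex<TaskDeps>>) -> Vec<u32> {
    task_deps.map_or_else(Vec::new, |lock| lock.into_inner().unwrap().reads)
}

fn main() {
    assert!(collect_edges(None).is_empty());
    let tracked = Mutex::new(TaskDeps { reads: vec![1, 2, 3] });
    assert_eq!(collect_edges(Some(tracked)), vec![1, 2, 3]);
}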
@@ -372,19 +368,6 @@ impl<K: DepKind> DepGraph<K> {
         }
     }

-    /// Executes something within an "eval-always" task which is a task
-    /// that runs whenever anything changes.
-    pub fn with_eval_always_task<Ctxt: HasDepContext<DepKind = K>, A: Debug, R>(
-        &self,
-        key: DepNode<K>,
-        cx: Ctxt,
-        arg: A,
-        task: fn(Ctxt, A) -> R,
-        hash_result: impl FnOnce(&mut Ctxt::StableHashingContext, &R) -> Option<Fingerprint>,
-    ) -> (R, DepNodeIndex) {
-        self.with_task_impl(key, cx, arg, task, |_| None, hash_result)
-    }
-
     #[inline]
     pub fn read_index(&self, dep_node_index: DepNodeIndex) {
         if let Some(ref data) = self.data {
@@ -491,23 +491,13 @@ where
         // `to_dep_node` is expensive for some `DepKind`s.
         let dep_node = dep_node_opt.unwrap_or_else(|| query.to_dep_node(*tcx.dep_context(), &key));

-        if query.eval_always {
-            tcx.dep_context().dep_graph().with_eval_always_task(
-                dep_node,
-                *tcx.dep_context(),
-                key,
-                compute,
-                query.hash_result,
-            )
-        } else {
-            tcx.dep_context().dep_graph().with_task(
-                dep_node,
-                *tcx.dep_context(),
-                key,
-                compute,
-                query.hash_result,
-            )
-        }
+        tcx.dep_context().dep_graph().with_task(
+            dep_node,
+            *tcx.dep_context(),
+            key,
+            compute,
+            query.hash_result,
+        )
     });

     prof_timer.finish_with_query_invocation_id(dep_node_index.into());
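On the caller side the `if query.eval_always { ... } else { ... }` split disappears, because the graph's single `with_task` entry point now handles both kinds of query itself. A hypothetical, simplified caller showing only that shape (the names `DepGraph`, `force_query`, and the boolean flag are illustrative stand-ins, not the real plumbing code):

struct DepGraph;

#[derive(Debug)]
struct DepNodeIndex(u32);

impl DepGraph {
    // Single entry point: dispatch on `eval_always` happens inside the graph,
    // not at the call site.
    fn with_task<A, R>(&self, _eval_always: bool, arg: A, task: fn(A) -> R) -> (R, DepNodeIndex) {
        (task(arg), DepNodeIndex(0))
    }
}

// One call, regardless of whether the query is eval-always.
fn force_query(graph: &DepGraph, eval_always: bool, key: u32) -> (u32, DepNodeIndex) {
    graph.with_task(eval_always, key, |k| k * 2)
}

fn main() {
    let graph = DepGraph;
    let (value, index) = force_query(&graph, true, 21);
    println!("value = {value}, node = {index:?}");
}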