1
Fork 0

amortize dfs storage creation

This commit is contained in:
Niko Matsakis 2018-03-29 03:37:29 -04:00
parent 96f3e560d9
commit 499d784fbd
2 changed files with 65 additions and 31 deletions

View file

@ -18,9 +18,26 @@ use borrow_check::nll::region_infer::values::{RegionElementIndex, RegionValueEle
use syntax::codemap::Span;
use rustc::mir::{Location, Mir};
use rustc::ty::RegionVid;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::Idx;
/// Reusable scratch space for the region-inference DFS. Created once
/// (see `new_dfs_storage`) and threaded through repeated `dfs` calls so
/// the allocations are amortized instead of rebuilt per call.
pub(super) struct DfsStorage {
    // Worklist of locations still to visit; drained by `dfs` via push/pop.
    stack: Vec<Location>,
    // Visited set, one bit per region element index; sized to
    // `elements.num_elements()` and cleared at the start of each `dfs`.
    visited: BitVector,
}
impl<'tcx> RegionInferenceContext<'tcx> {
/// Creates dfs storage for use by dfs; this should be shared
/// across as many calls to dfs as possible to amortize allocation
/// costs.
pub(super) fn new_dfs_storage(&self) -> DfsStorage {
let num_elements = self.elements.num_elements();
DfsStorage {
stack: vec![],
visited: BitVector::new(num_elements),
}
}
/// Function used to satisfy or test a `R1: R2 @ P`
/// constraint. The core idea is that it performs a DFS starting
/// from `P`. The precise actions *during* that DFS depend on the
@ -35,17 +52,20 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// - `Err(early)` if the walk was exited early by `op`. `early` is the
/// value that `op` returned.
/// value that `op` returned.
#[inline(never)] // ensure dfs is identifiable in profiles
pub(super) fn dfs<C>(&self, mir: &Mir<'tcx>, mut op: C) -> Result<bool, C::Early>
pub(super) fn dfs<C>(
&self,
mir: &Mir<'tcx>,
dfs: &mut DfsStorage,
mut op: C,
) -> Result<bool, C::Early>
where
C: DfsOp,
{
let mut changed = false;
let mut stack = vec![];
let mut visited = FxHashSet();
stack.push(op.start_point());
while let Some(p) = stack.pop() {
dfs.visited.clear();
dfs.stack.push(op.start_point());
while let Some(p) = dfs.stack.pop() {
let point_index = self.elements.index(p);
if !op.source_region_contains(point_index) {
@ -53,7 +73,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
continue;
}
if !visited.insert(p) {
if !dfs.visited.insert(point_index.index()) {
debug!(" already visited");
continue;
}
@ -63,25 +83,27 @@ impl<'tcx> RegionInferenceContext<'tcx> {
let block_data = &mir[p.block];
let start_stack_len = stack.len();
let start_stack_len = dfs.stack.len();
if p.statement_index < block_data.statements.len() {
stack.push(Location {
dfs.stack.push(Location {
statement_index: p.statement_index + 1,
..p
});
} else {
stack.extend(block_data.terminator().successors().iter().map(
|&basic_block| {
Location {
dfs.stack.extend(
block_data
.terminator()
.successors()
.iter()
.map(|&basic_block| Location {
statement_index: 0,
block: basic_block,
}
},
));
}),
);
}
if stack.len() == start_stack_len {
if dfs.stack.len() == start_stack_len {
// If we reach the END point in the graph, then copy
// over any skolemized end points in the `from_region`
// and make sure they are included in the `to_region`.
@ -230,9 +252,8 @@ impl<'v, 'tcx> DfsOp for TestTargetOutlivesSource<'v, 'tcx> {
// `X: ur_in_source`, OK.
if self.inferred_values
.universal_regions_outlived_by(self.target_region)
.any(|ur_in_target| {
self.universal_regions.outlives(ur_in_target, ur_in_source)
}) {
.any(|ur_in_target| self.universal_regions.outlives(ur_in_target, ur_in_source))
{
continue;
}

View file

@ -436,7 +436,9 @@ impl<'tcx> RegionInferenceContext<'tcx> {
) -> Option<ClosureRegionRequirements<'gcx>> {
assert!(self.inferred_values.is_none(), "values already inferred");
self.propagate_constraints(mir);
let dfs_storage = &mut self.new_dfs_storage();
self.propagate_constraints(mir, dfs_storage);
// If this is a closure, we can propagate unsatisfied
// `outlives_requirements` to our creator, so create a vector
@ -449,7 +451,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
None
};
self.check_type_tests(infcx, mir, mir_def_id, outlives_requirements.as_mut());
self.check_type_tests(infcx, mir, dfs_storage, mir_def_id, outlives_requirements.as_mut());
self.check_universal_regions(infcx, mir_def_id, outlives_requirements.as_mut());
@ -469,7 +471,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// Re-execute the region inference, this time tracking causal information.
/// This is significantly slower, so it is done only when an error is being reported.
pub(super) fn compute_causal_info(&self, mir: &Mir<'tcx>) -> RegionCausalInfo {
let inferred_values = self.compute_region_values(mir, TrackCauses(true));
let dfs_storage = &mut self.new_dfs_storage();
let inferred_values = self.compute_region_values(mir, dfs_storage, TrackCauses(true));
RegionCausalInfo { inferred_values }
}
@ -477,14 +480,19 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// for each region variable until all the constraints are
/// satisfied. Note that some values may grow **too** large to be
/// feasible, but we check this later.
fn propagate_constraints(&mut self, mir: &Mir<'tcx>) {
fn propagate_constraints(&mut self, mir: &Mir<'tcx>, dfs_storage: &mut dfs::DfsStorage) {
self.dependency_map = Some(self.build_dependency_map());
let inferred_values = self.compute_region_values(mir, TrackCauses(false));
let inferred_values = self.compute_region_values(mir, dfs_storage, TrackCauses(false));
self.inferred_values = Some(inferred_values);
}
#[inline(never)] // ensure dfs is identifiable in profiles
fn compute_region_values(&self, mir: &Mir<'tcx>, track_causes: TrackCauses) -> RegionValues {
fn compute_region_values(
&self,
mir: &Mir<'tcx>,
dfs_storage: &mut dfs::DfsStorage,
track_causes: TrackCauses,
) -> RegionValues {
debug!("compute_region_values()");
debug!("compute_region_values: constraints={:#?}", {
let mut constraints: Vec<_> = self.constraints.iter().collect();
@ -515,6 +523,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
// outlives constraint.
let Ok(made_changes) = self.dfs(
mir,
dfs_storage,
CopyFromSourceToTarget {
source_region: constraint.sub,
target_region: constraint.sup,
@ -569,6 +578,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
&self,
infcx: &InferCtxt<'_, 'gcx, 'tcx>,
mir: &Mir<'tcx>,
dfs_storage: &mut dfs::DfsStorage,
mir_def_id: DefId,
mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
) {
@ -577,7 +587,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
for type_test in &self.type_tests {
debug!("check_type_test: {:?}", type_test);
if self.eval_region_test(mir, type_test.point, type_test.lower_bound, &type_test.test) {
if self.eval_region_test(mir, dfs_storage, type_test.point, type_test.lower_bound, &type_test.test) {
continue;
}
@ -834,6 +844,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
fn eval_region_test(
&self,
mir: &Mir<'tcx>,
dfs_storage: &mut dfs::DfsStorage,
point: Location,
lower_bound: RegionVid,
test: &RegionTest,
@ -846,19 +857,19 @@ impl<'tcx> RegionInferenceContext<'tcx> {
match test {
RegionTest::IsOutlivedByAllRegionsIn(regions) => regions
.iter()
.all(|&r| self.eval_outlives(mir, r, lower_bound, point)),
.all(|&r| self.eval_outlives(mir, dfs_storage, r, lower_bound, point)),
RegionTest::IsOutlivedByAnyRegionIn(regions) => regions
.iter()
.any(|&r| self.eval_outlives(mir, r, lower_bound, point)),
.any(|&r| self.eval_outlives(mir, dfs_storage, r, lower_bound, point)),
RegionTest::Any(tests) => tests
.iter()
.any(|test| self.eval_region_test(mir, point, lower_bound, test)),
.any(|test| self.eval_region_test(mir, dfs_storage, point, lower_bound, test)),
RegionTest::All(tests) => tests
.iter()
.all(|test| self.eval_region_test(mir, point, lower_bound, test)),
.all(|test| self.eval_region_test(mir, dfs_storage, point, lower_bound, test)),
}
}
@ -866,6 +877,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
fn eval_outlives(
&self,
mir: &Mir<'tcx>,
dfs_storage: &mut dfs::DfsStorage,
sup_region: RegionVid,
sub_region: RegionVid,
point: Location,
@ -881,6 +893,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
// yield an `Err` result.
match self.dfs(
mir,
dfs_storage,
TestTargetOutlivesSource {
source_region: sub_region,
target_region: sup_region,