
Auto merge of #69295 - ecstatic-morse:unified-dataflow-generators, r=tmandry

Use new dataflow framework for generators

#65672 introduced a new dataflow framework that can handle arbitrarily complex transfer functions as well as ones expressed as a series of gen/kill operations. This PR ports the analyses used to implement generators to the new framework so that we can remove the old one. See #68241 for a prior example of this. The new framework has some superficial API changes, but these should not alter the behavior of the generator passes in any way.
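For reference, under the new interface an analysis declares its domain via `AnalysisDomain`, its transfer function via `GenKillAnalysis`, and is driven to a fixpoint with `into_engine(..).iterate_to_fixpoint()`. Below is a minimal sketch condensed from the `MaybeStorageLive` changes in this diff; it assumes rustc-internal items (`mir`, `BitSet`, the `dataflow::generic` module), omits the terminator and call-return effects, and is illustrative rather than standalone.

// Minimal sketch of the new interface, condensed from the `MaybeStorageLive`
// changes in this diff. Not compilable outside the compiler tree.
pub struct MaybeStorageLive;

impl<'tcx> dataflow::AnalysisDomain<'tcx> for MaybeStorageLive {
    // One bit per MIR local.
    type Idx = Local;
    const NAME: &'static str = "maybe_storage_live";

    fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize {
        body.local_decls.len()
    }

    fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut BitSet<Self::Idx>) {
        // Arguments (other than the generator's implicit `self`) have live
        // storage on function entry.
        for arg in body.args_iter().skip(1) {
            on_entry.insert(arg);
        }
    }
}

impl<'tcx> dataflow::GenKillAnalysis<'tcx> for MaybeStorageLive {
    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        stmt: &mir::Statement<'tcx>,
        _: Location,
    ) {
        // The transfer function is written against the `GenKill` trait, so the
        // same code can update either a full `BitSet` or a cached gen/kill pair.
        match stmt.kind {
            StatementKind::StorageLive(l) => trans.gen(l),
            StatementKind::StorageDead(l) => trans.kill(l),
            _ => {}
        }
    }
    // `terminator_effect` and `call_return_effect` elided; see the diff below.
}

Consumers then run the analysis and inspect it through a cursor, as the generator pass does further down:

let mut storage_live = MaybeStorageLive
    .into_engine(tcx, body, def_id)
    .iterate_to_fixpoint()
    .into_results_cursor(body);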

r? @tmandry
This commit is contained in:
bors 2020-03-01 07:53:13 +00:00
commit ee50590803
5 changed files with 160 additions and 159 deletions

View file

@@ -75,6 +75,24 @@ where
pub fn entry_set_for_block(&self, block: BasicBlock) -> &BitSet<A::Idx> {
&self.entry_sets[block]
}
pub fn visit_with(
&self,
body: &'mir mir::Body<'tcx>,
blocks: impl IntoIterator<Item = BasicBlock>,
vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = BitSet<A::Idx>>,
) {
visit_results(body, blocks, self, vis)
}
pub fn visit_in_rpo_with(
&self,
body: &'mir mir::Body<'tcx>,
vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = BitSet<A::Idx>>,
) {
let blocks = mir::traversal::reverse_postorder(body);
visit_results(body, blocks.map(|(bb, _)| bb), self, vis)
}
}
/// Define the domain of a dataflow problem.
@@ -433,16 +451,5 @@ impl<T: Idx> GenKill<T> for BitSet<T> {
}
}
// For compatibility with old framework
impl<T: Idx> GenKill<T> for crate::dataflow::GenKillSet<T> {
fn gen(&mut self, elem: T) {
self.gen(elem);
}
fn kill(&mut self, elem: T) {
self.kill(elem);
}
}
#[cfg(test)]
mod tests;

View file

@@ -14,7 +14,7 @@ use crate::util::elaborate_drops::DropFlagState;
use super::generic::{AnalysisDomain, GenKill, GenKillAnalysis};
use super::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
use super::{BottomValue, GenKillSet};
use super::BottomValue;
use super::drop_flag_effects_for_function_entry;
use super::drop_flag_effects_for_location;

View file

@@ -1,47 +1,39 @@
pub use super::*;
use crate::dataflow::generic::{Results, ResultsRefCursor};
use crate::dataflow::BitDenotation;
use crate::dataflow::MaybeBorrowedLocals;
use crate::dataflow::generic::{self as dataflow, GenKill, Results, ResultsRefCursor};
use crate::dataflow::BottomValue;
use rustc::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor};
use rustc::mir::*;
use std::cell::RefCell;
#[derive(Copy, Clone)]
pub struct MaybeStorageLive<'a, 'tcx> {
body: &'a Body<'tcx>,
}
pub struct MaybeStorageLive;
impl<'a, 'tcx> MaybeStorageLive<'a, 'tcx> {
pub fn new(body: &'a Body<'tcx>) -> Self {
MaybeStorageLive { body }
}
pub fn body(&self) -> &Body<'tcx> {
self.body
}
}
impl<'a, 'tcx> BitDenotation<'tcx> for MaybeStorageLive<'a, 'tcx> {
impl dataflow::AnalysisDomain<'tcx> for MaybeStorageLive {
type Idx = Local;
fn name() -> &'static str {
"maybe_storage_live"
}
fn bits_per_block(&self) -> usize {
self.body.local_decls.len()
const NAME: &'static str = "maybe_storage_live";
fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize {
body.local_decls.len()
}
fn start_block_effect(&self, on_entry: &mut BitSet<Local>) {
fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut BitSet<Self::Idx>) {
// The resume argument is live on function entry (we don't care about
// the `self` argument)
for arg in self.body.args_iter().skip(1) {
for arg in body.args_iter().skip(1) {
on_entry.insert(arg);
}
}
}
fn statement_effect(&self, trans: &mut GenKillSet<Local>, loc: Location) {
let stmt = &self.body[loc.block].statements[loc.statement_index];
impl dataflow::GenKillAnalysis<'tcx> for MaybeStorageLive {
fn statement_effect(
&self,
trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>,
_: Location,
) {
match stmt.kind {
StatementKind::StorageLive(l) => trans.gen(l),
StatementKind::StorageDead(l) => trans.kill(l),
@@ -49,22 +41,28 @@ impl<'a, 'tcx> BitDenotation<'tcx> for MaybeStorageLive<'a, 'tcx> {
}
}
fn terminator_effect(&self, _trans: &mut GenKillSet<Local>, _loc: Location) {
fn terminator_effect(
&self,
_trans: &mut impl GenKill<Self::Idx>,
_: &mir::Terminator<'tcx>,
_: Location,
) {
// Terminators have no effect
}
fn propagate_call_return(
fn call_return_effect(
&self,
_in_out: &mut BitSet<Local>,
_call_bb: mir::BasicBlock,
_dest_bb: mir::BasicBlock,
_dest_place: &mir::Place<'tcx>,
_trans: &mut impl GenKill<Self::Idx>,
_block: BasicBlock,
_func: &mir::Operand<'tcx>,
_args: &[mir::Operand<'tcx>],
_return_place: &mir::Place<'tcx>,
) {
// Nothing to do when a call returns successfully
}
}
impl<'a, 'tcx> BottomValue for MaybeStorageLive<'a, 'tcx> {
impl BottomValue for MaybeStorageLive {
/// bottom = dead
const BOTTOM_VALUE: bool = false;
}
@@ -73,60 +71,62 @@ type BorrowedLocalsResults<'a, 'tcx> = ResultsRefCursor<'a, 'a, 'tcx, MaybeBorro
/// Dataflow analysis that determines whether each local requires storage at a
/// given location; i.e. whether its storage can go away without being observed.
pub struct RequiresStorage<'mir, 'tcx> {
pub struct MaybeRequiresStorage<'mir, 'tcx> {
body: ReadOnlyBodyAndCache<'mir, 'tcx>,
borrowed_locals: RefCell<BorrowedLocalsResults<'mir, 'tcx>>,
}
impl<'mir, 'tcx: 'mir> RequiresStorage<'mir, 'tcx> {
impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> {
pub fn new(
body: ReadOnlyBodyAndCache<'mir, 'tcx>,
borrowed_locals: &'mir Results<'tcx, MaybeBorrowedLocals>,
) -> Self {
RequiresStorage {
MaybeRequiresStorage {
body,
borrowed_locals: RefCell::new(ResultsRefCursor::new(*body, borrowed_locals)),
}
}
pub fn body(&self) -> &Body<'tcx> {
&self.body
}
}
impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> {
impl<'mir, 'tcx> dataflow::AnalysisDomain<'tcx> for MaybeRequiresStorage<'mir, 'tcx> {
type Idx = Local;
fn name() -> &'static str {
"requires_storage"
}
fn bits_per_block(&self) -> usize {
self.body.local_decls.len()
const NAME: &'static str = "requires_storage";
fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize {
body.local_decls.len()
}
fn start_block_effect(&self, on_entry: &mut BitSet<Local>) {
fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut BitSet<Self::Idx>) {
// The resume argument is live on function entry (we don't care about
// the `self` argument)
for arg in self.body.args_iter().skip(1) {
for arg in body.args_iter().skip(1) {
on_entry.insert(arg);
}
}
}
fn before_statement_effect(&self, sets: &mut GenKillSet<Self::Idx>, loc: Location) {
let stmt = &self.body[loc.block].statements[loc.statement_index];
impl<'mir, 'tcx> dataflow::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'mir, 'tcx> {
fn before_statement_effect(
&self,
trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>,
loc: Location,
) {
// If a place is borrowed in a statement, it needs storage for that statement.
self.borrowed_locals.borrow().analysis().statement_effect(sets, stmt, loc);
self.borrowed_locals.borrow().analysis().statement_effect(trans, stmt, loc);
// If a place is assigned to in a statement, it needs storage for that statement.
match &stmt.kind {
StatementKind::StorageDead(l) => sets.kill(*l),
StatementKind::StorageDead(l) => trans.kill(*l),
// If a place is assigned to in a statement, it needs storage for that statement.
StatementKind::Assign(box (place, _))
| StatementKind::SetDiscriminant { box place, .. } => {
sets.gen(place.local);
trans.gen(place.local);
}
StatementKind::InlineAsm(box InlineAsm { outputs, .. }) => {
for place in &**outputs {
sets.gen(place.local);
StatementKind::InlineAsm(asm) => {
for place in &*asm.outputs {
trans.gen(place.local);
}
}
@@ -140,22 +140,30 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> {
}
}
fn statement_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
fn statement_effect(
&self,
trans: &mut impl GenKill<Self::Idx>,
_: &mir::Statement<'tcx>,
loc: Location,
) {
// If we move from a place, then it only stops needing storage *after*
// that statement.
self.check_for_move(sets, loc);
self.check_for_move(trans, loc);
}
fn before_terminator_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
let terminator = self.body[loc.block].terminator();
fn before_terminator_effect(
&self,
trans: &mut impl GenKill<Self::Idx>,
terminator: &mir::Terminator<'tcx>,
loc: Location,
) {
// If a place is borrowed in a terminator, it needs storage for that terminator.
self.borrowed_locals.borrow().analysis().terminator_effect(sets, terminator, loc);
self.borrowed_locals.borrow().analysis().terminator_effect(trans, terminator, loc);
match &terminator.kind {
TerminatorKind::Call { destination: Some((Place { local, .. }, _)), .. }
| TerminatorKind::Yield { resume_arg: Place { local, .. }, .. } => {
sets.gen(*local);
TerminatorKind::Call { destination: Some((place, _)), .. }
| TerminatorKind::Yield { resume_arg: place, .. } => {
trans.gen(place.local);
}
// Nothing to do for these. Match exhaustively so this fails to compile when new
@@ -176,14 +184,19 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> {
}
}
fn terminator_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
match &self.body[loc.block].terminator().kind {
fn terminator_effect(
&self,
trans: &mut impl GenKill<Self::Idx>,
terminator: &mir::Terminator<'tcx>,
loc: Location,
) {
match &terminator.kind {
// For call terminators the destination requires storage for the call
// and after the call returns successfully, but not after a panic.
// Since `propagate_call_unwind` doesn't exist, we have to kill the
// destination here, and then gen it again in `propagate_call_return`.
TerminatorKind::Call { destination: Some((Place { local, .. }, _)), .. } => {
sets.kill(*local);
// destination here, and then gen it again in `call_return_effect`.
TerminatorKind::Call { destination: Some((place, _)), .. } => {
trans.kill(place.local);
}
// Nothing to do for these. Match exhaustively so this fails to compile when new
@@ -204,45 +217,49 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> {
| TerminatorKind::Unreachable => {}
}
self.check_for_move(sets, loc);
self.check_for_move(trans, loc);
}
fn propagate_call_return(
fn call_return_effect(
&self,
in_out: &mut BitSet<Local>,
_call_bb: mir::BasicBlock,
_dest_bb: mir::BasicBlock,
dest_place: &mir::Place<'tcx>,
trans: &mut impl GenKill<Self::Idx>,
_block: BasicBlock,
_func: &mir::Operand<'tcx>,
_args: &[mir::Operand<'tcx>],
return_place: &mir::Place<'tcx>,
) {
in_out.insert(dest_place.local);
trans.gen(return_place.local);
}
}
impl<'mir, 'tcx> RequiresStorage<'mir, 'tcx> {
impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> {
/// Kill locals that are fully moved and have not been borrowed.
fn check_for_move(&self, sets: &mut GenKillSet<Local>, loc: Location) {
let mut visitor = MoveVisitor { sets, borrowed_locals: &self.borrowed_locals };
fn check_for_move(&self, trans: &mut impl GenKill<Local>, loc: Location) {
let mut visitor = MoveVisitor { trans, borrowed_locals: &self.borrowed_locals };
visitor.visit_location(self.body, loc);
}
}
impl<'mir, 'tcx> BottomValue for RequiresStorage<'mir, 'tcx> {
impl<'mir, 'tcx> BottomValue for MaybeRequiresStorage<'mir, 'tcx> {
/// bottom = dead
const BOTTOM_VALUE: bool = false;
}
struct MoveVisitor<'a, 'mir, 'tcx> {
struct MoveVisitor<'a, 'mir, 'tcx, T> {
borrowed_locals: &'a RefCell<BorrowedLocalsResults<'mir, 'tcx>>,
sets: &'a mut GenKillSet<Local>,
trans: &'a mut T,
}
impl<'a, 'mir: 'a, 'tcx> Visitor<'tcx> for MoveVisitor<'a, 'mir, 'tcx> {
impl<'a, 'mir, 'tcx, T> Visitor<'tcx> for MoveVisitor<'a, 'mir, 'tcx, T>
where
T: GenKill<Local>,
{
fn visit_local(&mut self, local: &Local, context: PlaceContext, loc: Location) {
if PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) == context {
let mut borrowed_locals = self.borrowed_locals.borrow_mut();
borrowed_locals.seek_before(loc);
if !borrowed_locals.contains(*local) {
self.sets.kill(*local);
self.trans.kill(*local);
}
}
}

View file

@ -25,7 +25,7 @@ pub use self::impls::DefinitelyInitializedPlaces;
pub use self::impls::EverInitializedPlaces;
pub use self::impls::{MaybeBorrowedLocals, MaybeMutBorrowedLocals};
pub use self::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
pub use self::impls::{MaybeStorageLive, RequiresStorage};
pub use self::impls::{MaybeRequiresStorage, MaybeStorageLive};
use self::move_paths::MoveData;

View file

@@ -49,10 +49,8 @@
//! For generators with state 1 (returned) and state 2 (poisoned) it does nothing.
//! Otherwise it drops all the values in scope at the last suspension point.
use crate::dataflow::generic::{Analysis, ResultsCursor};
use crate::dataflow::{do_dataflow, DataflowResultsCursor, DebugFormatted};
use crate::dataflow::{DataflowResults, DataflowResultsConsumer, FlowAtLocation};
use crate::dataflow::{MaybeBorrowedLocals, MaybeStorageLive, RequiresStorage};
use crate::dataflow::generic::{self as dataflow, Analysis};
use crate::dataflow::{MaybeBorrowedLocals, MaybeRequiresStorage, MaybeStorageLive};
use crate::transform::no_landing_pads::no_landing_pads;
use crate::transform::simplify;
use crate::transform::{MirPass, MirSource};
@@ -467,18 +465,15 @@ fn locals_live_across_suspend_points(
source: MirSource<'tcx>,
movable: bool,
) -> LivenessInfo {
let dead_unwinds = BitSet::new_empty(body.basic_blocks().len());
let def_id = source.def_id();
let body_ref: &Body<'_> = &body;
// Calculate when MIR locals have live storage. This gives us an upper bound of their
// lifetimes.
let storage_live_analysis = MaybeStorageLive::new(body_ref);
let storage_live_results =
do_dataflow(tcx, body_ref, def_id, &[], &dead_unwinds, storage_live_analysis, |bd, p| {
DebugFormatted::new(&bd.body().local_decls[p])
});
let mut storage_live_cursor = DataflowResultsCursor::new(&storage_live_results, body_ref);
let mut storage_live = MaybeStorageLive
.into_engine(tcx, body_ref, def_id)
.iterate_to_fixpoint()
.into_results_cursor(body_ref);
// Find the MIR locals which do not use StorageLive/StorageDead statements.
// The storage of these locals is always live.
@@ -490,22 +485,16 @@ fn locals_live_across_suspend_points(
let borrowed_locals_results =
MaybeBorrowedLocals::all_borrows().into_engine(tcx, body_ref, def_id).iterate_to_fixpoint();
let mut borrowed_locals_cursor = ResultsCursor::new(body_ref, &borrowed_locals_results);
let mut borrowed_locals_cursor =
dataflow::ResultsCursor::new(body_ref, &borrowed_locals_results);
// Calculate the MIR locals that we actually need to keep storage around
// for.
let requires_storage_analysis = RequiresStorage::new(body, &borrowed_locals_results);
let requires_storage_results = do_dataflow(
tcx,
body_ref,
def_id,
&[],
&dead_unwinds,
requires_storage_analysis,
|bd, p| DebugFormatted::new(&bd.body().local_decls[p]),
);
let requires_storage_results = MaybeRequiresStorage::new(body, &borrowed_locals_results)
.into_engine(tcx, body_ref, def_id)
.iterate_to_fixpoint();
let mut requires_storage_cursor =
DataflowResultsCursor::new(&requires_storage_results, body_ref);
dataflow::ResultsCursor::new(body_ref, &requires_storage_results);
// Calculate the liveness of MIR locals ignoring borrows.
let mut live_locals = liveness::LiveVarSet::new_empty(body.local_decls.len());
@@ -534,14 +523,14 @@ fn locals_live_across_suspend_points(
liveness.outs[block].union(borrowed_locals_cursor.get());
}
storage_live_cursor.seek(loc);
let storage_liveness = storage_live_cursor.get();
storage_live.seek_before(loc);
let storage_liveness = storage_live.get();
// Store the storage liveness for later use so we can restore the state
// after a suspension point
storage_liveness_map.insert(block, storage_liveness.clone());
requires_storage_cursor.seek(loc);
requires_storage_cursor.seek_before(loc);
let storage_required = requires_storage_cursor.get().clone();
// Locals live are live at this point only if they are used across
@@ -611,7 +600,7 @@ fn compute_storage_conflicts(
body: &'mir Body<'tcx>,
stored_locals: &liveness::LiveVarSet,
ignored: &StorageIgnored,
requires_storage: DataflowResults<'tcx, RequiresStorage<'mir, 'tcx>>,
requires_storage: dataflow::Results<'tcx, MaybeRequiresStorage<'mir, 'tcx>>,
) -> BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal> {
assert_eq!(body.local_decls.len(), ignored.0.domain_size());
assert_eq!(body.local_decls.len(), stored_locals.domain_size());
@@ -629,8 +618,11 @@ fn compute_storage_conflicts(
stored_locals: &stored_locals,
local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()),
};
let mut state = FlowAtLocation::new(requires_storage);
visitor.analyze_results(&mut state);
// Visit only reachable basic blocks. The exact order is not important.
let reachable_blocks = traversal::preorder(body).map(|(bb, _)| bb);
requires_storage.visit_with(body, reachable_blocks, &mut visitor);
let local_conflicts = visitor.local_conflicts;
// Compress the matrix using only stored locals (Local -> GeneratorSavedLocal).
@@ -659,60 +651,45 @@ fn compute_storage_conflicts(
storage_conflicts
}
struct StorageConflictVisitor<'body, 'tcx, 's> {
body: &'body Body<'tcx>,
struct StorageConflictVisitor<'mir, 'tcx, 's> {
body: &'mir Body<'tcx>,
stored_locals: &'s liveness::LiveVarSet,
// FIXME(tmandry): Consider using sparse bitsets here once we have good
// benchmarks for generators.
local_conflicts: BitMatrix<Local, Local>,
}
impl<'body, 'tcx, 's> DataflowResultsConsumer<'body, 'tcx>
for StorageConflictVisitor<'body, 'tcx, 's>
{
type FlowState = FlowAtLocation<'tcx, RequiresStorage<'body, 'tcx>>;
impl dataflow::ResultsVisitor<'mir, 'tcx> for StorageConflictVisitor<'mir, 'tcx, '_> {
type FlowState = BitSet<Local>;
fn body(&self) -> &'body Body<'tcx> {
self.body
}
fn visit_block_entry(&mut self, block: BasicBlock, flow_state: &Self::FlowState) {
// statement_index is only used for logging, so this is fine.
self.apply_state(flow_state, Location { block, statement_index: 0 });
}
fn visit_statement_entry(
fn visit_statement(
&mut self,
state: &Self::FlowState,
_statement: &'mir Statement<'tcx>,
loc: Location,
_stmt: &Statement<'tcx>,
flow_state: &Self::FlowState,
) {
self.apply_state(flow_state, loc);
self.apply_state(state, loc);
}
fn visit_terminator_entry(
fn visit_terminator(
&mut self,
state: &Self::FlowState,
_terminator: &'mir Terminator<'tcx>,
loc: Location,
_term: &Terminator<'tcx>,
flow_state: &Self::FlowState,
) {
self.apply_state(flow_state, loc);
self.apply_state(state, loc);
}
}
impl<'body, 'tcx, 's> StorageConflictVisitor<'body, 'tcx, 's> {
fn apply_state(
&mut self,
flow_state: &FlowAtLocation<'tcx, RequiresStorage<'body, 'tcx>>,
loc: Location,
) {
fn apply_state(&mut self, flow_state: &BitSet<Local>, loc: Location) {
// Ignore unreachable blocks.
match self.body.basic_blocks()[loc.block].terminator().kind {
TerminatorKind::Unreachable => return,
_ => (),
};
let mut eligible_storage_live = flow_state.as_dense().clone();
let mut eligible_storage_live = flow_state.clone();
eligible_storage_live.intersect(&self.stored_locals);
for local in eligible_storage_live.iter() {