Move initialization dataflow impls into their own module.

Camille GILLOT 2023-05-06 14:13:12 +00:00
parent 760881b29d
commit 8726cbc75f
4 changed files with 756 additions and 749 deletions
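For orientation (not part of this commit): the analyses moved here are normally driven through rustc_mir_dataflow's engine and results-cursor API. A minimal sketch of querying `MaybeInitializedPlaces` at one location, assuming a `MoveDataParamEnv` has already been built for the body, might look like this:

```rust
// Illustrative sketch only; not part of this commit.
use rustc_middle::mir::{Body, Location};
use rustc_middle::ty::TyCtxt;
use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
use rustc_mir_dataflow::move_paths::MovePathIndex;
use rustc_mir_dataflow::{Analysis, MoveDataParamEnv};

/// Returns whether the move path `mpi` is possibly initialized on entry to `loc`.
fn is_maybe_init_at<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    mdpe: &MoveDataParamEnv<'tcx>,
    mpi: MovePathIndex,
    loc: Location,
) -> bool {
    // Run the gen/kill analysis to fixpoint and wrap the results in a cursor.
    let mut cursor = MaybeInitializedPlaces::new(tcx, body, mdpe)
        .into_engine(tcx, body)
        .iterate_to_fixpoint()
        .into_results_cursor(body);
    // Position the cursor just before the statement/terminator at `loc`.
    cursor.seek_before_primary_effect(loc);
    // The domain is a `ChunkedBitSet<MovePathIndex>`; a set bit means "maybe initialized".
    cursor.get().contains(mpi)
}
```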

compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs

@@ -1,9 +1,10 @@
-use super::*;
+use rustc_index::bit_set::BitSet;
+use crate::{AnalysisDomain, CallReturnPlaces, GenKill, GenKillAnalysis};
 use rustc_middle::mir::visit::Visitor;
 use rustc_middle::mir::*;
-use crate::framework::CallReturnPlaces;
-use crate::{AnalysisDomain, GenKill, GenKillAnalysis};
 /// A dataflow analysis that tracks whether a pointer or reference could possibly exist that points
 /// to a given local.
 ///
@@ -23,12 +24,12 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeBorrowedLocals {
 type Domain = BitSet<Local>;
 const NAME: &'static str = "maybe_borrowed_locals";
-fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
 // bottom = unborrowed
 BitSet::new_empty(body.local_decls().len())
 }
-fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
+fn initialize_start_block(&self, _: &Body<'tcx>, _: &mut Self::Domain) {
 // No locals are aliased on function entry
 }
 }
@@ -39,7 +40,7 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeBorrowedLocals {
 fn statement_effect(
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
-statement: &mir::Statement<'tcx>,
+statement: &Statement<'tcx>,
 location: Location,
 ) {
 self.transfer_function(trans).visit_statement(statement, location);
@@ -48,7 +49,7 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeBorrowedLocals {
 fn terminator_effect(
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
-terminator: &mir::Terminator<'tcx>,
+terminator: &Terminator<'tcx>,
 location: Location,
 ) {
 self.transfer_function(trans).visit_terminator(terminator, location);
@@ -57,7 +58,7 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeBorrowedLocals {
 fn call_return_effect(
 &mut self,
 _trans: &mut impl GenKill<Self::Idx>,
-_block: mir::BasicBlock,
+_block: BasicBlock,
 _return_places: CallReturnPlaces<'_, 'tcx>,
 ) {
 }
@@ -82,37 +83,37 @@ where
 }
 }
-fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
+fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
 self.super_rvalue(rvalue, location);
 match rvalue {
-mir::Rvalue::AddressOf(_, borrowed_place) | mir::Rvalue::Ref(_, _, borrowed_place) => {
+Rvalue::AddressOf(_, borrowed_place) | Rvalue::Ref(_, _, borrowed_place) => {
 if !borrowed_place.is_indirect() {
 self.trans.gen(borrowed_place.local);
 }
 }
-mir::Rvalue::Cast(..)
-| mir::Rvalue::ShallowInitBox(..)
-| mir::Rvalue::Use(..)
-| mir::Rvalue::ThreadLocalRef(..)
-| mir::Rvalue::Repeat(..)
-| mir::Rvalue::Len(..)
-| mir::Rvalue::BinaryOp(..)
-| mir::Rvalue::CheckedBinaryOp(..)
-| mir::Rvalue::NullaryOp(..)
-| mir::Rvalue::UnaryOp(..)
-| mir::Rvalue::Discriminant(..)
-| mir::Rvalue::Aggregate(..)
-| mir::Rvalue::CopyForDeref(..) => {}
+Rvalue::Cast(..)
+| Rvalue::ShallowInitBox(..)
+| Rvalue::Use(..)
+| Rvalue::ThreadLocalRef(..)
+| Rvalue::Repeat(..)
+| Rvalue::Len(..)
+| Rvalue::BinaryOp(..)
+| Rvalue::CheckedBinaryOp(..)
+| Rvalue::NullaryOp(..)
+| Rvalue::UnaryOp(..)
+| Rvalue::Discriminant(..)
+| Rvalue::Aggregate(..)
+| Rvalue::CopyForDeref(..) => {}
 }
 }
-fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
+fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
 self.super_terminator(terminator, location);
 match terminator.kind {
-mir::TerminatorKind::Drop { place: dropped_place, .. } => {
+TerminatorKind::Drop { place: dropped_place, .. } => {
 // Drop terminators may call custom drop glue (`Drop::drop`), which takes `&mut
 // self` as a parameter. In the general case, a drop impl could launder that
 // reference into the surrounding environment through a raw pointer, thus creating

compiler/rustc_mir_dataflow/src/impls/initialized.rs

@@ -0,0 +1,705 @@
use rustc_index::bit_set::{BitSet, ChunkedBitSet};
use rustc_index::Idx;
use rustc_middle::mir::{self, Body, Location};
use rustc_middle::ty::{self, TyCtxt};
use crate::drop_flag_effects_for_function_entry;
use crate::drop_flag_effects_for_location;
use crate::elaborate_drops::DropFlagState;
use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
use crate::on_lookup_result_bits;
use crate::MoveDataParamEnv;
use crate::{drop_flag_effects, on_all_children_bits};
use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // maybe-init:
/// // {}
/// let a = S; let mut b = S; let c; let d; // {a, b}
///
/// if pred {
/// drop(a); // { b}
/// b = S; // { b}
///
/// } else {
/// drop(b); // {a}
/// d = S; // {a, d}
///
/// } // {a, b, d}
///
/// c = S; // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place *must* be initialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeUninitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // maybe-uninit:
/// // {a, b, c, d}
/// let a = S; let mut b = S; let c; let d; // { c, d}
///
/// if pred {
/// drop(a); // {a, c, d}
/// b = S; // {a, c, d}
///
/// } else {
/// drop(b); // { b, c, d}
/// d = S; // { b, c }
///
/// } // {a, b, c, d}
///
/// c = S; // {a, b, d}
/// }
/// ```
///
/// To determine whether a place *must* be uninitialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeInitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
mark_inactive_variants_as_uninit: bool,
}
impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
}
/// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
/// enum discriminant.
///
/// This is correct in a vacuum but is not the default because it causes problems in the borrow
/// checker, where this information gets propagated along `FakeEdge`s.
pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
self.mark_inactive_variants_as_uninit = true;
self
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `DefinitelyInitializedPlaces` tracks all places that are definitely
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // definite-init:
/// // { }
/// let a = S; let mut b = S; let c; let d; // {a, b }
///
/// if pred {
/// drop(a); // { b, }
/// b = S; // { b, }
///
/// } else {
/// drop(b); // {a, }
/// d = S; // {a, d}
///
/// } // { }
///
/// c = S; // { c }
/// }
/// ```
///
/// To determine whether a place *may* be uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of this data.
///
/// Similarly, at a given `drop` statement, the set-difference between
/// this data and `MaybeInitializedPlaces` yields the set of places
/// that would require a dynamic drop-flag at that statement.
pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
DefinitelyInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // ever-init:
/// // { }
/// let a = S; let mut b = S; let c; let d; // {a, b }
///
/// if pred {
/// drop(a); // {a, b, }
/// b = S; // {a, b, }
///
/// } else {
/// drop(b); // {a, b, }
/// d = S; // {a, b, d }
///
/// } // {a, b, d }
///
/// c = S; // {a, b, c, d }
/// }
/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
#[allow(dead_code)]
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
EverInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.kill(path),
DropFlagState::Present => trans.gen(path),
}
}
}
impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.gen(path),
DropFlagState::Present => trans.kill(path),
}
}
}
impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.kill(path),
DropFlagState::Present => trans.gen(path),
}
}
}
impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<MovePathIndex>;
const NAME: &'static str = "maybe_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = uninitialized
ChunkedBitSet::new_empty(self.move_data().move_paths.len())
}
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.insert(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
// Mark all places as "maybe init" if they are mutably borrowed. See #90752.
if self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration
&& let Some((_, rvalue)) = statement.kind.as_assign()
&& let mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
// FIXME: Does `&raw const foo` allow mutation? See #90413.
| mir::Rvalue::AddressOf(_, place) = rvalue
&& let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref())
{
on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
trans.gen(child);
})
}
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 1 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.gen(mpi);
},
);
});
}
fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self,
block: mir::BasicBlock,
discr: &mir::Operand<'tcx>,
edge_effects: &mut impl SwitchIntEdgeEffects<G>,
) {
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
let enum_ = discr.place().and_then(|discr| {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
// are yielded by `AdtDef::discriminants`. We rely on this to match each
// discriminant in `values` to its corresponding variant in linear time.
let (variant, _) = discriminants
.find(|&(_, discr)| discr.val == value)
.expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
// Kill all move paths that correspond to variants we know to be inactive along this
// particular outgoing edge of a `SwitchInt`.
drop_flag_effects::on_all_inactive_variants(
self.tcx,
self.body,
self.move_data(),
enum_place,
variant,
|mpi| trans.kill(mpi),
);
});
}
}
impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<MovePathIndex>;
const NAME: &'static str = "maybe_uninit";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = initialized (start_block_effect counters this at outset)
ChunkedBitSet::new_empty(self.move_data().move_paths.len())
}
// sets on_entry bits for Arg places
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
// set all bits to 1 (uninit) before gathering counter-evidence
state.insert_all();
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.remove(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
// Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
// mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 0 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.kill(mpi);
},
);
});
}
fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self,
block: mir::BasicBlock,
discr: &mir::Operand<'tcx>,
edge_effects: &mut impl SwitchIntEdgeEffects<G>,
) {
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
if !self.mark_inactive_variants_as_uninit {
return;
}
let enum_ = discr.place().and_then(|discr| {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
// are yielded by `AdtDef::discriminants`. We rely on this to match each
// discriminant in `values` to its corresponding variant in linear time.
let (variant, _) = discriminants
.find(|&(_, discr)| discr.val == value)
.expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
// Mark all move paths that correspond to variants other than this one as maybe
// uninitialized (in reality, they are *definitely* uninitialized).
drop_flag_effects::on_all_inactive_variants(
self.tcx,
self.body,
self.move_data(),
enum_place,
variant,
|mpi| trans.gen(mpi),
);
});
}
}
impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
/// Use set intersection as the join operator.
type Domain = lattice::Dual<BitSet<MovePathIndex>>;
const NAME: &'static str = "definite_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = initialized (start_block_effect counters this at outset)
lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
}
// sets on_entry bits for Arg places
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
state.0.clear();
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.0.insert(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
})
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
})
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 1 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.gen(mpi);
},
);
});
}
}
impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<InitIndex>;
const NAME: &'static str = "ever_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = no initialized variables by default
ChunkedBitSet::new_empty(self.move_data().inits.len())
}
fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
for arg_init in 0..body.arg_count {
state.insert(InitIndex::new(arg_init));
}
}
}
impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
type Idx = InitIndex;
#[instrument(skip(self, trans), level = "debug")]
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>,
location: Location,
) {
let move_data = self.move_data();
let init_path_map = &move_data.init_path_map;
let init_loc_map = &move_data.init_loc_map;
let rev_lookup = &move_data.rev_lookup;
debug!("initializes move_indexes {:?}", &init_loc_map[location]);
trans.gen_all(init_loc_map[location].iter().copied());
if let mir::StatementKind::StorageDead(local) = stmt.kind {
// End inits for StorageDead, so that an immutable variable can
// be reinitialized on the next iteration of the loop.
let move_path_index = rev_lookup.find_local(local);
debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
trans.kill_all(init_path_map[move_path_index].iter().copied());
}
}
#[instrument(skip(self, trans, _terminator), level = "debug")]
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
let (body, move_data) = (self.body, self.move_data());
let term = body[location.block].terminator();
let init_loc_map = &move_data.init_loc_map;
debug!(?term);
debug!("initializes move_indexes {:?}", init_loc_map[location]);
trans.gen_all(
init_loc_map[location]
.iter()
.filter(|init_index| {
move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
})
.copied(),
);
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
block: mir::BasicBlock,
_return_places: CallReturnPlaces<'_, 'tcx>,
) {
let move_data = self.move_data();
let init_loc_map = &move_data.init_loc_map;
let call_loc = self.body.terminator_loc(block);
for init_index in &init_loc_map[call_loc] {
trans.gen(*init_index);
}
}
}
/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///
/// ```text
/// ...
/// _42 = discriminant(_1)
/// SwitchInt(_42, ..)
/// ```
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant<'mir, 'tcx>(
tcx: TyCtxt<'tcx>,
body: &'mir mir::Body<'tcx>,
block: &'mir mir::BasicBlockData<'tcx>,
switch_on: mir::Place<'tcx>,
) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
for statement in block.statements.iter().rev() {
match &statement.kind {
mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
if *lhs == switch_on =>
{
match discriminated.ty(body, tcx).ty.kind() {
ty::Adt(def, _) => return Some((*discriminated, *def)),
// `Rvalue::Discriminant` is also used to get the active yield point for a
// generator, but we do not need edge-specific effects in that case. This may
// change in the future.
ty::Generator(..) => return None,
t => bug!("`discriminant` called on unexpected type {:?}", t),
}
}
mir::StatementKind::Coverage(_) => continue,
_ => return None,
}
}
None
}
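As the doc comments on `MaybeInitializedPlaces` and `MaybeUninitializedPlaces` above note, the places needing a dynamic drop flag at a given point are those in the intersection of the two analyses' results. A hedged sketch of computing that set, again not part of this commit and assuming the same engine/cursor API as in the earlier sketch:

```rust
// Illustrative sketch only; not part of this commit.
use rustc_middle::mir::{Body, Location};
use rustc_middle::ty::TyCtxt;
use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use rustc_mir_dataflow::move_paths::MovePathIndex;
use rustc_mir_dataflow::{Analysis, MoveDataParamEnv};

/// Move paths that are both maybe-initialized and maybe-uninitialized before `loc`,
/// i.e. the places whose drop would need a runtime drop flag at that point.
fn dynamic_drop_flag_candidates<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    mdpe: &MoveDataParamEnv<'tcx>,
    loc: Location,
) -> Vec<MovePathIndex> {
    let mut init = MaybeInitializedPlaces::new(tcx, body, mdpe)
        .into_engine(tcx, body)
        .iterate_to_fixpoint()
        .into_results_cursor(body);
    let mut uninit = MaybeUninitializedPlaces::new(tcx, body, mdpe)
        .into_engine(tcx, body)
        .iterate_to_fixpoint()
        .into_results_cursor(body);
    init.seek_before_primary_effect(loc);
    uninit.seek_before_primary_effect(loc);
    // Intersect the two "maybe" sets: only paths present in both are in doubt at runtime.
    init.get().iter().filter(|mpi| uninit.get().contains(*mpi)).collect()
}
```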

compiler/rustc_mir_dataflow/src/impls/mod.rs

@@ -2,719 +2,18 @@
 //! bitvectors attached to each basic block, represented via a
 //! zero-sized structure.
-use rustc_index::bit_set::{BitSet, ChunkedBitSet};
-use rustc_index::Idx;
-use rustc_middle::mir::{self, Body, Location};
-use rustc_middle::ty::{self, TyCtxt};
-use crate::drop_flag_effects_for_function_entry;
-use crate::drop_flag_effects_for_location;
-use crate::elaborate_drops::DropFlagState;
-use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
-use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
-use crate::on_lookup_result_bits;
-use crate::MoveDataParamEnv;
-use crate::{drop_flag_effects, on_all_children_bits};
-use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
 mod borrowed_locals;
+mod initialized;
 mod liveness;
 mod storage_liveness;
 pub use self::borrowed_locals::borrowed_locals;
 pub use self::borrowed_locals::MaybeBorrowedLocals;
+pub use self::initialized::{
+DefinitelyInitializedPlaces, EverInitializedPlaces, MaybeInitializedPlaces,
+MaybeUninitializedPlaces,
+};
 pub use self::liveness::MaybeLiveLocals;
 pub use self::liveness::MaybeTransitiveLiveLocals;
 pub use self::liveness::TransferFunction as LivenessTransferFunction;
 pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageDead, MaybeStorageLive};
/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // maybe-init:
/// // {}
/// let a = S; let mut b = S; let c; let d; // {a, b}
///
/// if pred {
/// drop(a); // { b}
/// b = S; // { b}
///
/// } else {
/// drop(b); // {a}
/// d = S; // {a, d}
///
/// } // {a, b, d}
///
/// c = S; // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place *must* be initialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeUninitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // maybe-uninit:
/// // {a, b, c, d}
/// let a = S; let mut b = S; let c; let d; // { c, d}
///
/// if pred {
/// drop(a); // {a, c, d}
/// b = S; // {a, c, d}
///
/// } else {
/// drop(b); // { b, c, d}
/// d = S; // { b, c }
///
/// } // {a, b, c, d}
///
/// c = S; // {a, b, d}
/// }
/// ```
///
/// To determine whether a place *must* be uninitialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeInitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
mark_inactive_variants_as_uninit: bool,
}
impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
}
/// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
/// enum discriminant.
///
/// This is correct in a vacuum but is not the default because it causes problems in the borrow
/// checker, where this information gets propagated along `FakeEdge`s.
pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
self.mark_inactive_variants_as_uninit = true;
self
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `DefinitelyInitializedPlaces` tracks all places that are definitely
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // definite-init:
/// // { }
/// let a = S; let mut b = S; let c; let d; // {a, b }
///
/// if pred {
/// drop(a); // { b, }
/// b = S; // { b, }
///
/// } else {
/// drop(b); // {a, }
/// d = S; // {a, d}
///
/// } // { }
///
/// c = S; // { c }
/// }
/// ```
///
/// To determine whether a place *may* be uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of this data.
///
/// Similarly, at a given `drop` statement, the set-difference between
/// this data and `MaybeInitializedPlaces` yields the set of places
/// that would require a dynamic drop-flag at that statement.
pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
DefinitelyInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // ever-init:
/// // { }
/// let a = S; let mut b = S; let c; let d; // {a, b }
///
/// if pred {
/// drop(a); // {a, b, }
/// b = S; // {a, b, }
///
/// } else {
/// drop(b); // {a, b, }
/// d = S; // {a, b, d }
///
/// } // {a, b, d }
///
/// c = S; // {a, b, c, d }
/// }
/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
#[allow(dead_code)]
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
EverInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.kill(path),
DropFlagState::Present => trans.gen(path),
}
}
}
impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.gen(path),
DropFlagState::Present => trans.kill(path),
}
}
}
impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.kill(path),
DropFlagState::Present => trans.gen(path),
}
}
}
impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<MovePathIndex>;
const NAME: &'static str = "maybe_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = uninitialized
ChunkedBitSet::new_empty(self.move_data().move_paths.len())
}
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.insert(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
// Mark all places as "maybe init" if they are mutably borrowed. See #90752.
if self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration
&& let Some((_, rvalue)) = statement.kind.as_assign()
&& let mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
// FIXME: Does `&raw const foo` allow mutation? See #90413.
| mir::Rvalue::AddressOf(_, place) = rvalue
&& let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref())
{
on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
trans.gen(child);
})
}
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 1 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.gen(mpi);
},
);
});
}
fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self,
block: mir::BasicBlock,
discr: &mir::Operand<'tcx>,
edge_effects: &mut impl SwitchIntEdgeEffects<G>,
) {
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
let enum_ = discr.place().and_then(|discr| {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
// are yielded by `AdtDef::discriminants`. We rely on this to match each
// discriminant in `values` to its corresponding variant in linear time.
let (variant, _) = discriminants
.find(|&(_, discr)| discr.val == value)
.expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
// Kill all move paths that correspond to variants we know to be inactive along this
// particular outgoing edge of a `SwitchInt`.
drop_flag_effects::on_all_inactive_variants(
self.tcx,
self.body,
self.move_data(),
enum_place,
variant,
|mpi| trans.kill(mpi),
);
});
}
}
impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<MovePathIndex>;
const NAME: &'static str = "maybe_uninit";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = initialized (start_block_effect counters this at outset)
ChunkedBitSet::new_empty(self.move_data().move_paths.len())
}
// sets on_entry bits for Arg places
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
// set all bits to 1 (uninit) before gathering counter-evidence
state.insert_all();
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.remove(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
// Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
// mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 0 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.kill(mpi);
},
);
});
}
fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self,
block: mir::BasicBlock,
discr: &mir::Operand<'tcx>,
edge_effects: &mut impl SwitchIntEdgeEffects<G>,
) {
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
if !self.mark_inactive_variants_as_uninit {
return;
}
let enum_ = discr.place().and_then(|discr| {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
// are yielded by `AdtDef::discriminants`. We rely on this to match each
// discriminant in `values` to its corresponding variant in linear time.
let (variant, _) = discriminants
.find(|&(_, discr)| discr.val == value)
.expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
// Mark all move paths that correspond to variants other than this one as maybe
// uninitialized (in reality, they are *definitely* uninitialized).
drop_flag_effects::on_all_inactive_variants(
self.tcx,
self.body,
self.move_data(),
enum_place,
variant,
|mpi| trans.gen(mpi),
);
});
}
}
impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
/// Use set intersection as the join operator.
type Domain = lattice::Dual<BitSet<MovePathIndex>>;
const NAME: &'static str = "definite_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = initialized (start_block_effect counters this at outset)
lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
}
// sets on_entry bits for Arg places
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
state.0.clear();
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.0.insert(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
})
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
})
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 1 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.gen(mpi);
},
);
});
}
}
impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<InitIndex>;
const NAME: &'static str = "ever_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = no initialized variables by default
ChunkedBitSet::new_empty(self.move_data().inits.len())
}
fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
for arg_init in 0..body.arg_count {
state.insert(InitIndex::new(arg_init));
}
}
}
impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
type Idx = InitIndex;
#[instrument(skip(self, trans), level = "debug")]
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>,
location: Location,
) {
let move_data = self.move_data();
let init_path_map = &move_data.init_path_map;
let init_loc_map = &move_data.init_loc_map;
let rev_lookup = &move_data.rev_lookup;
debug!("initializes move_indexes {:?}", &init_loc_map[location]);
trans.gen_all(init_loc_map[location].iter().copied());
if let mir::StatementKind::StorageDead(local) = stmt.kind {
// End inits for StorageDead, so that an immutable variable can
// be reinitialized on the next iteration of the loop.
let move_path_index = rev_lookup.find_local(local);
debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
trans.kill_all(init_path_map[move_path_index].iter().copied());
}
}
#[instrument(skip(self, trans, _terminator), level = "debug")]
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
let (body, move_data) = (self.body, self.move_data());
let term = body[location.block].terminator();
let init_loc_map = &move_data.init_loc_map;
debug!(?term);
debug!("initializes move_indexes {:?}", init_loc_map[location]);
trans.gen_all(
init_loc_map[location]
.iter()
.filter(|init_index| {
move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
})
.copied(),
);
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
block: mir::BasicBlock,
_return_places: CallReturnPlaces<'_, 'tcx>,
) {
let move_data = self.move_data();
let init_loc_map = &move_data.init_loc_map;
let call_loc = self.body.terminator_loc(block);
for init_index in &init_loc_map[call_loc] {
trans.gen(*init_index);
}
}
}
/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///
/// ```text
/// ...
/// _42 = discriminant(_1)
/// SwitchInt(_42, ..)
/// ```
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant<'mir, 'tcx>(
tcx: TyCtxt<'tcx>,
body: &'mir mir::Body<'tcx>,
block: &'mir mir::BasicBlockData<'tcx>,
switch_on: mir::Place<'tcx>,
) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
for statement in block.statements.iter().rev() {
match &statement.kind {
mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
if *lhs == switch_on =>
{
match discriminated.ty(body, tcx).ty.kind() {
ty::Adt(def, _) => return Some((*discriminated, *def)),
// `Rvalue::Discriminant` is also used to get the active yield point for a
// generator, but we do not need edge-specific effects in that case. This may
// change in the future.
ty::Generator(..) => return None,
t => bug!("`discriminant` called on unexpected type {:?}", t),
}
}
mir::StatementKind::Coverage(_) => continue,
_ => return None,
}
}
None
}
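Because the new module is re-exported wholesale via the `pub use self::initialized::{...}` added above, downstream users of these analyses keep the same import path. For example (illustrative only, not part of this commit):

```rust
// The public path is unchanged by the move:
use rustc_mir_dataflow::impls::{
    DefinitelyInitializedPlaces, EverInitializedPlaces, MaybeInitializedPlaces,
    MaybeUninitializedPlaces,
};
```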

compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs

@@ -1,10 +1,12 @@
-pub use super::*;
-use crate::{CallReturnPlaces, GenKill, ResultsClonedCursor};
+use rustc_index::bit_set::BitSet;
 use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor};
 use rustc_middle::mir::*;
 use std::borrow::Cow;
+use super::MaybeBorrowedLocals;
+use crate::{CallReturnPlaces, GenKill, ResultsClonedCursor};
 #[derive(Clone)]
 pub struct MaybeStorageLive<'a> {
 always_live_locals: Cow<'a, BitSet<Local>>,
@@ -27,12 +29,12 @@ impl<'tcx, 'a> crate::AnalysisDomain<'tcx> for MaybeStorageLive<'a> {
 const NAME: &'static str = "maybe_storage_live";
-fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
 // bottom = dead
 BitSet::new_empty(body.local_decls.len())
 }
-fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
 assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
 for local in self.always_live_locals.iter() {
 on_entry.insert(local);
@@ -50,7 +52,7 @@ impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> {
 fn statement_effect(
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
-stmt: &mir::Statement<'tcx>,
+stmt: &Statement<'tcx>,
 _: Location,
 ) {
 match stmt.kind {
@@ -63,7 +65,7 @@ impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> {
 fn terminator_effect(
 &mut self,
 _trans: &mut impl GenKill<Self::Idx>,
-_: &mir::Terminator<'tcx>,
+_: &Terminator<'tcx>,
 _: Location,
 ) {
 // Terminators have no effect
@@ -95,12 +97,12 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeStorageDead {
 const NAME: &'static str = "maybe_storage_dead";
-fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
 // bottom = live
 BitSet::new_empty(body.local_decls.len())
 }
-fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
 assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
 // Do not iterate on return place and args, as they are trivially always live.
 for local in body.vars_and_temps_iter() {
@@ -117,7 +119,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead {
 fn statement_effect(
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
-stmt: &mir::Statement<'tcx>,
+stmt: &Statement<'tcx>,
 _: Location,
 ) {
 match stmt.kind {
@@ -130,7 +132,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead {
 fn terminator_effect(
 &mut self,
 _trans: &mut impl GenKill<Self::Idx>,
-_: &mir::Terminator<'tcx>,
+_: &Terminator<'tcx>,
 _: Location,
 ) {
 // Terminators have no effect
@@ -172,12 +174,12 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
 const NAME: &'static str = "requires_storage";
-fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
 // bottom = dead
 BitSet::new_empty(body.local_decls.len())
 }
-fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
 // The resume argument is live on function entry (we don't care about
 // the `self` argument)
 for arg in body.args_iter().skip(1) {
@@ -192,7 +194,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
 fn before_statement_effect(
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
-stmt: &mir::Statement<'tcx>,
+stmt: &Statement<'tcx>,
 loc: Location,
 ) {
 // If a place is borrowed in a statement, it needs storage for that statement.
@@ -225,7 +227,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
 fn statement_effect(
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
-_: &mir::Statement<'tcx>,
+_: &Statement<'tcx>,
 loc: Location,
 ) {
 // If we move from a place then it only stops needing storage *after*
@@ -236,7 +238,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
 fn before_terminator_effect(
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
-terminator: &mir::Terminator<'tcx>,
+terminator: &Terminator<'tcx>,
 loc: Location,
 ) {
 // If a place is borrowed in a terminator, it needs storage for that terminator.
@@ -289,7 +291,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
 fn terminator_effect(
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
-terminator: &mir::Terminator<'tcx>,
+terminator: &Terminator<'tcx>,
 loc: Location,
 ) {
 match terminator.kind {
@@ -338,7 +340,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
 &mut self,
 trans: &mut impl GenKill<Self::Idx>,
 _resume_block: BasicBlock,
-resume_place: mir::Place<'tcx>,
+resume_place: Place<'tcx>,
 ) {
 trans.gen(resume_place.local);
 }