
Auto merge of #111555 - cjgillot:elaborate-drops, r=tmiasko

Only run MaybeInitializedPlaces dataflow once to elaborate drops

This PR allows forward dataflow analyses to modify the CFG depending on the dataflow state. This possibility is used for the `MaybeInitializedPlaces` analysis in drop elaboration, to skip the dataflow effect of dead unwinds without having to compute dataflow twice.
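To make the new contract concrete, here is a minimal self-contained sketch (toy types only; the names are inspired by `TerminatorEdges` and by drop elaboration's use of `MaybeInitializedPlaces`, but this is not the rustc API). The terminator transfer function now returns the CFG edges the engine should follow, so a `Drop` of a place that cannot be initialized reports only its normal successor and the dead unwind edge is never propagated into.

type BasicBlock = usize;

#[derive(Debug, PartialEq)]
#[allow(dead_code)] // `None` is unused in this tiny demo but kept for shape.
enum TerminatorEdges {
    /// No successor, e.g. `return`.
    None,
    /// A single successor, e.g. `goto` or a drop whose unwind edge is dead.
    Single(BasicBlock),
    /// A normal target plus an unwind/cleanup target.
    Double(BasicBlock, BasicBlock),
}

/// Toy "maybe initialized" state: the set of locals that may be initialized.
struct MaybeInit(std::collections::HashSet<usize>);

impl MaybeInit {
    /// Terminator effect for `Drop { local, target, unwind }`: if `local`
    /// cannot be initialized here, the drop is a no-op and cannot unwind, so
    /// only the normal target is reported back to the engine.
    fn drop_terminator_effect(
        &mut self,
        local: usize,
        target: BasicBlock,
        unwind: BasicBlock,
    ) -> TerminatorEdges {
        let maybe_live = self.0.contains(&local);
        self.0.remove(&local); // after the drop, the local is uninitialized
        if maybe_live {
            TerminatorEdges::Double(target, unwind)
        } else {
            TerminatorEdges::Single(target)
        }
    }
}

fn main() {
    let mut state = MaybeInit([0].into_iter().collect()); // only local 0 may be init
    // Dropping local 1 (never initialized): the unwind edge to block 9 is skipped.
    assert_eq!(state.drop_terminator_effect(1, 2, 9), TerminatorEdges::Single(2));
    // Dropping local 0 (maybe initialized): both edges are propagated.
    assert_eq!(state.drop_terminator_effect(0, 2, 9), TerminatorEdges::Double(2, 9));
}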
bors 2023-08-16 23:21:17 +00:00
commit f3b4c6746a
25 changed files with 1424 additions and 1292 deletions

View file

@ -2,12 +2,14 @@
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::fx::FxIndexMap;
use rustc_index::bit_set::BitSet; use rustc_index::bit_set::BitSet;
use rustc_middle::mir::{self, BasicBlock, Body, Location, Place}; use rustc_middle::mir::{
self, BasicBlock, Body, CallReturnPlaces, Location, Place, TerminatorEdges,
};
use rustc_middle::ty::RegionVid; use rustc_middle::ty::RegionVid;
use rustc_middle::ty::TyCtxt; use rustc_middle::ty::TyCtxt;
use rustc_mir_dataflow::impls::{EverInitializedPlaces, MaybeUninitializedPlaces}; use rustc_mir_dataflow::impls::{EverInitializedPlaces, MaybeUninitializedPlaces};
use rustc_mir_dataflow::ResultsVisitable; use rustc_mir_dataflow::ResultsVisitable;
use rustc_mir_dataflow::{self, fmt::DebugWithContext, CallReturnPlaces, GenKill}; use rustc_mir_dataflow::{self, fmt::DebugWithContext, GenKill};
use rustc_mir_dataflow::{Analysis, Direction, Results}; use rustc_mir_dataflow::{Analysis, Direction, Results};
use std::fmt; use std::fmt;
@ -334,6 +336,10 @@ impl<'tcx> rustc_mir_dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> { impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
type Idx = BorrowIndex; type Idx = BorrowIndex;
fn domain_size(&self, _: &mir::Body<'tcx>) -> usize {
self.borrow_set.len()
}
fn before_statement_effect( fn before_statement_effect(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut impl GenKill<Self::Idx>,
@ -400,12 +406,12 @@ impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
self.kill_loans_out_of_scope_at_location(trans, location); self.kill_loans_out_of_scope_at_location(trans, location);
} }
fn terminator_effect( fn terminator_effect<'mir>(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut Self::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'mir mir::Terminator<'tcx>,
_location: Location, _location: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
if let mir::TerminatorKind::InlineAsm { operands, .. } = &terminator.kind { if let mir::TerminatorKind::InlineAsm { operands, .. } = &terminator.kind {
for op in operands { for op in operands {
if let mir::InlineAsmOperand::Out { place: Some(place), .. } if let mir::InlineAsmOperand::Out { place: Some(place), .. }
@ -415,6 +421,7 @@ impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
} }
} }
} }
terminator.edges()
} }
fn call_return_effect( fn call_return_effect(

View file

@ -4,10 +4,12 @@
use rustc_index::bit_set::BitSet; use rustc_index::bit_set::BitSet;
use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::{self, BasicBlock, Local, Location, Statement, StatementKind}; use rustc_middle::mir::{
self, BasicBlock, CallReturnPlaces, Local, Location, Statement, StatementKind, TerminatorEdges,
};
use rustc_mir_dataflow::fmt::DebugWithContext; use rustc_mir_dataflow::fmt::DebugWithContext;
use rustc_mir_dataflow::JoinSemiLattice; use rustc_mir_dataflow::JoinSemiLattice;
use rustc_mir_dataflow::{Analysis, AnalysisDomain, CallReturnPlaces}; use rustc_mir_dataflow::{Analysis, AnalysisDomain};
use std::fmt; use std::fmt;
use std::marker::PhantomData; use std::marker::PhantomData;
@ -345,13 +347,14 @@ where
self.transfer_function(state).visit_statement(statement, location); self.transfer_function(state).visit_statement(statement, location);
} }
fn apply_terminator_effect( fn apply_terminator_effect<'mir>(
&mut self, &mut self,
state: &mut Self::Domain, state: &mut Self::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'mir mir::Terminator<'tcx>,
location: Location, location: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
self.transfer_function(state).visit_terminator(terminator, location); self.transfer_function(state).visit_terminator(terminator, location);
terminator.edges()
} }
fn apply_call_return_effect( fn apply_call_return_effect(

View file

@ -10,6 +10,7 @@ use std::iter;
use std::slice; use std::slice;
pub use super::query::*; pub use super::query::*;
use super::*;
#[derive(Debug, Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)] #[derive(Debug, Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)]
pub struct SwitchTargets { pub struct SwitchTargets {
@ -430,3 +431,108 @@ impl<'tcx> TerminatorKind<'tcx> {
} }
} }
} }
#[derive(Copy, Clone, Debug)]
pub enum TerminatorEdges<'mir, 'tcx> {
/// For terminators that have no successor, like `return`.
None,
/// For terminators that have a single successor, like `goto`, and `assert` without a cleanup block.
Single(BasicBlock),
/// For terminators that have two successors, like `assert` with a cleanup block and `FalseEdge`.
Double(BasicBlock, BasicBlock),
/// Special action for `Yield`, `Call` and `InlineAsm` terminators.
AssignOnReturn {
return_: Option<BasicBlock>,
unwind: UnwindAction,
place: CallReturnPlaces<'mir, 'tcx>,
},
/// Special edge for `SwitchInt`.
SwitchInt { targets: &'mir SwitchTargets, discr: &'mir Operand<'tcx> },
}
/// List of places that are written to after a successful (non-unwind) return
/// from a `Call`, `Yield` or `InlineAsm`.
#[derive(Copy, Clone, Debug)]
pub enum CallReturnPlaces<'a, 'tcx> {
Call(Place<'tcx>),
Yield(Place<'tcx>),
InlineAsm(&'a [InlineAsmOperand<'tcx>]),
}
impl<'tcx> CallReturnPlaces<'_, 'tcx> {
pub fn for_each(&self, mut f: impl FnMut(Place<'tcx>)) {
match *self {
Self::Call(place) | Self::Yield(place) => f(place),
Self::InlineAsm(operands) => {
for op in operands {
match *op {
InlineAsmOperand::Out { place: Some(place), .. }
| InlineAsmOperand::InOut { out_place: Some(place), .. } => f(place),
_ => {}
}
}
}
}
}
}
impl<'tcx> Terminator<'tcx> {
pub fn edges(&self) -> TerminatorEdges<'_, 'tcx> {
self.kind.edges()
}
}
impl<'tcx> TerminatorKind<'tcx> {
pub fn edges(&self) -> TerminatorEdges<'_, 'tcx> {
use TerminatorKind::*;
match *self {
Return | Resume | Terminate | GeneratorDrop | Unreachable => TerminatorEdges::None,
Goto { target } => TerminatorEdges::Single(target),
Assert { target, unwind, expected: _, msg: _, cond: _ }
| Drop { target, unwind, place: _, replace: _ }
| FalseUnwind { real_target: target, unwind } => match unwind {
UnwindAction::Cleanup(unwind) => TerminatorEdges::Double(target, unwind),
UnwindAction::Continue | UnwindAction::Terminate | UnwindAction::Unreachable => {
TerminatorEdges::Single(target)
}
},
FalseEdge { real_target, imaginary_target } => {
TerminatorEdges::Double(real_target, imaginary_target)
}
Yield { resume: target, drop, resume_arg, value: _ } => {
TerminatorEdges::AssignOnReturn {
return_: Some(target),
unwind: drop.map_or(UnwindAction::Terminate, UnwindAction::Cleanup),
place: CallReturnPlaces::Yield(resume_arg),
}
}
Call { unwind, destination, target, func: _, args: _, fn_span: _, call_source: _ } => {
TerminatorEdges::AssignOnReturn {
return_: target,
unwind,
place: CallReturnPlaces::Call(destination),
}
}
InlineAsm {
template: _,
ref operands,
options: _,
line_spans: _,
destination,
unwind,
} => TerminatorEdges::AssignOnReturn {
return_: destination,
unwind,
place: CallReturnPlaces::InlineAsm(operands),
},
SwitchInt { ref targets, ref discr } => TerminatorEdges::SwitchInt { targets, discr },
}
}
}
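Note that `Yield` is folded into the same `AssignOnReturn` shape as `Call` and `InlineAsm`: later hunks delete `apply_yield_resume_effect` because the engine can now hand the resume place to `apply_call_return_effect` as `CallReturnPlaces::Yield(resume_arg)`. Below is a self-contained sketch of that uniform "written on successful return" traversal, with toy place and operand types standing in for the rustc ones.

#[derive(Clone, Copy, Debug, PartialEq)]
struct Place(usize);

#[derive(Clone, Copy)]
enum AsmOperand {
    Out(Option<Place>),
    In,
}

enum CallReturnPlaces<'a> {
    Call(Place),
    Yield(Place),
    InlineAsm(&'a [AsmOperand]),
}

impl CallReturnPlaces<'_> {
    /// Visit every place written on the successful (non-unwind) edge.
    fn for_each(&self, mut f: impl FnMut(Place)) {
        match *self {
            Self::Call(p) | Self::Yield(p) => f(p),
            Self::InlineAsm(ops) => {
                for op in ops {
                    if let AsmOperand::Out(Some(p)) = *op {
                        f(p);
                    }
                }
            }
        }
    }
}

fn main() {
    let mut written = Vec::new();
    // A call writes its destination place on the return edge...
    CallReturnPlaces::Call(Place(1)).for_each(|p| written.push(p));
    // ...and a generator resume writes the resume-argument place the same way.
    CallReturnPlaces::Yield(Place(7)).for_each(|p| written.push(p));
    // Inline asm may write several output places (or none).
    let ops = [AsmOperand::Out(Some(Place(3))), AsmOperand::In, AsmOperand::Out(None)];
    CallReturnPlaces::InlineAsm(&ops).for_each(|p| written.push(p));
    assert_eq!(written, vec![Place(1), Place(7), Place(3)]);
}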

View file

@ -1,11 +1,10 @@
use rustc_middle::mir::{self, BasicBlock, Location, SwitchTargets, UnwindAction}; use rustc_middle::mir::{
use rustc_middle::ty::TyCtxt; self, BasicBlock, CallReturnPlaces, Location, SwitchTargets, TerminatorEdges, UnwindAction,
};
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use super::visitor::{ResultsVisitable, ResultsVisitor}; use super::visitor::{ResultsVisitable, ResultsVisitor};
use super::{ use super::{Analysis, Effect, EffectIndex, GenKillAnalysis, GenKillSet, SwitchIntTarget};
Analysis, CallReturnPlaces, Effect, EffectIndex, GenKillAnalysis, GenKillSet, SwitchIntTarget,
};
pub trait Direction { pub trait Direction {
const IS_FORWARD: bool; const IS_FORWARD: bool;
@ -24,15 +23,17 @@ pub trait Direction {
) where ) where
A: Analysis<'tcx>; A: Analysis<'tcx>;
fn apply_effects_in_block<'tcx, A>( fn apply_effects_in_block<'mir, 'tcx, A>(
analysis: &mut A, analysis: &mut A,
state: &mut A::Domain, state: &mut A::Domain,
block: BasicBlock, block: BasicBlock,
block_data: &mir::BasicBlockData<'tcx>, block_data: &'mir mir::BasicBlockData<'tcx>,
) where statement_effect: Option<&dyn Fn(BasicBlock, &mut A::Domain)>,
) -> TerminatorEdges<'mir, 'tcx>
where
A: Analysis<'tcx>; A: Analysis<'tcx>;
fn gen_kill_effects_in_block<'tcx, A>( fn gen_kill_statement_effects_in_block<'tcx, A>(
analysis: &mut A, analysis: &mut A,
trans: &mut GenKillSet<A::Idx>, trans: &mut GenKillSet<A::Idx>,
block: BasicBlock, block: BasicBlock,
@ -51,10 +52,10 @@ pub trait Direction {
fn join_state_into_successors_of<'tcx, A>( fn join_state_into_successors_of<'tcx, A>(
analysis: &mut A, analysis: &mut A,
tcx: TyCtxt<'tcx>,
body: &mir::Body<'tcx>, body: &mir::Body<'tcx>,
exit_state: &mut A::Domain, exit_state: &mut A::Domain,
block: (BasicBlock, &'_ mir::BasicBlockData<'tcx>), block: BasicBlock,
edges: TerminatorEdges<'_, 'tcx>,
propagate: impl FnMut(BasicBlock, &A::Domain), propagate: impl FnMut(BasicBlock, &A::Domain),
) where ) where
A: Analysis<'tcx>; A: Analysis<'tcx>;
@ -66,27 +67,33 @@ pub struct Backward;
impl Direction for Backward { impl Direction for Backward {
const IS_FORWARD: bool = false; const IS_FORWARD: bool = false;
fn apply_effects_in_block<'tcx, A>( fn apply_effects_in_block<'mir, 'tcx, A>(
analysis: &mut A, analysis: &mut A,
state: &mut A::Domain, state: &mut A::Domain,
block: BasicBlock, block: BasicBlock,
block_data: &mir::BasicBlockData<'tcx>, block_data: &'mir mir::BasicBlockData<'tcx>,
) where statement_effect: Option<&dyn Fn(BasicBlock, &mut A::Domain)>,
) -> TerminatorEdges<'mir, 'tcx>
where
A: Analysis<'tcx>, A: Analysis<'tcx>,
{ {
let terminator = block_data.terminator(); let terminator = block_data.terminator();
let location = Location { block, statement_index: block_data.statements.len() }; let location = Location { block, statement_index: block_data.statements.len() };
analysis.apply_before_terminator_effect(state, terminator, location); analysis.apply_before_terminator_effect(state, terminator, location);
analysis.apply_terminator_effect(state, terminator, location); let edges = analysis.apply_terminator_effect(state, terminator, location);
if let Some(statement_effect) = statement_effect {
for (statement_index, statement) in block_data.statements.iter().enumerate().rev() { statement_effect(block, state)
let location = Location { block, statement_index }; } else {
analysis.apply_before_statement_effect(state, statement, location); for (statement_index, statement) in block_data.statements.iter().enumerate().rev() {
analysis.apply_statement_effect(state, statement, location); let location = Location { block, statement_index };
analysis.apply_before_statement_effect(state, statement, location);
analysis.apply_statement_effect(state, statement, location);
}
} }
edges
} }
fn gen_kill_effects_in_block<'tcx, A>( fn gen_kill_statement_effects_in_block<'tcx, A>(
analysis: &mut A, analysis: &mut A,
trans: &mut GenKillSet<A::Idx>, trans: &mut GenKillSet<A::Idx>,
block: BasicBlock, block: BasicBlock,
@ -94,11 +101,6 @@ impl Direction for Backward {
) where ) where
A: GenKillAnalysis<'tcx>, A: GenKillAnalysis<'tcx>,
{ {
let terminator = block_data.terminator();
let location = Location { block, statement_index: block_data.statements.len() };
analysis.before_terminator_effect(trans, terminator, location);
analysis.terminator_effect(trans, terminator, location);
for (statement_index, statement) in block_data.statements.iter().enumerate().rev() { for (statement_index, statement) in block_data.statements.iter().enumerate().rev() {
let location = Location { block, statement_index }; let location = Location { block, statement_index };
analysis.before_statement_effect(trans, statement, location); analysis.before_statement_effect(trans, statement, location);
@ -217,10 +219,10 @@ impl Direction for Backward {
fn join_state_into_successors_of<'tcx, A>( fn join_state_into_successors_of<'tcx, A>(
analysis: &mut A, analysis: &mut A,
_tcx: TyCtxt<'tcx>,
body: &mir::Body<'tcx>, body: &mir::Body<'tcx>,
exit_state: &mut A::Domain, exit_state: &mut A::Domain,
(bb, _bb_data): (BasicBlock, &'_ mir::BasicBlockData<'tcx>), bb: BasicBlock,
_edges: TerminatorEdges<'_, 'tcx>,
mut propagate: impl FnMut(BasicBlock, &A::Domain), mut propagate: impl FnMut(BasicBlock, &A::Domain),
) where ) where
A: Analysis<'tcx>, A: Analysis<'tcx>,
@ -254,7 +256,11 @@ impl Direction for Backward {
mir::TerminatorKind::Yield { resume, resume_arg, .. } if resume == bb => { mir::TerminatorKind::Yield { resume, resume_arg, .. } if resume == bb => {
let mut tmp = exit_state.clone(); let mut tmp = exit_state.clone();
analysis.apply_yield_resume_effect(&mut tmp, resume, resume_arg); analysis.apply_call_return_effect(
&mut tmp,
resume,
CallReturnPlaces::Yield(resume_arg),
);
propagate(pred, &tmp); propagate(pred, &tmp);
} }
@ -318,27 +324,33 @@ pub struct Forward;
impl Direction for Forward { impl Direction for Forward {
const IS_FORWARD: bool = true; const IS_FORWARD: bool = true;
fn apply_effects_in_block<'tcx, A>( fn apply_effects_in_block<'mir, 'tcx, A>(
analysis: &mut A, analysis: &mut A,
state: &mut A::Domain, state: &mut A::Domain,
block: BasicBlock, block: BasicBlock,
block_data: &mir::BasicBlockData<'tcx>, block_data: &'mir mir::BasicBlockData<'tcx>,
) where statement_effect: Option<&dyn Fn(BasicBlock, &mut A::Domain)>,
) -> TerminatorEdges<'mir, 'tcx>
where
A: Analysis<'tcx>, A: Analysis<'tcx>,
{ {
for (statement_index, statement) in block_data.statements.iter().enumerate() { if let Some(statement_effect) = statement_effect {
let location = Location { block, statement_index }; statement_effect(block, state)
analysis.apply_before_statement_effect(state, statement, location); } else {
analysis.apply_statement_effect(state, statement, location); for (statement_index, statement) in block_data.statements.iter().enumerate() {
let location = Location { block, statement_index };
analysis.apply_before_statement_effect(state, statement, location);
analysis.apply_statement_effect(state, statement, location);
}
} }
let terminator = block_data.terminator(); let terminator = block_data.terminator();
let location = Location { block, statement_index: block_data.statements.len() }; let location = Location { block, statement_index: block_data.statements.len() };
analysis.apply_before_terminator_effect(state, terminator, location); analysis.apply_before_terminator_effect(state, terminator, location);
analysis.apply_terminator_effect(state, terminator, location); analysis.apply_terminator_effect(state, terminator, location)
} }
fn gen_kill_effects_in_block<'tcx, A>( fn gen_kill_statement_effects_in_block<'tcx, A>(
analysis: &mut A, analysis: &mut A,
trans: &mut GenKillSet<A::Idx>, trans: &mut GenKillSet<A::Idx>,
block: BasicBlock, block: BasicBlock,
@ -351,11 +363,6 @@ impl Direction for Forward {
analysis.before_statement_effect(trans, statement, location); analysis.before_statement_effect(trans, statement, location);
analysis.statement_effect(trans, statement, location); analysis.statement_effect(trans, statement, location);
} }
let terminator = block_data.terminator();
let location = Location { block, statement_index: block_data.statements.len() };
analysis.before_terminator_effect(trans, terminator, location);
analysis.terminator_effect(trans, terminator, location);
} }
fn apply_effects_in_range<'tcx, A>( fn apply_effects_in_range<'tcx, A>(
@ -464,86 +471,32 @@ impl Direction for Forward {
fn join_state_into_successors_of<'tcx, A>( fn join_state_into_successors_of<'tcx, A>(
analysis: &mut A, analysis: &mut A,
_tcx: TyCtxt<'tcx>,
_body: &mir::Body<'tcx>, _body: &mir::Body<'tcx>,
exit_state: &mut A::Domain, exit_state: &mut A::Domain,
(bb, bb_data): (BasicBlock, &'_ mir::BasicBlockData<'tcx>), bb: BasicBlock,
edges: TerminatorEdges<'_, 'tcx>,
mut propagate: impl FnMut(BasicBlock, &A::Domain), mut propagate: impl FnMut(BasicBlock, &A::Domain),
) where ) where
A: Analysis<'tcx>, A: Analysis<'tcx>,
{ {
use mir::TerminatorKind::*; match edges {
match bb_data.terminator().kind { TerminatorEdges::None => {}
Return | Resume | Terminate | GeneratorDrop | Unreachable => {} TerminatorEdges::Single(target) => propagate(target, exit_state),
TerminatorEdges::Double(target, unwind) => {
Goto { target } => propagate(target, exit_state),
Assert { target, unwind, expected: _, msg: _, cond: _ }
| Drop { target, unwind, place: _, replace: _ }
| FalseUnwind { real_target: target, unwind } => {
if let UnwindAction::Cleanup(unwind) = unwind {
propagate(unwind, exit_state);
}
propagate(target, exit_state); propagate(target, exit_state);
propagate(unwind, exit_state);
} }
TerminatorEdges::AssignOnReturn { return_, unwind, place } => {
FalseEdge { real_target, imaginary_target } => { // This must be done *first*, otherwise the unwind path will see the assignments.
propagate(real_target, exit_state);
propagate(imaginary_target, exit_state);
}
Yield { resume: target, drop, resume_arg, value: _ } => {
if let Some(drop) = drop {
propagate(drop, exit_state);
}
analysis.apply_yield_resume_effect(exit_state, target, resume_arg);
propagate(target, exit_state);
}
Call { unwind, destination, target, func: _, args: _, call_source: _, fn_span: _ } => {
if let UnwindAction::Cleanup(unwind) = unwind { if let UnwindAction::Cleanup(unwind) = unwind {
propagate(unwind, exit_state); propagate(unwind, exit_state);
} }
if let Some(return_) = return_ {
if let Some(target) = target { analysis.apply_call_return_effect(exit_state, bb, place);
// N.B.: This must be done *last*, otherwise the unwind path will see the call propagate(return_, exit_state);
// return effect.
analysis.apply_call_return_effect(
exit_state,
bb,
CallReturnPlaces::Call(destination),
);
propagate(target, exit_state);
} }
} }
TerminatorEdges::SwitchInt { targets, discr } => {
InlineAsm {
template: _,
ref operands,
options: _,
line_spans: _,
destination,
unwind,
} => {
if let UnwindAction::Cleanup(unwind) = unwind {
propagate(unwind, exit_state);
}
if let Some(target) = destination {
// N.B.: This must be done *last*, otherwise the unwind path will see the call
// return effect.
analysis.apply_call_return_effect(
exit_state,
bb,
CallReturnPlaces::InlineAsm(operands),
);
propagate(target, exit_state);
}
}
SwitchInt { ref targets, ref discr } => {
let mut applier = ForwardSwitchIntEdgeEffectsApplier { let mut applier = ForwardSwitchIntEdgeEffectsApplier {
exit_state, exit_state,
targets, targets,

View file

@ -144,7 +144,7 @@ where
// gen/kill problems on cyclic CFGs. This is not ideal, but it doesn't seem to degrade // gen/kill problems on cyclic CFGs. This is not ideal, but it doesn't seem to degrade
// performance in practice. I've tried a few ways to avoid this, but they have downsides. See // performance in practice. I've tried a few ways to avoid this, but they have downsides. See
// the message for the commit that added this FIXME for more information. // the message for the commit that added this FIXME for more information.
apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>, apply_statement_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
} }
impl<'a, 'tcx, A, D, T> Engine<'a, 'tcx, A> impl<'a, 'tcx, A, D, T> Engine<'a, 'tcx, A>
@ -165,12 +165,17 @@ where
// Otherwise, compute and store the cumulative transfer function for each block. // Otherwise, compute and store the cumulative transfer function for each block.
let identity = GenKillSet::identity(analysis.bottom_value(body).domain_size()); let identity = GenKillSet::identity(analysis.domain_size(body));
let mut trans_for_block = IndexVec::from_elem(identity, &body.basic_blocks); let mut trans_for_block = IndexVec::from_elem(identity, &body.basic_blocks);
for (block, block_data) in body.basic_blocks.iter_enumerated() { for (block, block_data) in body.basic_blocks.iter_enumerated() {
let trans = &mut trans_for_block[block]; let trans = &mut trans_for_block[block];
A::Direction::gen_kill_effects_in_block(&mut analysis, trans, block, block_data); A::Direction::gen_kill_statement_effects_in_block(
&mut analysis,
trans,
block,
block_data,
);
} }
let apply_trans = Box::new(move |bb: BasicBlock, state: &mut A::Domain| { let apply_trans = Box::new(move |bb: BasicBlock, state: &mut A::Domain| {
@ -199,17 +204,18 @@ where
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
body: &'a mir::Body<'tcx>, body: &'a mir::Body<'tcx>,
analysis: A, analysis: A,
apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>, apply_statement_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
) -> Self { ) -> Self {
let bottom_value = analysis.bottom_value(body); let mut entry_sets =
let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), &body.basic_blocks); IndexVec::from_fn_n(|_| analysis.bottom_value(body), body.basic_blocks.len());
analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]); analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
if A::Direction::IS_BACKWARD && entry_sets[mir::START_BLOCK] != bottom_value { if A::Direction::IS_BACKWARD && entry_sets[mir::START_BLOCK] != analysis.bottom_value(body)
{
bug!("`initialize_start_block` is not yet supported for backward dataflow analyses"); bug!("`initialize_start_block` is not yet supported for backward dataflow analyses");
} }
Engine { analysis, tcx, body, pass_name: None, entry_sets, apply_trans_for_block } Engine { analysis, tcx, body, pass_name: None, entry_sets, apply_statement_trans_for_block }
} }
/// Adds an identifier to the graphviz output for this particular run of a dataflow analysis. /// Adds an identifier to the graphviz output for this particular run of a dataflow analysis.
@ -231,7 +237,7 @@ where
body, body,
mut entry_sets, mut entry_sets,
tcx, tcx,
apply_trans_for_block, apply_statement_trans_for_block,
pass_name, pass_name,
.. ..
} = self; } = self;
@ -263,19 +269,20 @@ where
state.clone_from(&entry_sets[bb]); state.clone_from(&entry_sets[bb]);
// Apply the block transfer function, using the cached one if it exists. // Apply the block transfer function, using the cached one if it exists.
match &apply_trans_for_block { let edges = A::Direction::apply_effects_in_block(
Some(apply) => apply(bb, &mut state), &mut analysis,
None => { &mut state,
A::Direction::apply_effects_in_block(&mut analysis, &mut state, bb, bb_data) bb,
} bb_data,
} apply_statement_trans_for_block.as_deref(),
);
A::Direction::join_state_into_successors_of( A::Direction::join_state_into_successors_of(
&mut analysis, &mut analysis,
tcx,
body, body,
&mut state, &mut state,
(bb, bb_data), bb,
edges,
|target: BasicBlock, state: &A::Domain| { |target: BasicBlock, state: &A::Domain| {
let set_changed = entry_sets[target].join(state); let set_changed = entry_sets[target].join(state);
if set_changed { if set_changed {

View file

@ -1,6 +1,7 @@
//! Custom formatting traits used when outputting Graphviz diagrams with the results of a dataflow //! Custom formatting traits used when outputting Graphviz diagrams with the results of a dataflow
//! analysis. //! analysis.
use super::lattice::MaybeReachable;
use rustc_index::bit_set::{BitSet, ChunkedBitSet, HybridBitSet}; use rustc_index::bit_set::{BitSet, ChunkedBitSet, HybridBitSet};
use rustc_index::Idx; use rustc_index::Idx;
use std::fmt; use std::fmt;
@ -124,6 +125,37 @@ where
} }
} }
impl<S, C> DebugWithContext<C> for MaybeReachable<S>
where
S: DebugWithContext<C>,
{
fn fmt_with(&self, ctxt: &C, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
MaybeReachable::Unreachable => {
write!(f, "unreachable")
}
MaybeReachable::Reachable(set) => set.fmt_with(ctxt, f),
}
}
fn fmt_diff_with(&self, old: &Self, ctxt: &C, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match (self, old) {
(MaybeReachable::Unreachable, MaybeReachable::Unreachable) => Ok(()),
(MaybeReachable::Unreachable, MaybeReachable::Reachable(set)) => {
write!(f, "\u{001f}+")?;
set.fmt_with(ctxt, f)
}
(MaybeReachable::Reachable(set), MaybeReachable::Unreachable) => {
write!(f, "\u{001f}-")?;
set.fmt_with(ctxt, f)
}
(MaybeReachable::Reachable(this), MaybeReachable::Reachable(old)) => {
this.fmt_diff_with(old, ctxt, f)
}
}
}
}
fn fmt_diff<T, C>( fn fmt_diff<T, C>(
inserted: &HybridBitSet<T>, inserted: &HybridBitSet<T>,
removed: &HybridBitSet<T>, removed: &HybridBitSet<T>,

View file

@ -269,7 +269,11 @@ where
self.write_row(w, "", "(on yield resume)", |this, w, fmt| { self.write_row(w, "", "(on yield resume)", |this, w, fmt| {
let state_on_generator_drop = this.results.get().clone(); let state_on_generator_drop = this.results.get().clone();
this.results.apply_custom_effect(|analysis, state| { this.results.apply_custom_effect(|analysis, state| {
analysis.apply_yield_resume_effect(state, resume, resume_arg); analysis.apply_call_return_effect(
state,
resume,
CallReturnPlaces::Yield(resume_arg),
);
}); });
write!( write!(

View file

@ -187,10 +187,6 @@ impl<T: Idx> MeetSemiLattice for ChunkedBitSet<T> {
pub struct Dual<T>(pub T); pub struct Dual<T>(pub T);
impl<T: Idx> BitSetExt<T> for Dual<BitSet<T>> { impl<T: Idx> BitSetExt<T> for Dual<BitSet<T>> {
fn domain_size(&self) -> usize {
self.0.domain_size()
}
fn contains(&self, elem: T) -> bool { fn contains(&self, elem: T) -> bool {
self.0.contains(elem) self.0.contains(elem)
} }
@ -276,3 +272,93 @@ impl<T> HasBottom for FlatSet<T> {
impl<T> HasTop for FlatSet<T> { impl<T> HasTop for FlatSet<T> {
const TOP: Self = Self::Top; const TOP: Self = Self::Top;
} }
/// Extend a lattice with a bottom value to represent an unreachable execution.
///
/// The only useful action on an unreachable state is joining it with a reachable one to make it
/// reachable. All other actions, gen/kill for instance, are no-ops.
#[derive(PartialEq, Eq, Debug)]
pub enum MaybeReachable<T> {
Unreachable,
Reachable(T),
}
impl<T> MaybeReachable<T> {
pub fn is_reachable(&self) -> bool {
matches!(self, MaybeReachable::Reachable(_))
}
}
impl<T> HasBottom for MaybeReachable<T> {
const BOTTOM: Self = MaybeReachable::Unreachable;
}
impl<T: HasTop> HasTop for MaybeReachable<T> {
const TOP: Self = MaybeReachable::Reachable(T::TOP);
}
impl<S> MaybeReachable<S> {
/// Return whether the current state contains the given element. If the state is unreachable,
    /// it does not contain anything.
pub fn contains<T>(&self, elem: T) -> bool
where
S: BitSetExt<T>,
{
match self {
MaybeReachable::Unreachable => false,
MaybeReachable::Reachable(set) => set.contains(elem),
}
}
}
impl<T, S: BitSetExt<T>> BitSetExt<T> for MaybeReachable<S> {
fn contains(&self, elem: T) -> bool {
self.contains(elem)
}
fn union(&mut self, other: &HybridBitSet<T>) {
match self {
MaybeReachable::Unreachable => {}
MaybeReachable::Reachable(set) => set.union(other),
}
}
fn subtract(&mut self, other: &HybridBitSet<T>) {
match self {
MaybeReachable::Unreachable => {}
MaybeReachable::Reachable(set) => set.subtract(other),
}
}
}
impl<V: Clone> Clone for MaybeReachable<V> {
fn clone(&self) -> Self {
match self {
MaybeReachable::Reachable(x) => MaybeReachable::Reachable(x.clone()),
MaybeReachable::Unreachable => MaybeReachable::Unreachable,
}
}
fn clone_from(&mut self, source: &Self) {
match (&mut *self, source) {
(MaybeReachable::Reachable(x), MaybeReachable::Reachable(y)) => {
x.clone_from(&y);
}
_ => *self = source.clone(),
}
}
}
impl<T: JoinSemiLattice + Clone> JoinSemiLattice for MaybeReachable<T> {
fn join(&mut self, other: &Self) -> bool {
// Unreachable acts as a bottom.
match (&mut *self, &other) {
(_, MaybeReachable::Unreachable) => false,
(MaybeReachable::Unreachable, _) => {
*self = other.clone();
true
}
(MaybeReachable::Reachable(this), MaybeReachable::Reachable(other)) => this.join(other),
}
}
}
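A standalone sketch of the `MaybeReachable` join added above, with a plain `HashSet` standing in for the bitset domains used in rustc: `Unreachable` is the bottom element, so a block's entry state stays `Unreachable` until a reachable predecessor state is joined into it, and joining an `Unreachable` predecessor never changes anything.

use std::collections::HashSet;

#[derive(Clone, Debug, PartialEq)]
enum MaybeReachable<T> {
    Unreachable,
    Reachable(T),
}

impl MaybeReachable<HashSet<usize>> {
    /// Returns true if `self` changed.
    fn join(&mut self, other: &Self) -> bool {
        match (&mut *self, other) {
            // Joining an unreachable predecessor is a no-op.
            (_, MaybeReachable::Unreachable) => false,
            // An unreachable block adopts the first reachable state it sees.
            (MaybeReachable::Unreachable, _) => {
                *self = other.clone();
                true
            }
            // Two reachable states join as their underlying sets do.
            (MaybeReachable::Reachable(this), MaybeReachable::Reachable(other)) => {
                let before = this.len();
                this.extend(other.iter().copied());
                this.len() != before
            }
        }
    }
}

fn main() {
    let mut entry: MaybeReachable<HashSet<usize>> = MaybeReachable::Unreachable;
    // The block only becomes reachable once a reachable predecessor flows in.
    assert!(entry.join(&MaybeReachable::Reachable(HashSet::from([1]))));
    // An unreachable predecessor changes nothing.
    assert!(!entry.join(&MaybeReachable::Unreachable));
    assert_eq!(entry, MaybeReachable::Reachable(HashSet::from([1])));
}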

View file

@ -34,7 +34,7 @@ use std::cmp::Ordering;
use rustc_index::bit_set::{BitSet, ChunkedBitSet, HybridBitSet}; use rustc_index::bit_set::{BitSet, ChunkedBitSet, HybridBitSet};
use rustc_index::Idx; use rustc_index::Idx;
use rustc_middle::mir::{self, BasicBlock, Location}; use rustc_middle::mir::{self, BasicBlock, CallReturnPlaces, Location, TerminatorEdges};
use rustc_middle::ty::TyCtxt; use rustc_middle::ty::TyCtxt;
mod cursor; mod cursor;
@ -48,23 +48,18 @@ mod visitor;
pub use self::cursor::{AnalysisResults, ResultsClonedCursor, ResultsCursor, ResultsRefCursor}; pub use self::cursor::{AnalysisResults, ResultsClonedCursor, ResultsCursor, ResultsRefCursor};
pub use self::direction::{Backward, Direction, Forward}; pub use self::direction::{Backward, Direction, Forward};
pub use self::engine::{Engine, EntrySets, Results, ResultsCloned}; pub use self::engine::{Engine, EntrySets, Results, ResultsCloned};
pub use self::lattice::{JoinSemiLattice, MeetSemiLattice}; pub use self::lattice::{JoinSemiLattice, MaybeReachable, MeetSemiLattice};
pub use self::visitor::{visit_results, ResultsVisitable, ResultsVisitor}; pub use self::visitor::{visit_results, ResultsVisitable, ResultsVisitor};
/// Analysis domains are all bitsets of various kinds. This trait holds /// Analysis domains are all bitsets of various kinds. This trait holds
/// operations needed by all of them. /// operations needed by all of them.
pub trait BitSetExt<T> { pub trait BitSetExt<T> {
fn domain_size(&self) -> usize;
fn contains(&self, elem: T) -> bool; fn contains(&self, elem: T) -> bool;
fn union(&mut self, other: &HybridBitSet<T>); fn union(&mut self, other: &HybridBitSet<T>);
fn subtract(&mut self, other: &HybridBitSet<T>); fn subtract(&mut self, other: &HybridBitSet<T>);
} }
impl<T: Idx> BitSetExt<T> for BitSet<T> { impl<T: Idx> BitSetExt<T> for BitSet<T> {
fn domain_size(&self) -> usize {
self.domain_size()
}
fn contains(&self, elem: T) -> bool { fn contains(&self, elem: T) -> bool {
self.contains(elem) self.contains(elem)
} }
@ -79,10 +74,6 @@ impl<T: Idx> BitSetExt<T> for BitSet<T> {
} }
impl<T: Idx> BitSetExt<T> for ChunkedBitSet<T> { impl<T: Idx> BitSetExt<T> for ChunkedBitSet<T> {
fn domain_size(&self) -> usize {
self.domain_size()
}
fn contains(&self, elem: T) -> bool { fn contains(&self, elem: T) -> bool {
self.contains(elem) self.contains(elem)
} }
@ -172,12 +163,12 @@ pub trait Analysis<'tcx>: AnalysisDomain<'tcx> {
/// in this function. That should go in `apply_call_return_effect`. For example, in the /// in this function. That should go in `apply_call_return_effect`. For example, in the
/// `InitializedPlaces` analyses, the return place for a function call is not marked as /// `InitializedPlaces` analyses, the return place for a function call is not marked as
/// initialized here. /// initialized here.
fn apply_terminator_effect( fn apply_terminator_effect<'mir>(
&mut self, &mut self,
state: &mut Self::Domain, state: &mut Self::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'mir mir::Terminator<'tcx>,
location: Location, location: Location,
); ) -> TerminatorEdges<'mir, 'tcx>;
/// Updates the current dataflow state with an effect that occurs immediately *before* the /// Updates the current dataflow state with an effect that occurs immediately *before* the
/// given terminator. /// given terminator.
@ -207,20 +198,6 @@ pub trait Analysis<'tcx>: AnalysisDomain<'tcx> {
return_places: CallReturnPlaces<'_, 'tcx>, return_places: CallReturnPlaces<'_, 'tcx>,
); );
/// Updates the current dataflow state with the effect of resuming from a `Yield` terminator.
///
/// This is similar to `apply_call_return_effect` in that it only takes place after the
/// generator is resumed, not when it is dropped.
///
/// By default, no effects happen.
fn apply_yield_resume_effect(
&mut self,
_state: &mut Self::Domain,
_resume_block: BasicBlock,
_resume_place: mir::Place<'tcx>,
) {
}
/// Updates the current dataflow state with the effect of taking a particular branch in a /// Updates the current dataflow state with the effect of taking a particular branch in a
/// `SwitchInt` terminator. /// `SwitchInt` terminator.
/// ///
@ -295,6 +272,8 @@ where
pub trait GenKillAnalysis<'tcx>: Analysis<'tcx> { pub trait GenKillAnalysis<'tcx>: Analysis<'tcx> {
type Idx: Idx; type Idx: Idx;
fn domain_size(&self, body: &mir::Body<'tcx>) -> usize;
/// See `Analysis::apply_statement_effect`. /// See `Analysis::apply_statement_effect`.
fn statement_effect( fn statement_effect(
&mut self, &mut self,
@ -313,12 +292,12 @@ pub trait GenKillAnalysis<'tcx>: Analysis<'tcx> {
} }
/// See `Analysis::apply_terminator_effect`. /// See `Analysis::apply_terminator_effect`.
fn terminator_effect( fn terminator_effect<'mir>(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut Self::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'mir mir::Terminator<'tcx>,
location: Location, location: Location,
); ) -> TerminatorEdges<'mir, 'tcx>;
/// See `Analysis::apply_before_terminator_effect`. /// See `Analysis::apply_before_terminator_effect`.
fn before_terminator_effect( fn before_terminator_effect(
@ -339,15 +318,6 @@ pub trait GenKillAnalysis<'tcx>: Analysis<'tcx> {
return_places: CallReturnPlaces<'_, 'tcx>, return_places: CallReturnPlaces<'_, 'tcx>,
); );
/// See `Analysis::apply_yield_resume_effect`.
fn yield_resume_effect(
&mut self,
_trans: &mut impl GenKill<Self::Idx>,
_resume_block: BasicBlock,
_resume_place: mir::Place<'tcx>,
) {
}
/// See `Analysis::apply_switch_int_edge_effects`. /// See `Analysis::apply_switch_int_edge_effects`.
fn switch_int_edge_effects<G: GenKill<Self::Idx>>( fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self, &mut self,
@ -381,13 +351,13 @@ where
self.before_statement_effect(state, statement, location); self.before_statement_effect(state, statement, location);
} }
fn apply_terminator_effect( fn apply_terminator_effect<'mir>(
&mut self, &mut self,
state: &mut A::Domain, state: &mut A::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'mir mir::Terminator<'tcx>,
location: Location, location: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
self.terminator_effect(state, terminator, location); self.terminator_effect(state, terminator, location)
} }
fn apply_before_terminator_effect( fn apply_before_terminator_effect(
@ -410,15 +380,6 @@ where
self.call_return_effect(state, block, return_places); self.call_return_effect(state, block, return_places);
} }
fn apply_yield_resume_effect(
&mut self,
state: &mut A::Domain,
resume_block: BasicBlock,
resume_place: mir::Place<'tcx>,
) {
self.yield_resume_effect(state, resume_block, resume_place);
}
fn apply_switch_int_edge_effects( fn apply_switch_int_edge_effects(
&mut self, &mut self,
block: BasicBlock, block: BasicBlock,
@ -531,6 +492,24 @@ impl<T: Idx> GenKill<T> for ChunkedBitSet<T> {
} }
} }
impl<T, S: GenKill<T>> GenKill<T> for MaybeReachable<S> {
fn gen(&mut self, elem: T) {
match self {
// If the state is not reachable, adding an element does nothing.
MaybeReachable::Unreachable => {}
MaybeReachable::Reachable(set) => set.gen(elem),
}
}
fn kill(&mut self, elem: T) {
match self {
// If the state is not reachable, killing an element does nothing.
MaybeReachable::Unreachable => {}
MaybeReachable::Reachable(set) => set.kill(elem),
}
}
}
impl<T: Idx> GenKill<T> for lattice::Dual<BitSet<T>> { impl<T: Idx> GenKill<T> for lattice::Dual<BitSet<T>> {
fn gen(&mut self, elem: T) { fn gen(&mut self, elem: T) {
self.0.insert(elem); self.0.insert(elem);
@ -612,29 +591,5 @@ pub trait SwitchIntEdgeEffects<D> {
fn apply(&mut self, apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget)); fn apply(&mut self, apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget));
} }
/// List of places that are written to after a successful (non-unwind) return
/// from a `Call` or `InlineAsm`.
pub enum CallReturnPlaces<'a, 'tcx> {
Call(mir::Place<'tcx>),
InlineAsm(&'a [mir::InlineAsmOperand<'tcx>]),
}
impl<'tcx> CallReturnPlaces<'_, 'tcx> {
pub fn for_each(&self, mut f: impl FnMut(mir::Place<'tcx>)) {
match *self {
Self::Call(place) => f(place),
Self::InlineAsm(operands) => {
for op in operands {
match *op {
mir::InlineAsmOperand::Out { place: Some(place), .. }
| mir::InlineAsmOperand::InOut { out_place: Some(place), .. } => f(place),
_ => {}
}
}
}
}
}
}
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;

View file

@ -198,14 +198,15 @@ impl<'tcx, D: Direction> Analysis<'tcx> for MockAnalysis<'tcx, D> {
assert!(state.insert(idx)); assert!(state.insert(idx));
} }
fn apply_terminator_effect( fn apply_terminator_effect<'mir>(
&mut self, &mut self,
state: &mut Self::Domain, state: &mut Self::Domain,
_terminator: &mir::Terminator<'tcx>, terminator: &'mir mir::Terminator<'tcx>,
location: Location, location: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
let idx = self.effect(Effect::Primary.at_index(location.statement_index)); let idx = self.effect(Effect::Primary.at_index(location.statement_index));
assert!(state.insert(idx)); assert!(state.insert(idx));
terminator.edges()
} }
fn apply_before_terminator_effect( fn apply_before_terminator_effect(

View file

@ -1,9 +1,9 @@
use super::*; use rustc_index::bit_set::BitSet;
use crate::{AnalysisDomain, CallReturnPlaces, GenKill, GenKillAnalysis};
use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*; use rustc_middle::mir::*;
use crate::{AnalysisDomain, GenKill, GenKillAnalysis};
/// A dataflow analysis that tracks whether a pointer or reference could possibly exist that points /// A dataflow analysis that tracks whether a pointer or reference could possibly exist that points
/// to a given local. /// to a given local.
/// ///
@ -14,7 +14,7 @@ use rustc_middle::mir::*;
pub struct MaybeBorrowedLocals; pub struct MaybeBorrowedLocals;
impl MaybeBorrowedLocals { impl MaybeBorrowedLocals {
fn transfer_function<'a, T>(&'a self, trans: &'a mut T) -> TransferFunction<'a, T> { pub(super) fn transfer_function<'a, T>(&'a self, trans: &'a mut T) -> TransferFunction<'a, T> {
TransferFunction { trans } TransferFunction { trans }
} }
} }
@ -23,12 +23,12 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeBorrowedLocals {
type Domain = BitSet<Local>; type Domain = BitSet<Local>;
const NAME: &'static str = "maybe_borrowed_locals"; const NAME: &'static str = "maybe_borrowed_locals";
fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain { fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
// bottom = unborrowed // bottom = unborrowed
BitSet::new_empty(body.local_decls().len()) BitSet::new_empty(body.local_decls().len())
} }
fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) { fn initialize_start_block(&self, _: &Body<'tcx>, _: &mut Self::Domain) {
// No locals are aliased on function entry // No locals are aliased on function entry
} }
} }
@ -36,35 +36,40 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeBorrowedLocals {
impl<'tcx> GenKillAnalysis<'tcx> for MaybeBorrowedLocals { impl<'tcx> GenKillAnalysis<'tcx> for MaybeBorrowedLocals {
type Idx = Local; type Idx = Local;
fn domain_size(&self, body: &Body<'tcx>) -> usize {
body.local_decls.len()
}
fn statement_effect( fn statement_effect(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut impl GenKill<Self::Idx>,
statement: &mir::Statement<'tcx>, statement: &Statement<'tcx>,
location: Location, location: Location,
) { ) {
self.transfer_function(trans).visit_statement(statement, location); self.transfer_function(trans).visit_statement(statement, location);
} }
fn terminator_effect( fn terminator_effect<'mir>(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut Self::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'mir Terminator<'tcx>,
location: Location, location: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
self.transfer_function(trans).visit_terminator(terminator, location); self.transfer_function(trans).visit_terminator(terminator, location);
terminator.edges()
} }
fn call_return_effect( fn call_return_effect(
&mut self, &mut self,
_trans: &mut impl GenKill<Self::Idx>, _trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock, _block: BasicBlock,
_return_places: CallReturnPlaces<'_, 'tcx>, _return_places: CallReturnPlaces<'_, 'tcx>,
) { ) {
} }
} }
/// A `Visitor` that defines the transfer function for `MaybeBorrowedLocals`. /// A `Visitor` that defines the transfer function for `MaybeBorrowedLocals`.
struct TransferFunction<'a, T> { pub(super) struct TransferFunction<'a, T> {
trans: &'a mut T, trans: &'a mut T,
} }
@ -82,37 +87,37 @@ where
} }
} }
fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) { fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
self.super_rvalue(rvalue, location); self.super_rvalue(rvalue, location);
match rvalue { match rvalue {
mir::Rvalue::AddressOf(_, borrowed_place) | mir::Rvalue::Ref(_, _, borrowed_place) => { Rvalue::AddressOf(_, borrowed_place) | Rvalue::Ref(_, _, borrowed_place) => {
if !borrowed_place.is_indirect() { if !borrowed_place.is_indirect() {
self.trans.gen(borrowed_place.local); self.trans.gen(borrowed_place.local);
} }
} }
mir::Rvalue::Cast(..) Rvalue::Cast(..)
| mir::Rvalue::ShallowInitBox(..) | Rvalue::ShallowInitBox(..)
| mir::Rvalue::Use(..) | Rvalue::Use(..)
| mir::Rvalue::ThreadLocalRef(..) | Rvalue::ThreadLocalRef(..)
| mir::Rvalue::Repeat(..) | Rvalue::Repeat(..)
| mir::Rvalue::Len(..) | Rvalue::Len(..)
| mir::Rvalue::BinaryOp(..) | Rvalue::BinaryOp(..)
| mir::Rvalue::CheckedBinaryOp(..) | Rvalue::CheckedBinaryOp(..)
| mir::Rvalue::NullaryOp(..) | Rvalue::NullaryOp(..)
| mir::Rvalue::UnaryOp(..) | Rvalue::UnaryOp(..)
| mir::Rvalue::Discriminant(..) | Rvalue::Discriminant(..)
| mir::Rvalue::Aggregate(..) | Rvalue::Aggregate(..)
| mir::Rvalue::CopyForDeref(..) => {} | Rvalue::CopyForDeref(..) => {}
} }
} }
fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) { fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
self.super_terminator(terminator, location); self.super_terminator(terminator, location);
match terminator.kind { match terminator.kind {
mir::TerminatorKind::Drop { place: dropped_place, .. } => { TerminatorKind::Drop { place: dropped_place, .. } => {
// Drop terminators may call custom drop glue (`Drop::drop`), which takes `&mut // Drop terminators may call custom drop glue (`Drop::drop`), which takes `&mut
// self` as a parameter. In the general case, a drop impl could launder that // self` as a parameter. In the general case, a drop impl could launder that
// reference into the surrounding environment through a raw pointer, thus creating // reference into the surrounding environment through a raw pointer, thus creating

View file

@ -0,0 +1,778 @@
use rustc_index::bit_set::{BitSet, ChunkedBitSet};
use rustc_index::Idx;
use rustc_middle::mir::{self, Body, CallReturnPlaces, Location, TerminatorEdges};
use rustc_middle::ty::{self, TyCtxt};
use crate::drop_flag_effects_for_function_entry;
use crate::drop_flag_effects_for_location;
use crate::elaborate_drops::DropFlagState;
use crate::framework::SwitchIntEdgeEffects;
use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
use crate::on_lookup_result_bits;
use crate::MoveDataParamEnv;
use crate::{drop_flag_effects, on_all_children_bits, on_all_drop_children_bits};
use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis, MaybeReachable};
/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // maybe-init:
/// // {}
/// let a = S; let mut b = S; let c; let d; // {a, b}
///
/// if pred {
/// drop(a); // { b}
/// b = S; // { b}
///
/// } else {
/// drop(b); // {a}
/// d = S; // {a, d}
///
/// } // {a, b, d}
///
/// c = S; // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place *must* be initialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeUninitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
skip_unreachable_unwind: bool,
}
impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeInitializedPlaces { tcx, body, mdpe, skip_unreachable_unwind: false }
}
pub fn skipping_unreachable_unwind(mut self) -> Self {
self.skip_unreachable_unwind = true;
self
}
pub fn is_unwind_dead(
&self,
place: mir::Place<'tcx>,
state: &MaybeReachable<ChunkedBitSet<MovePathIndex>>,
) -> bool {
if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) {
let mut maybe_live = false;
on_all_drop_children_bits(self.tcx, self.body, self.mdpe, path, |child| {
maybe_live |= state.contains(child);
});
!maybe_live
} else {
false
}
}
}
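The `is_unwind_dead` check defined above is what the `terminator_effect` later in this file uses to prune a `Drop`'s unwind edge. A hedged standalone sketch of the predicate, with plain indices standing in for `MovePathIndex` and `ChunkedBitSet`: the unwind edge is dead when none of the dropped place's drop-relevant children may be initialized, because the drop is then statically a no-op and cannot unwind.

use std::collections::HashSet;

/// `maybe_init`: move paths that may be initialized at this program point.
/// `drop_children`: the dropped place's move paths whose types need dropping.
fn is_unwind_dead(maybe_init: &HashSet<usize>, drop_children: &[usize]) -> bool {
    drop_children.iter().all(|path| !maybe_init.contains(path))
}

fn main() {
    let maybe_init = HashSet::from([0, 2]);
    // No drop-child may be initialized: the drop is a no-op, the unwind edge is dead.
    assert!(is_unwind_dead(&maybe_init, &[5]));
    // Path 2 may be initialized: the drop may run and may unwind.
    assert!(!is_unwind_dead(&maybe_init, &[2, 5]));
}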
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // maybe-uninit:
/// // {a, b, c, d}
/// let a = S; let mut b = S; let c; let d; // { c, d}
///
/// if pred {
/// drop(a); // {a, c, d}
/// b = S; // {a, c, d}
///
/// } else {
/// drop(b); // { b, c, d}
/// d = S; // { b, c }
///
/// } // {a, b, c, d}
///
/// c = S; // {a, b, d}
/// }
/// ```
///
/// To determine whether a place *must* be uninitialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeInitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
mark_inactive_variants_as_uninit: bool,
skip_unreachable_unwind: BitSet<mir::BasicBlock>,
}
impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeUninitializedPlaces {
tcx,
body,
mdpe,
mark_inactive_variants_as_uninit: false,
skip_unreachable_unwind: BitSet::new_empty(body.basic_blocks.len()),
}
}
/// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
/// enum discriminant.
///
/// This is correct in a vacuum but is not the default because it causes problems in the borrow
/// checker, where this information gets propagated along `FakeEdge`s.
pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
self.mark_inactive_variants_as_uninit = true;
self
}
pub fn skipping_unreachable_unwind(
mut self,
unreachable_unwind: BitSet<mir::BasicBlock>,
) -> Self {
self.skip_unreachable_unwind = unreachable_unwind;
self
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `DefinitelyInitializedPlaces` tracks all places that are definitely
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // definite-init:
/// // { }
/// let a = S; let mut b = S; let c; let d; // {a, b }
///
/// if pred {
/// drop(a); // { b, }
/// b = S; // { b, }
///
/// } else {
/// drop(b); // {a, }
/// d = S; // {a, d}
///
/// } // { }
///
/// c = S; // { c }
/// }
/// ```
///
/// To determine whether a place *may* be uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of this data.
///
/// Similarly, at a given `drop` statement, the set-difference between
/// this data and `MaybeInitializedPlaces` yields the set of places
/// that would require a dynamic drop-flag at that statement.
pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
DefinitelyInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // ever-init:
/// // { }
/// let a = S; let mut b = S; let c; let d; // {a, b }
///
/// if pred {
/// drop(a); // {a, b, }
/// b = S; // {a, b, }
///
/// } else {
/// drop(b); // {a, b, }
/// d = S; // {a, b, d }
///
/// } // {a, b, d }
///
/// c = S; // {a, b, c, d }
/// }
/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
#[allow(dead_code)]
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
EverInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.kill(path),
DropFlagState::Present => trans.gen(path),
}
}
}
impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.gen(path),
DropFlagState::Present => trans.kill(path),
}
}
}
impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.kill(path),
DropFlagState::Present => trans.gen(path),
}
}
}
impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
type Domain = MaybeReachable<ChunkedBitSet<MovePathIndex>>;
const NAME: &'static str = "maybe_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = uninitialized
MaybeReachable::Unreachable
}
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
*state =
MaybeReachable::Reachable(ChunkedBitSet::new_empty(self.move_data().move_paths.len()));
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.gen(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn domain_size(&self, _: &Body<'tcx>) -> usize {
self.move_data().move_paths.len()
}
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
// Mark all places as "maybe init" if they are mutably borrowed. See #90752.
if self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration
&& let Some((_, rvalue)) = statement.kind.as_assign()
&& let mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
// FIXME: Does `&raw const foo` allow mutation? See #90413.
| mir::Rvalue::AddressOf(_, place) = rvalue
&& let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref())
{
on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
trans.gen(child);
})
}
}
fn terminator_effect<'mir>(
&mut self,
state: &mut Self::Domain,
terminator: &'mir mir::Terminator<'tcx>,
location: Location,
) -> TerminatorEdges<'mir, 'tcx> {
let mut edges = terminator.edges();
if self.skip_unreachable_unwind
&& let mir::TerminatorKind::Drop { target, unwind, place, replace: _ } = terminator.kind
&& matches!(unwind, mir::UnwindAction::Cleanup(_))
&& self.is_unwind_dead(place, state)
{
edges = TerminatorEdges::Single(target);
}
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(state, path, s)
});
edges
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 1 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.gen(mpi);
},
);
});
}
fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self,
block: mir::BasicBlock,
discr: &mir::Operand<'tcx>,
edge_effects: &mut impl SwitchIntEdgeEffects<G>,
) {
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
let enum_ = discr.place().and_then(|discr| {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
// are yielded by `AdtDef::discriminants`. We rely on this to match each
// discriminant in `values` to its corresponding variant in linear time.
let (variant, _) = discriminants
.find(|&(_, discr)| discr.val == value)
.expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
// Kill all move paths that correspond to variants we know to be inactive along this
// particular outgoing edge of a `SwitchInt`.
drop_flag_effects::on_all_inactive_variants(
self.tcx,
self.body,
self.move_data(),
enum_place,
variant,
|mpi| trans.kill(mpi),
);
});
}
}
impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<MovePathIndex>;
const NAME: &'static str = "maybe_uninit";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = initialized (start_block_effect counters this at outset)
ChunkedBitSet::new_empty(self.move_data().move_paths.len())
}
// sets on_entry bits for Arg places
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
// set all bits to 1 (uninit) before gathering counter-evidence
state.insert_all();
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.remove(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn domain_size(&self, _: &Body<'tcx>) -> usize {
self.move_data().move_paths.len()
}
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
// Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
// mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
}
fn terminator_effect<'mir>(
&mut self,
trans: &mut Self::Domain,
terminator: &'mir mir::Terminator<'tcx>,
location: Location,
) -> TerminatorEdges<'mir, 'tcx> {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
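// `skip_unreachable_unwind` holds the blocks whose `Drop` cleanup edge was already shown
// to be dead by the `MaybeInitializedPlaces` results (see `compute_dead_unwinds` in the
// drop-elaboration pass), so only the non-unwind `target` edge is followed for them.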
if self.skip_unreachable_unwind.contains(location.block) {
let mir::TerminatorKind::Drop { target, unwind, .. } = terminator.kind else { bug!() };
assert!(matches!(unwind, mir::UnwindAction::Cleanup(_)));
TerminatorEdges::Single(target)
} else {
terminator.edges()
}
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 0 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.kill(mpi);
},
);
});
}
fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self,
block: mir::BasicBlock,
discr: &mir::Operand<'tcx>,
edge_effects: &mut impl SwitchIntEdgeEffects<G>,
) {
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
if !self.mark_inactive_variants_as_uninit {
return;
}
let enum_ = discr.place().and_then(|discr| {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
// are yielded by `AdtDef::discriminants`. We rely on this to match each
// discriminant in `values` to its corresponding variant in linear time.
let (variant, _) = discriminants
.find(|&(_, discr)| discr.val == value)
.expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
// Mark all move paths that correspond to variants other than this one as maybe
// uninitialized (in reality, they are *definitely* uninitialized).
drop_flag_effects::on_all_inactive_variants(
self.tcx,
self.body,
self.move_data(),
enum_place,
variant,
|mpi| trans.gen(mpi),
);
});
}
}
impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
/// Use set intersection as the join operator.
type Domain = lattice::Dual<BitSet<MovePathIndex>>;
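// `lattice::Dual` flips the join/meet of the underlying bitset lattice, so the framework's
// join becomes set intersection: a place is *definitely* initialized at a join point only
// if it is initialized along every incoming edge.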
const NAME: &'static str = "definite_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = initialized (`initialize_start_block` counteracts this at the outset)
lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
}
// sets on_entry bits for Arg places
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
state.0.clear();
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.0.insert(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn domain_size(&self, _: &Body<'tcx>) -> usize {
self.move_data().move_paths.len()
}
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
})
}
fn terminator_effect<'mir>(
&mut self,
trans: &mut Self::Domain,
terminator: &'mir mir::Terminator<'tcx>,
location: Location,
) -> TerminatorEdges<'mir, 'tcx> {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
terminator.edges()
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 1 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.gen(mpi);
},
);
});
}
}
impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<InitIndex>;
const NAME: &'static str = "ever_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = no initialized variables by default
ChunkedBitSet::new_empty(self.move_data().inits.len())
}
fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
for arg_init in 0..body.arg_count {
state.insert(InitIndex::new(arg_init));
}
}
}
impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
type Idx = InitIndex;
fn domain_size(&self, _: &Body<'tcx>) -> usize {
self.move_data().inits.len()
}
#[instrument(skip(self, trans), level = "debug")]
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>,
location: Location,
) {
let move_data = self.move_data();
let init_path_map = &move_data.init_path_map;
let init_loc_map = &move_data.init_loc_map;
let rev_lookup = &move_data.rev_lookup;
debug!("initializes move_indexes {:?}", &init_loc_map[location]);
trans.gen_all(init_loc_map[location].iter().copied());
if let mir::StatementKind::StorageDead(local) = stmt.kind {
// End inits for StorageDead, so that an immutable variable can
// be reinitialized on the next iteration of the loop.
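// Illustrative example: in `loop { let x = f(); /* ... */ }`, each iteration emits a
// `StorageDead(x)` at the end of the scope, clearing the ever-init bits so that the next
// iteration's `let x = ...` counts as a fresh first assignment of the immutable binding.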
let move_path_index = rev_lookup.find_local(local);
debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
trans.kill_all(init_path_map[move_path_index].iter().copied());
}
}
#[instrument(skip(self, trans, terminator), level = "debug")]
fn terminator_effect<'mir>(
&mut self,
trans: &mut Self::Domain,
terminator: &'mir mir::Terminator<'tcx>,
location: Location,
) -> TerminatorEdges<'mir, 'tcx> {
let (body, move_data) = (self.body, self.move_data());
let term = body[location.block].terminator();
let init_loc_map = &move_data.init_loc_map;
debug!(?term);
debug!("initializes move_indexes {:?}", init_loc_map[location]);
trans.gen_all(
init_loc_map[location]
.iter()
.filter(|init_index| {
move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
})
.copied(),
);
terminator.edges()
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
block: mir::BasicBlock,
_return_places: CallReturnPlaces<'_, 'tcx>,
) {
let move_data = self.move_data();
let init_loc_map = &move_data.init_loc_map;
let call_loc = self.body.terminator_loc(block);
for init_index in &init_loc_map[call_loc] {
trans.gen(*init_index);
}
}
}
/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///
/// ```text
/// ...
/// _42 = discriminant(_1)
/// SwitchInt(_42, ..)
/// ```
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant<'mir, 'tcx>(
tcx: TyCtxt<'tcx>,
body: &'mir mir::Body<'tcx>,
block: &'mir mir::BasicBlockData<'tcx>,
switch_on: mir::Place<'tcx>,
) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
for statement in block.statements.iter().rev() {
match &statement.kind {
mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
if *lhs == switch_on =>
{
match discriminated.ty(body, tcx).ty.kind() {
ty::Adt(def, _) => return Some((*discriminated, *def)),
// `Rvalue::Discriminant` is also used to get the active yield point for a
// generator, but we do not need edge-specific effects in that case. This may
// change in the future.
ty::Generator(..) => return None,
t => bug!("`discriminant` called on unexpected type {:?}", t),
}
}
mir::StatementKind::Coverage(_) => continue,
_ => return None,
}
}
None
}

View file

@ -1,8 +1,10 @@
use rustc_index::bit_set::{BitSet, ChunkedBitSet}; use rustc_index::bit_set::{BitSet, ChunkedBitSet};
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::{self, Local, Location, Place, StatementKind}; use rustc_middle::mir::{
self, CallReturnPlaces, Local, Location, Place, StatementKind, TerminatorEdges,
};
use crate::{Analysis, AnalysisDomain, Backward, CallReturnPlaces, GenKill, GenKillAnalysis}; use crate::{Analysis, AnalysisDomain, Backward, GenKill, GenKillAnalysis};
/// A [live-variable dataflow analysis][liveness]. /// A [live-variable dataflow analysis][liveness].
/// ///
@ -43,6 +45,10 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeLiveLocals {
impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals { impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
type Idx = Local; type Idx = Local;
fn domain_size(&self, body: &mir::Body<'tcx>) -> usize {
body.local_decls.len()
}
fn statement_effect( fn statement_effect(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut impl GenKill<Self::Idx>,
@ -52,13 +58,14 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
TransferFunction(trans).visit_statement(statement, location); TransferFunction(trans).visit_statement(statement, location);
} }
fn terminator_effect( fn terminator_effect<'mir>(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut Self::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'mir mir::Terminator<'tcx>,
location: Location, location: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
TransferFunction(trans).visit_terminator(terminator, location); TransferFunction(trans).visit_terminator(terminator, location);
terminator.edges()
} }
fn call_return_effect( fn call_return_effect(
@ -67,24 +74,19 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
_block: mir::BasicBlock, _block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>, return_places: CallReturnPlaces<'_, 'tcx>,
) { ) {
return_places.for_each(|place| { if let CallReturnPlaces::Yield(resume_place) = return_places {
if let Some(local) = place.as_local() { YieldResumeEffect(trans).visit_place(
trans.kill(local); &resume_place,
} PlaceContext::MutatingUse(MutatingUseContext::Yield),
}); Location::START,
} )
} else {
fn yield_resume_effect( return_places.for_each(|place| {
&mut self, if let Some(local) = place.as_local() {
trans: &mut impl GenKill<Self::Idx>, trans.kill(local);
_resume_block: mir::BasicBlock, }
resume_place: mir::Place<'tcx>, });
) { }
YieldResumeEffect(trans).visit_place(
&resume_place,
PlaceContext::MutatingUse(MutatingUseContext::Yield),
Location::START,
)
} }
} }
@ -97,7 +99,7 @@ where
fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) { fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
if let PlaceContext::MutatingUse(MutatingUseContext::Yield) = context { if let PlaceContext::MutatingUse(MutatingUseContext::Yield) = context {
// The resume place is evaluated and assigned to only after generator resumes, so its // The resume place is evaluated and assigned to only after generator resumes, so its
// effect is handled separately in `yield_resume_effect`. // effect is handled separately in `call_resume_effect`.
return; return;
} }
@ -283,13 +285,14 @@ impl<'a, 'tcx> Analysis<'tcx> for MaybeTransitiveLiveLocals<'a> {
TransferFunction(trans).visit_statement(statement, location); TransferFunction(trans).visit_statement(statement, location);
} }
fn apply_terminator_effect( fn apply_terminator_effect<'mir>(
&mut self, &mut self,
trans: &mut Self::Domain, trans: &mut Self::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'mir mir::Terminator<'tcx>,
location: Location, location: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
TransferFunction(trans).visit_terminator(terminator, location); TransferFunction(trans).visit_terminator(terminator, location);
terminator.edges()
} }
fn apply_call_return_effect( fn apply_call_return_effect(
@ -298,23 +301,18 @@ impl<'a, 'tcx> Analysis<'tcx> for MaybeTransitiveLiveLocals<'a> {
_block: mir::BasicBlock, _block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>, return_places: CallReturnPlaces<'_, 'tcx>,
) { ) {
return_places.for_each(|place| { if let CallReturnPlaces::Yield(resume_place) = return_places {
if let Some(local) = place.as_local() { YieldResumeEffect(trans).visit_place(
trans.remove(local); &resume_place,
} PlaceContext::MutatingUse(MutatingUseContext::Yield),
}); Location::START,
} )
} else {
fn apply_yield_resume_effect( return_places.for_each(|place| {
&mut self, if let Some(local) = place.as_local() {
trans: &mut Self::Domain, trans.remove(local);
_resume_block: mir::BasicBlock, }
resume_place: mir::Place<'tcx>, });
) { }
YieldResumeEffect(trans).visit_place(
&resume_place,
PlaceContext::MutatingUse(MutatingUseContext::Yield),
Location::START,
)
} }
} }

View file

@ -2,768 +2,18 @@
//! bitvectors attached to each basic block, represented via a //! bitvectors attached to each basic block, represented via a
//! zero-sized structure. //! zero-sized structure.
use rustc_index::bit_set::{BitSet, ChunkedBitSet};
use rustc_index::Idx;
use rustc_middle::mir::visit::{MirVisitable, Visitor};
use rustc_middle::mir::{self, Body, Location};
use rustc_middle::ty::{self, TyCtxt};
use crate::drop_flag_effects_for_function_entry;
use crate::drop_flag_effects_for_location;
use crate::elaborate_drops::DropFlagState;
use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
use crate::on_lookup_result_bits;
use crate::MoveDataParamEnv;
use crate::{drop_flag_effects, on_all_children_bits};
use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
mod borrowed_locals; mod borrowed_locals;
mod initialized;
mod liveness; mod liveness;
mod storage_liveness; mod storage_liveness;
pub use self::borrowed_locals::borrowed_locals; pub use self::borrowed_locals::borrowed_locals;
pub use self::borrowed_locals::MaybeBorrowedLocals; pub use self::borrowed_locals::MaybeBorrowedLocals;
pub use self::initialized::{
DefinitelyInitializedPlaces, EverInitializedPlaces, MaybeInitializedPlaces,
MaybeUninitializedPlaces,
};
pub use self::liveness::MaybeLiveLocals; pub use self::liveness::MaybeLiveLocals;
pub use self::liveness::MaybeTransitiveLiveLocals; pub use self::liveness::MaybeTransitiveLiveLocals;
pub use self::liveness::TransferFunction as LivenessTransferFunction; pub use self::liveness::TransferFunction as LivenessTransferFunction;
pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageDead, MaybeStorageLive}; pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageDead, MaybeStorageLive};
/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // maybe-init:
/// // {}
/// let a = S; let mut b = S; let c; let d; // {a, b}
///
/// if pred {
/// drop(a); // { b}
/// b = S; // { b}
///
/// } else {
/// drop(b); // {a}
/// d = S; // {a, d}
///
/// } // {a, b, d}
///
/// c = S; // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place *must* be initialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeUninitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // maybe-uninit:
/// // {a, b, c, d}
/// let a = S; let mut b = S; let c; let d; // { c, d}
///
/// if pred {
/// drop(a); // {a, c, d}
/// b = S; // {a, c, d}
///
/// } else {
/// drop(b); // { b, c, d}
/// d = S; // { b, c }
///
/// } // {a, b, c, d}
///
/// c = S; // {a, b, d}
/// }
/// ```
///
/// To determine whether a place *must* be uninitialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeInitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
mark_inactive_variants_as_uninit: bool,
}
impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
}
/// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
/// enum discriminant.
///
/// This is correct in a vacuum but is not the default because it causes problems in the borrow
/// checker, where this information gets propagated along `FakeEdge`s.
pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
self.mark_inactive_variants_as_uninit = true;
self
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `DefinitelyInitializedPlaces` tracks all places that are definitely
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // definite-init:
/// // { }
/// let a = S; let mut b = S; let c; let d; // {a, b }
///
/// if pred {
/// drop(a); // { b, }
/// b = S; // { b, }
///
/// } else {
/// drop(b); // {a, }
/// d = S; // {a, d}
///
/// } // { }
///
/// c = S; // { c }
/// }
/// ```
///
/// To determine whether a place *may* be uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of this data.
///
/// Similarly, at a given `drop` statement, the set-difference between
/// this data and `MaybeInitializedPlaces` yields the set of places
/// that would require a dynamic drop-flag at that statement.
pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
DefinitelyInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) { // ever-init:
/// // { }
/// let a = S; let mut b = S; let c; let d; // {a, b }
///
/// if pred {
/// drop(a); // {a, b, }
/// b = S; // {a, b, }
///
/// } else {
/// drop(b); // {a, b, }
/// d = S; // {a, b, d }
///
/// } // {a, b, d }
///
/// c = S; // {a, b, c, d }
/// }
/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
#[allow(dead_code)]
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
mdpe: &'a MoveDataParamEnv<'tcx>,
}
impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
EverInitializedPlaces { tcx, body, mdpe }
}
}
impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> {
&self.mdpe.move_data
}
}
impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.kill(path),
DropFlagState::Present => trans.gen(path),
}
}
}
impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.gen(path),
DropFlagState::Present => trans.kill(path),
}
}
}
impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
fn update_bits(
trans: &mut impl GenKill<MovePathIndex>,
path: MovePathIndex,
state: DropFlagState,
) {
match state {
DropFlagState::Absent => trans.kill(path),
DropFlagState::Present => trans.gen(path),
}
}
}
impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<MovePathIndex>;
const NAME: &'static str = "maybe_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = uninitialized
ChunkedBitSet::new_empty(self.move_data().move_paths.len())
}
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.insert(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
// Mark all places as "maybe init" if they are mutably borrowed. See #90752.
for_each_mut_borrow(statement, location, |place| {
let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else {
return;
};
on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
trans.gen(child);
})
})
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
terminator: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
for_each_mut_borrow(terminator, location, |place| {
let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else {
return;
};
on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
trans.gen(child);
})
})
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 1 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.gen(mpi);
},
);
});
}
fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self,
block: mir::BasicBlock,
discr: &mir::Operand<'tcx>,
edge_effects: &mut impl SwitchIntEdgeEffects<G>,
) {
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
let enum_ = discr.place().and_then(|discr| {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
// are yielded by `AdtDef::discriminants`. We rely on this to match each
// discriminant in `values` to its corresponding variant in linear time.
let (variant, _) = discriminants
.find(|&(_, discr)| discr.val == value)
.expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
// Kill all move paths that correspond to variants we know to be inactive along this
// particular outgoing edge of a `SwitchInt`.
drop_flag_effects::on_all_inactive_variants(
self.tcx,
self.body,
self.move_data(),
enum_place,
variant,
|mpi| trans.kill(mpi),
);
});
}
}
impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<MovePathIndex>;
const NAME: &'static str = "maybe_uninit";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = initialized (start_block_effect counters this at outset)
ChunkedBitSet::new_empty(self.move_data().move_paths.len())
}
// sets on_entry bits for Arg places
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
// set all bits to 1 (uninit) before gathering counter-evidence
state.insert_all();
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.remove(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
// Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
// mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
});
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 0 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.kill(mpi);
},
);
});
}
fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
&mut self,
block: mir::BasicBlock,
discr: &mir::Operand<'tcx>,
edge_effects: &mut impl SwitchIntEdgeEffects<G>,
) {
if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
return;
}
if !self.mark_inactive_variants_as_uninit {
return;
}
let enum_ = discr.place().and_then(|discr| {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
// are yielded by `AdtDef::discriminants`. We rely on this to match each
// discriminant in `values` to its corresponding variant in linear time.
let (variant, _) = discriminants
.find(|&(_, discr)| discr.val == value)
.expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
// Mark all move paths that correspond to variants other than this one as maybe
// uninitialized (in reality, they are *definitely* uninitialized).
drop_flag_effects::on_all_inactive_variants(
self.tcx,
self.body,
self.move_data(),
enum_place,
variant,
|mpi| trans.gen(mpi),
);
});
}
}
impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
/// Use set intersection as the join operator.
type Domain = lattice::Dual<BitSet<MovePathIndex>>;
const NAME: &'static str = "definite_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = initialized (start_block_effect counters this at outset)
lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
}
// sets on_entry bits for Arg places
fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
state.0.clear();
drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
assert!(s == DropFlagState::Present);
state.0.insert(path);
});
}
}
impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
type Idx = MovePathIndex;
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_statement: &mir::Statement<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
})
}
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
Self::update_bits(trans, path, s)
})
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
return_places.for_each(|place| {
// when a call returns successfully, that means we need to set
// the bits for that dest_place to 1 (initialized).
on_lookup_result_bits(
self.tcx,
self.body,
self.move_data(),
self.move_data().rev_lookup.find(place.as_ref()),
|mpi| {
trans.gen(mpi);
},
);
});
}
}
impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
type Domain = ChunkedBitSet<InitIndex>;
const NAME: &'static str = "ever_init";
fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
// bottom = no initialized variables by default
ChunkedBitSet::new_empty(self.move_data().inits.len())
}
fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
for arg_init in 0..body.arg_count {
state.insert(InitIndex::new(arg_init));
}
}
}
impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
type Idx = InitIndex;
#[instrument(skip(self, trans), level = "debug")]
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>,
location: Location,
) {
let move_data = self.move_data();
let init_path_map = &move_data.init_path_map;
let init_loc_map = &move_data.init_loc_map;
let rev_lookup = &move_data.rev_lookup;
debug!("initializes move_indexes {:?}", &init_loc_map[location]);
trans.gen_all(init_loc_map[location].iter().copied());
if let mir::StatementKind::StorageDead(local) = stmt.kind {
// End inits for StorageDead, so that an immutable variable can
// be reinitialized on the next iteration of the loop.
let move_path_index = rev_lookup.find_local(local);
debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
trans.kill_all(init_path_map[move_path_index].iter().copied());
}
}
#[instrument(skip(self, trans, _terminator), level = "debug")]
fn terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_terminator: &mir::Terminator<'tcx>,
location: Location,
) {
let (body, move_data) = (self.body, self.move_data());
let term = body[location.block].terminator();
let init_loc_map = &move_data.init_loc_map;
debug!(?term);
debug!("initializes move_indexes {:?}", init_loc_map[location]);
trans.gen_all(
init_loc_map[location]
.iter()
.filter(|init_index| {
move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
})
.copied(),
);
}
fn call_return_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
block: mir::BasicBlock,
_return_places: CallReturnPlaces<'_, 'tcx>,
) {
let move_data = self.move_data();
let init_loc_map = &move_data.init_loc_map;
let call_loc = self.body.terminator_loc(block);
for init_index in &init_loc_map[call_loc] {
trans.gen(*init_index);
}
}
}
/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///
/// ```text
/// ...
/// _42 = discriminant(_1)
/// SwitchInt(_42, ..)
/// ```
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant<'mir, 'tcx>(
tcx: TyCtxt<'tcx>,
body: &'mir mir::Body<'tcx>,
block: &'mir mir::BasicBlockData<'tcx>,
switch_on: mir::Place<'tcx>,
) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
for statement in block.statements.iter().rev() {
match &statement.kind {
mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
if *lhs == switch_on =>
{
match discriminated.ty(body, tcx).ty.kind() {
ty::Adt(def, _) => return Some((*discriminated, *def)),
// `Rvalue::Discriminant` is also used to get the active yield point for a
// generator, but we do not need edge-specific effects in that case. This may
// change in the future.
ty::Generator(..) => return None,
t => bug!("`discriminant` called on unexpected type {:?}", t),
}
}
mir::StatementKind::Coverage(_) => continue,
_ => return None,
}
}
None
}
struct OnMutBorrow<F>(F);
impl<'tcx, F> Visitor<'tcx> for OnMutBorrow<F>
where
F: FnMut(&mir::Place<'tcx>),
{
fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
// FIXME: Does `&raw const foo` allow mutation? See #90413.
match rvalue {
mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
| mir::Rvalue::AddressOf(_, place) => (self.0)(place),
_ => {}
}
self.super_rvalue(rvalue, location)
}
}
/// Calls `f` for each mutable borrow or raw reference in the program.
///
/// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
/// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
/// other analyses will likely need to check for `!Freeze`.
fn for_each_mut_borrow<'tcx>(
mir: &impl MirVisitable<'tcx>,
location: Location,
f: impl FnMut(&mir::Place<'tcx>),
) {
let mut vis = OnMutBorrow(f);
mir.apply(location, &mut vis);
}

View file

@ -1,10 +1,12 @@
pub use super::*; use rustc_index::bit_set::BitSet;
use crate::{CallReturnPlaces, GenKill, ResultsClonedCursor};
use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*; use rustc_middle::mir::*;
use std::borrow::Cow; use std::borrow::Cow;
use super::MaybeBorrowedLocals;
use crate::{GenKill, ResultsClonedCursor};
#[derive(Clone)] #[derive(Clone)]
pub struct MaybeStorageLive<'a> { pub struct MaybeStorageLive<'a> {
always_live_locals: Cow<'a, BitSet<Local>>, always_live_locals: Cow<'a, BitSet<Local>>,
@ -27,12 +29,12 @@ impl<'tcx, 'a> crate::AnalysisDomain<'tcx> for MaybeStorageLive<'a> {
const NAME: &'static str = "maybe_storage_live"; const NAME: &'static str = "maybe_storage_live";
fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain { fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
// bottom = dead // bottom = dead
BitSet::new_empty(body.local_decls.len()) BitSet::new_empty(body.local_decls.len())
} }
fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) { fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size()); assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
for local in self.always_live_locals.iter() { for local in self.always_live_locals.iter() {
on_entry.insert(local); on_entry.insert(local);
@ -47,10 +49,14 @@ impl<'tcx, 'a> crate::AnalysisDomain<'tcx> for MaybeStorageLive<'a> {
impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> { impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> {
type Idx = Local; type Idx = Local;
fn domain_size(&self, body: &Body<'tcx>) -> usize {
body.local_decls.len()
}
fn statement_effect( fn statement_effect(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>, stmt: &Statement<'tcx>,
_: Location, _: Location,
) { ) {
match stmt.kind { match stmt.kind {
@ -60,13 +66,14 @@ impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> {
} }
} }
fn terminator_effect( fn terminator_effect<'mir>(
&mut self, &mut self,
_trans: &mut impl GenKill<Self::Idx>, _trans: &mut Self::Domain,
_: &mir::Terminator<'tcx>, terminator: &'mir Terminator<'tcx>,
_: Location, _: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
// Terminators have no effect // Terminators have no effect
terminator.edges()
} }
fn call_return_effect( fn call_return_effect(
@ -95,12 +102,12 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeStorageDead {
const NAME: &'static str = "maybe_storage_dead"; const NAME: &'static str = "maybe_storage_dead";
fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain { fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
// bottom = live // bottom = live
BitSet::new_empty(body.local_decls.len()) BitSet::new_empty(body.local_decls.len())
} }
fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) { fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size()); assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
// Do not iterate on return place and args, as they are trivially always live. // Do not iterate on return place and args, as they are trivially always live.
for local in body.vars_and_temps_iter() { for local in body.vars_and_temps_iter() {
@ -114,10 +121,14 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeStorageDead {
impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead { impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead {
type Idx = Local; type Idx = Local;
fn domain_size(&self, body: &Body<'tcx>) -> usize {
body.local_decls.len()
}
fn statement_effect( fn statement_effect(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>, stmt: &Statement<'tcx>,
_: Location, _: Location,
) { ) {
match stmt.kind { match stmt.kind {
@ -127,13 +138,14 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead {
} }
} }
fn terminator_effect( fn terminator_effect<'mir>(
&mut self, &mut self,
_trans: &mut impl GenKill<Self::Idx>, _: &mut Self::Domain,
_: &mir::Terminator<'tcx>, terminator: &'mir Terminator<'tcx>,
_: Location, _: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
// Terminators have no effect // Terminators have no effect
terminator.edges()
} }
fn call_return_effect( fn call_return_effect(
@ -172,12 +184,12 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
const NAME: &'static str = "requires_storage"; const NAME: &'static str = "requires_storage";
fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain { fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
// bottom = dead // bottom = dead
BitSet::new_empty(body.local_decls.len()) BitSet::new_empty(body.local_decls.len())
} }
fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) { fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
// The resume argument is live on function entry (we don't care about // The resume argument is live on function entry (we don't care about
// the `self` argument) // the `self` argument)
for arg in body.args_iter().skip(1) { for arg in body.args_iter().skip(1) {
@ -189,10 +201,14 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> { impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
type Idx = Local; type Idx = Local;
fn domain_size(&self, body: &Body<'tcx>) -> usize {
body.local_decls.len()
}
fn before_statement_effect( fn before_statement_effect(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut impl GenKill<Self::Idx>,
stmt: &mir::Statement<'tcx>, stmt: &Statement<'tcx>,
loc: Location, loc: Location,
) { ) {
// If a place is borrowed in a statement, it needs storage for that statement. // If a place is borrowed in a statement, it needs storage for that statement.
@ -225,7 +241,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
fn statement_effect( fn statement_effect(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut impl GenKill<Self::Idx>,
_: &mir::Statement<'tcx>, _: &Statement<'tcx>,
loc: Location, loc: Location,
) { ) {
// If we move from a place then it only stops needing storage *after* // If we move from a place then it only stops needing storage *after*
@ -236,11 +252,14 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
fn before_terminator_effect( fn before_terminator_effect(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut impl GenKill<Self::Idx>,
terminator: &mir::Terminator<'tcx>, terminator: &Terminator<'tcx>,
loc: Location, loc: Location,
) { ) {
// If a place is borrowed in a terminator, it needs storage for that terminator. // If a place is borrowed in a terminator, it needs storage for that terminator.
self.borrowed_locals.mut_analysis().terminator_effect(trans, terminator, loc); self.borrowed_locals
.mut_analysis()
.transfer_function(trans)
.visit_terminator(terminator, loc);
match &terminator.kind { match &terminator.kind {
TerminatorKind::Call { destination, .. } => { TerminatorKind::Call { destination, .. } => {
@ -286,12 +305,12 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
} }
} }
fn terminator_effect( fn terminator_effect<'t>(
&mut self, &mut self,
trans: &mut impl GenKill<Self::Idx>, trans: &mut Self::Domain,
terminator: &mir::Terminator<'tcx>, terminator: &'t Terminator<'tcx>,
loc: Location, loc: Location,
) { ) -> TerminatorEdges<'t, 'tcx> {
match terminator.kind { match terminator.kind {
// For call terminators the destination requires storage for the call // For call terminators the destination requires storage for the call
// and after the call returns successfully, but not after a panic. // and after the call returns successfully, but not after a panic.
@ -323,6 +342,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
} }
self.check_for_move(trans, loc); self.check_for_move(trans, loc);
terminator.edges()
} }
fn call_return_effect( fn call_return_effect(
@ -333,15 +353,6 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
) { ) {
return_places.for_each(|place| trans.gen(place.local)); return_places.for_each(|place| trans.gen(place.local));
} }
fn yield_resume_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
_resume_block: BasicBlock,
resume_place: mir::Place<'tcx>,
) {
trans.gen(resume_place.local);
}
} }
impl<'tcx> MaybeRequiresStorage<'_, '_, 'tcx> { impl<'tcx> MaybeRequiresStorage<'_, '_, 'tcx> {

View file

@ -28,8 +28,8 @@ pub use self::drop_flag_effects::{
}; };
pub use self::framework::{ pub use self::framework::{
fmt, graphviz, lattice, visit_results, Analysis, AnalysisDomain, AnalysisResults, Backward, fmt, graphviz, lattice, visit_results, Analysis, AnalysisDomain, AnalysisResults, Backward,
CallReturnPlaces, CloneAnalysis, Direction, Engine, Forward, GenKill, GenKillAnalysis, CloneAnalysis, Direction, Engine, Forward, GenKill, GenKillAnalysis, JoinSemiLattice,
JoinSemiLattice, Results, ResultsCloned, ResultsClonedCursor, ResultsCursor, ResultsRefCursor, MaybeReachable, Results, ResultsCloned, ResultsClonedCursor, ResultsCursor, ResultsRefCursor,
ResultsVisitable, ResultsVisitor, SwitchIntEdgeEffects, ResultsVisitable, ResultsVisitor, SwitchIntEdgeEffects,
}; };

View file

@ -47,8 +47,7 @@ use rustc_target::abi::{FieldIdx, VariantIdx};
use crate::lattice::{HasBottom, HasTop}; use crate::lattice::{HasBottom, HasTop};
use crate::{ use crate::{
fmt::DebugWithContext, Analysis, AnalysisDomain, CallReturnPlaces, JoinSemiLattice, fmt::DebugWithContext, Analysis, AnalysisDomain, JoinSemiLattice, SwitchIntEdgeEffects,
SwitchIntEdgeEffects,
}; };
pub trait ValueAnalysis<'tcx> { pub trait ValueAnalysis<'tcx> {
@ -242,11 +241,19 @@ pub trait ValueAnalysis<'tcx> {
/// The effect of a successful function call return should not be /// The effect of a successful function call return should not be
/// applied here, see [`Analysis::apply_terminator_effect`]. /// applied here, see [`Analysis::apply_terminator_effect`].
fn handle_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) { fn handle_terminator<'mir>(
&self,
terminator: &'mir Terminator<'tcx>,
state: &mut State<Self::Value>,
) -> TerminatorEdges<'mir, 'tcx> {
self.super_terminator(terminator, state) self.super_terminator(terminator, state)
} }
fn super_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) { fn super_terminator<'mir>(
&self,
terminator: &'mir Terminator<'tcx>,
state: &mut State<Self::Value>,
) -> TerminatorEdges<'mir, 'tcx> {
match &terminator.kind { match &terminator.kind {
TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => { TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
// Effect is applied by `handle_call_return`. // Effect is applied by `handle_call_return`.
@ -258,8 +265,10 @@ pub trait ValueAnalysis<'tcx> {
// They would have an effect, but are not allowed in this phase. // They would have an effect, but are not allowed in this phase.
bug!("encountered disallowed terminator"); bug!("encountered disallowed terminator");
} }
TerminatorKind::SwitchInt { discr, targets } => {
return self.handle_switch_int(discr, targets, state);
}
TerminatorKind::Goto { .. } TerminatorKind::Goto { .. }
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::Resume | TerminatorKind::Resume
| TerminatorKind::Terminate | TerminatorKind::Terminate
| TerminatorKind::Return | TerminatorKind::Return
@ -271,6 +280,7 @@ pub trait ValueAnalysis<'tcx> {
// These terminators have no effect on the analysis. // These terminators have no effect on the analysis.
} }
} }
terminator.edges()
} }
fn handle_call_return( fn handle_call_return(
@ -291,19 +301,22 @@ pub trait ValueAnalysis<'tcx> {
}) })
} }
fn handle_switch_int( fn handle_switch_int<'mir>(
&self, &self,
discr: &Operand<'tcx>, discr: &'mir Operand<'tcx>,
apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>, targets: &'mir SwitchTargets,
) { state: &mut State<Self::Value>,
self.super_switch_int(discr, apply_edge_effects) ) -> TerminatorEdges<'mir, 'tcx> {
self.super_switch_int(discr, targets, state)
} }
fn super_switch_int( fn super_switch_int<'mir>(
&self, &self,
_discr: &Operand<'tcx>, discr: &'mir Operand<'tcx>,
_apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>, targets: &'mir SwitchTargets,
) { _state: &mut State<Self::Value>,
) -> TerminatorEdges<'mir, 'tcx> {
TerminatorEdges::SwitchInt { discr, targets }
} }
fn wrap(self) -> ValueAnalysisWrapper<Self> fn wrap(self) -> ValueAnalysisWrapper<Self>
@ -353,14 +366,16 @@ where
} }
} }
fn apply_terminator_effect( fn apply_terminator_effect<'mir>(
&mut self, &mut self,
state: &mut Self::Domain, state: &mut Self::Domain,
terminator: &Terminator<'tcx>, terminator: &'mir Terminator<'tcx>,
_location: Location, _location: Location,
) { ) -> TerminatorEdges<'mir, 'tcx> {
if state.is_reachable() { if state.is_reachable() {
self.0.handle_terminator(terminator, state); self.0.handle_terminator(terminator, state)
} else {
TerminatorEdges::None
} }
} }
@ -368,7 +383,7 @@ where
&mut self, &mut self,
state: &mut Self::Domain, state: &mut Self::Domain,
_block: BasicBlock, _block: BasicBlock,
return_places: crate::CallReturnPlaces<'_, 'tcx>, return_places: CallReturnPlaces<'_, 'tcx>,
) { ) {
if state.is_reachable() { if state.is_reachable() {
self.0.handle_call_return(return_places, state) self.0.handle_call_return(return_places, state)
@ -378,11 +393,9 @@ where
fn apply_switch_int_edge_effects( fn apply_switch_int_edge_effects(
&mut self, &mut self,
_block: BasicBlock, _block: BasicBlock,
discr: &Operand<'tcx>, _discr: &Operand<'tcx>,
apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>, _apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
) { ) {
// FIXME: Dataflow framework provides no access to current state here.
self.0.handle_switch_int(discr, apply_edge_effects)
} }
} }

View file

@ -13,9 +13,7 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_mir_dataflow::value_analysis::{ use rustc_mir_dataflow::value_analysis::{
Map, State, TrackElem, ValueAnalysis, ValueAnalysisWrapper, ValueOrPlace, Map, State, TrackElem, ValueAnalysis, ValueAnalysisWrapper, ValueOrPlace,
}; };
use rustc_mir_dataflow::{ use rustc_mir_dataflow::{lattice::FlatSet, Analysis, Results, ResultsVisitor};
lattice::FlatSet, Analysis, Results, ResultsVisitor, SwitchIntEdgeEffects,
};
use rustc_span::DUMMY_SP; use rustc_span::DUMMY_SP;
use rustc_target::abi::{Align, FieldIdx, VariantIdx}; use rustc_target::abi::{Align, FieldIdx, VariantIdx};
@ -249,49 +247,27 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
.unwrap_or(FlatSet::Top) .unwrap_or(FlatSet::Top)
} }
fn handle_switch_int( fn handle_switch_int<'mir>(
&self, &self,
discr: &Operand<'tcx>, discr: &'mir Operand<'tcx>,
apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>, targets: &'mir SwitchTargets,
) { state: &mut State<Self::Value>,
// FIXME: The dataflow framework only provides the state if we call `apply()`, which makes ) -> TerminatorEdges<'mir, 'tcx> {
// this more inefficient than it has to be. let value = match self.handle_operand(discr, state) {
let mut discr_value = None; ValueOrPlace::Value(value) => value,
let mut handled = false; ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
apply_edge_effects.apply(|state, target| { };
let discr_value = match discr_value { match value {
Some(value) => value, // We are branching on uninitialized data, this is UB, treat it as unreachable.
None => { // This allows the set of visited edges to grow monotonically with the lattice.
let value = match self.handle_operand(discr, state) { FlatSet::Bottom => TerminatorEdges::None,
ValueOrPlace::Value(value) => value, FlatSet::Elem(ScalarTy(scalar, _)) => {
ValueOrPlace::Place(place) => state.get_idx(place, self.map()), let int = scalar.assert_int();
}; let choice = int.assert_bits(int.size());
let result = match value { TerminatorEdges::Single(targets.target_for_value(choice))
FlatSet::Top => FlatSet::Top,
FlatSet::Elem(ScalarTy(scalar, _)) => {
let int = scalar.assert_int();
FlatSet::Elem(int.assert_bits(int.size()))
}
FlatSet::Bottom => FlatSet::Bottom,
};
discr_value = Some(result);
result
}
};
let FlatSet::Elem(choice) = discr_value else {
// Do nothing if we don't know which branch will be taken.
return;
};
if target.value.map(|n| n == choice).unwrap_or(!handled) {
// Branch is taken. Has no effect on state.
handled = true;
} else {
// Branch is not taken.
state.mark_unreachable();
} }
}) FlatSet::Top => TerminatorEdges::SwitchInt { discr, targets },
}
} }
} }

View file

@ -48,6 +48,7 @@ use std::fmt;
pub struct ElaborateDrops; pub struct ElaborateDrops;
impl<'tcx> MirPass<'tcx> for ElaborateDrops { impl<'tcx> MirPass<'tcx> for ElaborateDrops {
#[instrument(level = "trace", skip(self, tcx, body))]
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
debug!("elaborate_drops({:?} @ {:?})", body.source, body.span); debug!("elaborate_drops({:?} @ {:?})", body.source, body.span);
@ -65,23 +66,23 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {
}; };
let elaborate_patch = { let elaborate_patch = {
let env = MoveDataParamEnv { move_data, param_env }; let env = MoveDataParamEnv { move_data, param_env };
remove_dead_unwinds(tcx, body, &env);
let inits = MaybeInitializedPlaces::new(tcx, body, &env) let mut inits = MaybeInitializedPlaces::new(tcx, body, &env)
.skipping_unreachable_unwind()
.into_engine(tcx, body) .into_engine(tcx, body)
.pass_name("elaborate_drops") .pass_name("elaborate_drops")
.iterate_to_fixpoint() .iterate_to_fixpoint()
.into_results_cursor(body); .into_results_cursor(body);
let dead_unwinds = compute_dead_unwinds(&body, &mut inits);
let uninits = MaybeUninitializedPlaces::new(tcx, body, &env) let uninits = MaybeUninitializedPlaces::new(tcx, body, &env)
.mark_inactive_variants_as_uninit() .mark_inactive_variants_as_uninit()
.skipping_unreachable_unwind(dead_unwinds)
.into_engine(tcx, body) .into_engine(tcx, body)
.pass_name("elaborate_drops") .pass_name("elaborate_drops")
.iterate_to_fixpoint() .iterate_to_fixpoint()
.into_results_cursor(body); .into_results_cursor(body);
let reachable = traversal::reachable_as_bitset(body);
let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths); let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths);
ElaborateDropsCtxt { ElaborateDropsCtxt {
tcx, tcx,
@ -90,7 +91,6 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {
init_data: InitializationData { inits, uninits }, init_data: InitializationData { inits, uninits },
drop_flags, drop_flags,
patch: MirPatch::new(body), patch: MirPatch::new(body),
reachable,
} }
.elaborate() .elaborate()
}; };
@ -99,65 +99,30 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {
} }
} }
/// Removes unwind edges which are known to be unreachable, because they are in `drop` terminators /// Records unwind edges which are known to be unreachable, because they are in `drop` terminators
/// that can't drop anything. /// that can't drop anything.
fn remove_dead_unwinds<'tcx>( #[instrument(level = "trace", skip(body, flow_inits), ret)]
tcx: TyCtxt<'tcx>, fn compute_dead_unwinds<'mir, 'tcx>(
body: &mut Body<'tcx>, body: &'mir Body<'tcx>,
env: &MoveDataParamEnv<'tcx>, flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
) { ) -> BitSet<BasicBlock> {
debug!("remove_dead_unwinds({:?})", body.span);
// We only need to do this pass once, because unwind edges can only // We only need to do this pass once, because unwind edges can only
// reach cleanup blocks, which can't have unwind edges themselves. // reach cleanup blocks, which can't have unwind edges themselves.
let mut dead_unwinds = Vec::new(); let mut dead_unwinds = BitSet::new_empty(body.basic_blocks.len());
let mut flow_inits = MaybeInitializedPlaces::new(tcx, body, &env)
.into_engine(tcx, body)
.pass_name("remove_dead_unwinds")
.iterate_to_fixpoint()
.into_results_cursor(body);
for (bb, bb_data) in body.basic_blocks.iter_enumerated() { for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
let place = match bb_data.terminator().kind { let TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. } =
TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. } => place, bb_data.terminator().kind
_ => continue, else {
};
debug!("remove_dead_unwinds @ {:?}: {:?}", bb, bb_data);
let LookupResult::Exact(path) = env.move_data.rev_lookup.find(place.as_ref()) else {
debug!("remove_dead_unwinds: has parent; skipping");
continue; continue;
}; };
flow_inits.seek_before_primary_effect(body.terminator_loc(bb)); flow_inits.seek_before_primary_effect(body.terminator_loc(bb));
debug!( if flow_inits.analysis().is_unwind_dead(place, flow_inits.get()) {
"remove_dead_unwinds @ {:?}: path({:?})={:?}; init_data={:?}", dead_unwinds.insert(bb);
bb,
place,
path,
flow_inits.get()
);
let mut maybe_live = false;
on_all_drop_children_bits(tcx, body, &env, path, |child| {
maybe_live |= flow_inits.contains(child);
});
debug!("remove_dead_unwinds @ {:?}: maybe_live={}", bb, maybe_live);
if !maybe_live {
dead_unwinds.push(bb);
} }
} }
if dead_unwinds.is_empty() { dead_unwinds
return;
}
let basic_blocks = body.basic_blocks.as_mut();
for &bb in dead_unwinds.iter() {
if let Some(unwind) = basic_blocks[bb].terminator_mut().unwind_mut() {
*unwind = UnwindAction::Unreachable;
}
}
} }
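Where the old `remove_dead_unwinds` rewrote the terminators of these blocks to `UnwindAction::Unreachable`, `compute_dead_unwinds` only records them in a `BitSet` and hands that set to `MaybeUninitializedPlaces::skipping_unreachable_unwind` above. Conceptually, the uninitialized-places analysis then declines to propagate state along a dead unwind edge rather than requiring the CFG to be rewritten first. A rough, illustrative sketch of that idea (plain Rust, not the actual `rustc_mir_dataflow` machinery):

```rust
use std::collections::HashSet;

type Block = usize;

// Propagate a block's terminator effect to its successors, skipping successors
// that are only reached through an unwind edge recorded as dead.
fn propagate(
    block: Block,
    successors: &[(Block, /* is_unwind_edge */ bool)],
    dead_unwinds: &HashSet<Block>,
    mut join: impl FnMut(Block),
) {
    for &(succ, is_unwind_edge) in successors {
        // A dead unwind edge contributes nothing to the fixpoint.
        if is_unwind_edge && dead_unwinds.contains(&block) {
            continue;
        }
        join(succ);
    }
}

fn main() {
    // Block 2 is a `drop` whose unwind edge is known to be dead: only the
    // normal successor 3 is joined, not the cleanup block 7.
    let dead_unwinds: HashSet<Block> = HashSet::from([2]);
    let mut reached = Vec::new();
    propagate(2, &[(3, false), (7, true)], &dead_unwinds, |succ| reached.push(succ));
    assert_eq!(reached, vec![3]);
}
```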
struct InitializationData<'mir, 'tcx> { struct InitializationData<'mir, 'tcx> {
@ -290,7 +255,6 @@ struct ElaborateDropsCtxt<'a, 'tcx> {
init_data: InitializationData<'a, 'tcx>, init_data: InitializationData<'a, 'tcx>,
drop_flags: IndexVec<MovePathIndex, Option<Local>>, drop_flags: IndexVec<MovePathIndex, Option<Local>>,
patch: MirPatch<'tcx>, patch: MirPatch<'tcx>,
reachable: BitSet<BasicBlock>,
} }
impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
@ -330,9 +294,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
fn collect_drop_flags(&mut self) { fn collect_drop_flags(&mut self) {
for (bb, data) in self.body.basic_blocks.iter_enumerated() { for (bb, data) in self.body.basic_blocks.iter_enumerated() {
if !self.reachable.contains(bb) {
continue;
}
let terminator = data.terminator(); let terminator = data.terminator();
let place = match terminator.kind { let place = match terminator.kind {
TerminatorKind::Drop { ref place, .. } => place, TerminatorKind::Drop { ref place, .. } => place,
@ -384,9 +345,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
fn elaborate_drops(&mut self) { fn elaborate_drops(&mut self) {
for (bb, data) in self.body.basic_blocks.iter_enumerated() { for (bb, data) in self.body.basic_blocks.iter_enumerated() {
if !self.reachable.contains(bb) {
continue;
}
let loc = Location { block: bb, statement_index: data.statements.len() }; let loc = Location { block: bb, statement_index: data.statements.len() };
let terminator = data.terminator(); let terminator = data.terminator();
@ -465,9 +423,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
fn drop_flags_for_fn_rets(&mut self) { fn drop_flags_for_fn_rets(&mut self) {
for (bb, data) in self.body.basic_blocks.iter_enumerated() { for (bb, data) in self.body.basic_blocks.iter_enumerated() {
if !self.reachable.contains(bb) {
continue;
}
if let TerminatorKind::Call { if let TerminatorKind::Call {
destination, destination,
target: Some(tgt), target: Some(tgt),
@ -506,9 +461,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// clobbered before they are read. // clobbered before they are read.
for (bb, data) in self.body.basic_blocks.iter_enumerated() { for (bb, data) in self.body.basic_blocks.iter_enumerated() {
if !self.reachable.contains(bb) {
continue;
}
debug!("drop_flags_for_locs({:?})", data); debug!("drop_flags_for_locs({:?})", data);
for i in 0..(data.statements.len() + 1) { for i in 0..(data.statements.len() + 1) {
debug!("drop_flag_for_locs: stmt {}", i); debug!("drop_flag_for_locs: stmt {}", i);


@ -4,7 +4,9 @@ use rustc_middle::ty::GenericArgsRef;
use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, VariantDef}; use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, VariantDef};
use rustc_mir_dataflow::impls::MaybeInitializedPlaces; use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex}; use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
use rustc_mir_dataflow::{self, move_path_children_matching, Analysis, MoveDataParamEnv}; use rustc_mir_dataflow::{
self, move_path_children_matching, Analysis, MaybeReachable, MoveDataParamEnv,
};
use rustc_target::abi::FieldIdx; use rustc_target::abi::FieldIdx;
use crate::MirPass; use crate::MirPass;
@ -41,6 +43,7 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops {
let TerminatorKind::Drop { place, .. } = &terminator.kind else { continue }; let TerminatorKind::Drop { place, .. } = &terminator.kind else { continue };
maybe_inits.seek_before_primary_effect(body.terminator_loc(bb)); maybe_inits.seek_before_primary_effect(body.terminator_loc(bb));
let MaybeReachable::Reachable(maybe_inits) = maybe_inits.get() else { continue };
// If there's no move path for the dropped place, it's probably a `Deref`. Let it alone. // If there's no move path for the dropped place, it's probably a `Deref`. Let it alone.
let LookupResult::Exact(mpi) = mdpe.move_data.rev_lookup.find(place.as_ref()) else { let LookupResult::Exact(mpi) = mdpe.move_data.rev_lookup.find(place.as_ref()) else {
@ -50,7 +53,7 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops {
let should_keep = is_needs_drop_and_init( let should_keep = is_needs_drop_and_init(
tcx, tcx,
param_env, param_env,
maybe_inits.get(), maybe_inits,
&mdpe.move_data, &mdpe.move_data,
place.ty(body, tcx).ty, place.ty(body, tcx).ty,
mpi, mpi,
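The new `let MaybeReachable::Reachable(maybe_inits) = maybe_inits.get() else { continue };` reflects a change in the analysis domain: the initialized-places state now distinguishes "this point was never reached" from "reached, with this set of maybe-initialized move paths", and `RemoveUninitDrops` simply skips drops in unreachable code. A small sketch of the shape such a domain can take (illustrative only, not the exact `rustc_mir_dataflow` type):

```rust
use std::collections::HashSet;

#[derive(Debug)]
enum MaybeReachable<T> {
    Unreachable,
    Reachable(T),
}

impl MaybeReachable<HashSet<usize>> {
    // Join for a forward "maybe initialized" analysis: `Unreachable` acts as bottom,
    // so joining it with any reachable state yields that reachable state.
    fn join(&mut self, other: &Self) {
        let MaybeReachable::Reachable(other_set) = other else { return };
        match self {
            MaybeReachable::Unreachable => {
                *self = MaybeReachable::Reachable(other_set.clone());
            }
            MaybeReachable::Reachable(set) => {
                set.extend(other_set.iter().copied());
            }
        }
    }
}

fn main() {
    // Before any predecessor has been processed, the point is unreachable.
    let mut at_drop: MaybeReachable<HashSet<usize>> = MaybeReachable::Unreachable;
    // One reachable predecessor where move paths 0 and 2 may be initialized.
    at_drop.join(&MaybeReachable::Reachable(HashSet::from([0, 2])));

    // What the pass now does: only inspect the set if the point is reachable.
    if let MaybeReachable::Reachable(maybe_inits) = &at_drop {
        println!("maybe-initialized move paths at the drop: {maybe_inits:?}");
    }
}
```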


@ -47,7 +47,8 @@
bb2 (cleanup): { bb2 (cleanup): {
_5 = move _6; _5 = move _6;
drop(_6) -> [return: bb6, unwind terminate]; - drop(_6) -> [return: bb6, unwind terminate];
+ goto -> bb6;
} }
bb3: { bb3: {
@ -70,7 +71,8 @@
} }
bb6 (cleanup): { bb6 (cleanup): {
drop(_5) -> [return: bb7, unwind terminate]; - drop(_5) -> [return: bb7, unwind terminate];
+ goto -> bb7;
} }
bb7 (cleanup): { bb7 (cleanup): {
@ -80,10 +82,6 @@
bb8 (cleanup): { bb8 (cleanup): {
resume; resume;
+ }
+
+ bb9 (cleanup): {
+ unreachable;
} }
} }


@ -47,7 +47,8 @@
bb3 (cleanup): { bb3 (cleanup): {
_2 = move _5; _2 = move _5;
drop(_5) -> [return: bb8, unwind terminate]; - drop(_5) -> [return: bb8, unwind terminate];
+ goto -> bb8;
} }
bb4: { bb4: {
@ -80,7 +81,7 @@
bb9 (cleanup): { bb9 (cleanup): {
- drop(_1) -> [return: bb10, unwind terminate]; - drop(_1) -> [return: bb10, unwind terminate];
+ goto -> bb13; + goto -> bb12;
} }
bb10 (cleanup): { bb10 (cleanup): {
@ -88,15 +89,11 @@
+ } + }
+ +
+ bb11 (cleanup): { + bb11 (cleanup): {
+ unreachable;
+ }
+
+ bb12 (cleanup): {
+ drop(_1) -> [return: bb10, unwind terminate]; + drop(_1) -> [return: bb10, unwind terminate];
+ } + }
+ +
+ bb13 (cleanup): { + bb12 (cleanup): {
+ switchInt(_6) -> [0: bb10, otherwise: bb12]; + switchInt(_6) -> [0: bb10, otherwise: bb11];
} }
} }
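In the expected MIR above, cleanup drops of places that can have nothing left to drop on that path become plain `goto`s, and the remaining run-time decisions go through drop flags tested with `switchInt` (here the flag is `_6`). The following is a toy, source-level analogue of that drop-flag pattern, written as ordinary Rust rather than MIR; the names are illustrative.

```rust
// `needs_drop` plays the role of the drop flag (`_6` above); the branch on it
// corresponds to a `switchInt(flag)` terminator choosing between a real drop
// and a plain `goto`.
fn cleanup(needs_drop: bool, slot: &mut Option<String>) {
    if needs_drop {
        drop(slot.take()); // corresponds to the `drop(..) -> [return: .., unwind ..]` block
    }
    // when the flag is 0, control just falls through (the `goto` edge)
}

fn main() {
    let mut slot = Some(String::from("hello"));
    cleanup(true, &mut slot);
    assert!(slot.is_none());
}
```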


@ -47,7 +47,8 @@
bb3 (cleanup): { bb3 (cleanup): {
_2 = move _5; _2 = move _5;
drop(_5) -> [return: bb8, unwind terminate]; - drop(_5) -> [return: bb8, unwind terminate];
+ goto -> bb8;
} }
bb4: { bb4: {
@ -80,7 +81,7 @@
bb9 (cleanup): { bb9 (cleanup): {
- drop(_1) -> [return: bb10, unwind terminate]; - drop(_1) -> [return: bb10, unwind terminate];
+ goto -> bb13; + goto -> bb12;
} }
bb10 (cleanup): { bb10 (cleanup): {
@ -88,15 +89,11 @@
+ } + }
+ +
+ bb11 (cleanup): { + bb11 (cleanup): {
+ unreachable;
+ }
+
+ bb12 (cleanup): {
+ drop(_1) -> [return: bb10, unwind terminate]; + drop(_1) -> [return: bb10, unwind terminate];
+ } + }
+ +
+ bb13 (cleanup): { + bb12 (cleanup): {
+ switchInt(_6) -> [0: bb10, otherwise: bb12]; + switchInt(_6) -> [0: bb10, otherwise: bb11];
} }
} }


@ -54,8 +54,12 @@
} }
bb4 (cleanup): { bb4 (cleanup): {
+ _7 = const true;
+ _8 = const true;
+ _9 = const true;
_1 = move _3; _1 = move _3;
drop(_3) -> [return: bb11, unwind terminate]; - drop(_3) -> [return: bb11, unwind terminate];
+ goto -> bb11;
} }
bb5: { bb5: {
@ -86,7 +90,7 @@
bb9: { bb9: {
StorageDead(_2); StorageDead(_2);
- drop(_1) -> [return: bb10, unwind: bb12]; - drop(_1) -> [return: bb10, unwind: bb12];
+ goto -> bb19; + goto -> bb18;
} }
bb10: { bb10: {
@ -106,43 +110,39 @@
resume; resume;
+ } + }
+ +
+ bb13 (cleanup): { + bb13: {
+ unreachable;
+ }
+
+ bb14: {
+ _7 = const false; + _7 = const false;
+ goto -> bb10; + goto -> bb10;
+ } + }
+ +
+ bb15 (cleanup): { + bb14 (cleanup): {
+ goto -> bb12; + goto -> bb12;
+ } + }
+ +
+ bb16: { + bb15: {
+ drop(_1) -> [return: bb14, unwind: bb12]; + drop(_1) -> [return: bb13, unwind: bb12];
+ } + }
+ +
+ bb17 (cleanup): { + bb16 (cleanup): {
+ drop(_1) -> [return: bb12, unwind terminate]; + drop(_1) -> [return: bb12, unwind terminate];
+ } + }
+ +
+ bb18: { + bb17: {
+ _10 = discriminant(_1); + _10 = discriminant(_1);
+ switchInt(move _10) -> [0: bb14, otherwise: bb16]; + switchInt(move _10) -> [0: bb13, otherwise: bb15];
+ } + }
+ +
+ bb19: { + bb18: {
+ switchInt(_7) -> [0: bb14, otherwise: bb18]; + switchInt(_7) -> [0: bb13, otherwise: bb17];
+ }
+
+ bb19 (cleanup): {
+ _11 = discriminant(_1);
+ switchInt(move _11) -> [0: bb14, otherwise: bb16];
+ } + }
+ +
+ bb20 (cleanup): { + bb20 (cleanup): {
+ _11 = discriminant(_1); + switchInt(_7) -> [0: bb12, otherwise: bb19];
+ switchInt(move _11) -> [0: bb15, otherwise: bb17];
+ }
+
+ bb21 (cleanup): {
+ switchInt(_7) -> [0: bb12, otherwise: bb20];
} }
} }


@ -54,8 +54,12 @@
} }
bb4 (cleanup): { bb4 (cleanup): {
+ _7 = const true;
+ _8 = const true;
+ _9 = const true;
_1 = move _3; _1 = move _3;
drop(_3) -> [return: bb11, unwind terminate]; - drop(_3) -> [return: bb11, unwind terminate];
+ goto -> bb11;
} }
bb5: { bb5: {
@ -86,7 +90,7 @@
bb9: { bb9: {
StorageDead(_2); StorageDead(_2);
- drop(_1) -> [return: bb10, unwind continue]; - drop(_1) -> [return: bb10, unwind continue];
+ goto -> bb19; + goto -> bb18;
} }
bb10: { bb10: {
@ -106,43 +110,39 @@
resume; resume;
+ } + }
+ +
+ bb13 (cleanup): { + bb13: {
+ unreachable;
+ }
+
+ bb14: {
+ _7 = const false; + _7 = const false;
+ goto -> bb10; + goto -> bb10;
+ } + }
+ +
+ bb15 (cleanup): { + bb14 (cleanup): {
+ goto -> bb12; + goto -> bb12;
+ } + }
+ +
+ bb16: { + bb15: {
+ drop(_1) -> [return: bb14, unwind: bb12]; + drop(_1) -> [return: bb13, unwind: bb12];
+ } + }
+ +
+ bb17 (cleanup): { + bb16 (cleanup): {
+ drop(_1) -> [return: bb12, unwind terminate]; + drop(_1) -> [return: bb12, unwind terminate];
+ } + }
+ +
+ bb18: { + bb17: {
+ _10 = discriminant(_1); + _10 = discriminant(_1);
+ switchInt(move _10) -> [0: bb14, otherwise: bb16]; + switchInt(move _10) -> [0: bb13, otherwise: bb15];
+ } + }
+ +
+ bb19: { + bb18: {
+ switchInt(_7) -> [0: bb14, otherwise: bb18]; + switchInt(_7) -> [0: bb13, otherwise: bb17];
+ }
+
+ bb19 (cleanup): {
+ _11 = discriminant(_1);
+ switchInt(move _11) -> [0: bb14, otherwise: bb16];
+ } + }
+ +
+ bb20 (cleanup): { + bb20 (cleanup): {
+ _11 = discriminant(_1); + switchInt(_7) -> [0: bb12, otherwise: bb19];
+ switchInt(move _11) -> [0: bb15, otherwise: bb17];
+ }
+
+ bb21 (cleanup): {
+ switchInt(_7) -> [0: bb12, otherwise: bb20];
} }
} }