use crate::deref_separator::deref_finder;
use rustc_index::bit_set::BitSet;
use rustc_index::IndexVec;
use rustc_middle::mir::patch::MirPatch;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};
use rustc_mir_dataflow::elaborate_drops::{elaborate_drop, DropFlagState, Unwind};
use rustc_mir_dataflow::elaborate_drops::{DropElaborator, DropFlagMode, DropStyle};
use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
use rustc_mir_dataflow::on_all_children_bits;
use rustc_mir_dataflow::on_lookup_result_bits;
use rustc_mir_dataflow::MoveDataParamEnv;
use rustc_mir_dataflow::{Analysis, ResultsCursor};
use rustc_span::Span;
use rustc_target::abi::{FieldIdx, VariantIdx};
use std::fmt;

/// During MIR building, Drop terminators are inserted in every place where a drop may occur.
/// However, in this phase, the presence of these terminators does not guarantee that a destructor will run,
/// as the target of the drop may be uninitialized.
/// In general, the compiler cannot determine at compile time whether a destructor will run or not.
///
/// At a high level, this pass refines Drop to only run the destructor if the
/// target is initialized. The way this is achieved is by inserting drop flags for every variable
/// that may be dropped, and then using those flags to determine whether a destructor should run.
/// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or
/// "drop shim" for the type of the dropped place.
///
/// This pass relies on dropped places having an associated move path, which is then used to determine
/// the initialization status of the place and its descendants.
/// It's worth noting that a MIR containing a Drop without an associated move path is probably ill formed,
/// as it would allow running a destructor on a place behind a reference:
///
/// ```text
// fn drop_term<T>(t: &mut T) {
//     mir!(
//         {
//             Drop(*t, exit)
//         }
//         exit = {
//             Return()
//         }
//     )
// }
/// ```
pub struct ElaborateDrops;

impl<'tcx> MirPass<'tcx> for ElaborateDrops {
    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!("elaborate_drops({:?} @ {:?})", body.source, body.span);

        let def_id = body.source.def_id();
        let param_env = tcx.param_env_reveal_all_normalized(def_id);
        // For types that do not need dropping, the behaviour is trivial. So we only need to track
        // init/uninit for types that do need dropping.
        let move_data =
            MoveData::gather_moves(body, tcx, param_env, |ty| ty.needs_drop(tcx, param_env));
        let elaborate_patch = {
            let env = MoveDataParamEnv { move_data, param_env };

            let mut inits = MaybeInitializedPlaces::new(tcx, body, &env)
                .skipping_unreachable_unwind()
                .into_engine(tcx, body)
                .pass_name("elaborate_drops")
                .iterate_to_fixpoint()
                .into_results_cursor(body);
            let dead_unwinds = compute_dead_unwinds(body, &mut inits);

            let uninits = MaybeUninitializedPlaces::new(tcx, body, &env)
                .mark_inactive_variants_as_uninit()
                .skipping_unreachable_unwind(dead_unwinds)
                .into_engine(tcx, body)
                .pass_name("elaborate_drops")
                .iterate_to_fixpoint()
                .into_results_cursor(body);

            let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths);
            ElaborateDropsCtxt {
                tcx,
                body,
                env: &env,
                init_data: InitializationData { inits, uninits },
                drop_flags,
                patch: MirPatch::new(body),
            }
            .elaborate()
        };
        elaborate_patch.apply(body);
        deref_finder(tcx, body);
    }
}

/// Records unwind edges which are known to be unreachable, because they are in `drop` terminators
/// that can't drop anything.
#[instrument(level = "trace", skip(body, flow_inits), ret)]
fn compute_dead_unwinds<'mir, 'tcx>(
    body: &'mir Body<'tcx>,
    flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
) -> BitSet<BasicBlock> {
    // We only need to do this pass once, because unwind edges can only
    // reach cleanup blocks, which can't have unwind edges themselves.
    let mut dead_unwinds = BitSet::new_empty(body.basic_blocks.len());
    for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
        let TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. } =
            bb_data.terminator().kind
        else {
            continue;
        };

        flow_inits.seek_before_primary_effect(body.terminator_loc(bb));
        if flow_inits.analysis().is_unwind_dead(place, flow_inits.get()) {
            dead_unwinds.insert(bb);
        }
    }

    dead_unwinds
}

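/// The two dataflow cursors (maybe-initialized and maybe-uninitialized places) that are stepped
/// in lockstep to query the initialization state of a move path at a given location.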
struct InitializationData<'mir, 'tcx> {
    inits: ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
    uninits: ResultsCursor<'mir, 'tcx, MaybeUninitializedPlaces<'mir, 'tcx>>,
}

impl InitializationData<'_, '_> {
    fn seek_before(&mut self, loc: Location) {
        self.inits.seek_before_primary_effect(loc);
        self.uninits.seek_before_primary_effect(loc);
    }

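    /// Returns whether `path` is maybe-initialized and maybe-uninitialized at the location the
    /// cursors were last moved to by `seek_before`.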
    fn maybe_live_dead(&self, path: MovePathIndex) -> (bool, bool) {
        (self.inits.contains(path), self.uninits.contains(path))
    }
}

struct Elaborator<'a, 'b, 'tcx> {
    ctxt: &'a mut ElaborateDropsCtxt<'b, 'tcx>,
}

impl fmt::Debug for Elaborator<'_, '_, '_> {
    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Ok(())
    }
}

impl<'a, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, '_, 'tcx> {
    type Path = MovePathIndex;

    fn patch(&mut self) -> &mut MirPatch<'tcx> {
        &mut self.ctxt.patch
    }

    fn body(&self) -> &'a Body<'tcx> {
        self.ctxt.body
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.ctxt.tcx
    }

    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.ctxt.param_env()
    }

    #[instrument(level = "debug", skip(self), ret)]
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
        let ((maybe_live, maybe_dead), multipart) = match mode {
            DropFlagMode::Shallow => (self.ctxt.init_data.maybe_live_dead(path), false),
            DropFlagMode::Deep => {
                let mut some_live = false;
                let mut some_dead = false;
                let mut children_count = 0;
                on_all_children_bits(self.ctxt.move_data(), path, |child| {
                    let (live, dead) = self.ctxt.init_data.maybe_live_dead(child);
                    debug!("elaborate_drop: state({:?}) = {:?}", child, (live, dead));
                    some_live |= live;
                    some_dead |= dead;
                    children_count += 1;
                });
                ((some_live, some_dead), children_count != 1)
            }
        };
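        // Summarize the children's states into one of the four drop styles: never initialized
        // (`Dead`), definitely initialized (`Static`), guarded by a single drop flag
        // (`Conditional`), or requiring per-field elaboration (`Open`).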
        match (maybe_live, maybe_dead, multipart) {
            (false, _, _) => DropStyle::Dead,
            (true, false, _) => DropStyle::Static,
            (true, true, false) => DropStyle::Conditional,
            (true, true, true) => DropStyle::Open,
        }
    }

    fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
        match mode {
            DropFlagMode::Shallow => {
                self.ctxt.set_drop_flag(loc, path, DropFlagState::Absent);
            }
            DropFlagMode::Deep => {
                on_all_children_bits(self.ctxt.move_data(), path, |child| {
                    self.ctxt.set_drop_flag(loc, child, DropFlagState::Absent)
                });
            }
        }
    }

    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::Field(idx, _) => idx == field,
            _ => false,
        })
    }

    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                debug_assert!(size == min_length, "min_length should be exact for arrays");
                assert!(!from_end, "from_end should not be used for array element ConstantIndex");
                offset == index
            }
            _ => false,
        })
    }

    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| {
            e == ProjectionElem::Deref
        })
    }

    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::Downcast(_, idx) => idx == variant,
            _ => false,
        })
    }

    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
        self.ctxt.drop_flag(path).map(Operand::Copy)
    }
}

struct ElaborateDropsCtxt<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    env: &'a MoveDataParamEnv<'tcx>,
    init_data: InitializationData<'a, 'tcx>,
    drop_flags: IndexVec<MovePathIndex, Option<Local>>,
    patch: MirPatch<'tcx>,
}

impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
    fn move_data(&self) -> &'b MoveData<'tcx> {
        &self.env.move_data
    }

    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.env.param_env
    }

    fn create_drop_flag(&mut self, index: MovePathIndex, span: Span) {
        let patch = &mut self.patch;
        debug!("create_drop_flag({:?})", self.body.span);
        self.drop_flags[index].get_or_insert_with(|| patch.new_temp(self.tcx.types.bool, span));
    }

    fn drop_flag(&mut self, index: MovePathIndex) -> Option<Place<'tcx>> {
        self.drop_flags[index].map(Place::from)
    }

    /// Create a patch that elaborates all drops in the input MIR.
    fn elaborate(mut self) -> MirPatch<'tcx> {
        self.collect_drop_flags();

        self.elaborate_drops();

        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();

        self.patch
    }

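    /// Allocates a boolean drop flag for every move path (including child paths) whose
    /// initialization state at some `Drop` terminator cannot be determined statically.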
    fn collect_drop_flags(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let TerminatorKind::Drop { ref place, .. } = terminator.kind else { continue };

            let path = self.move_data().rev_lookup.find(place.as_ref());
            debug!("collect_drop_flags: {:?}, place {:?} ({:?})", bb, place, path);

            match path {
                LookupResult::Exact(path) => {
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    on_all_children_bits(self.move_data(), path, |child| {
                        let (maybe_live, maybe_dead) = self.init_data.maybe_live_dead(child);
                        debug!(
                            "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                            child,
                            place,
                            path,
                            (maybe_live, maybe_dead)
                        );
                        if maybe_live && maybe_dead {
                            self.create_drop_flag(child, terminator.source_info.span)
                        }
                    });
                }
                LookupResult::Parent(None) => {}
                LookupResult::Parent(Some(parent)) => {
                    if self.body.local_decls[place.local].is_deref_temp() {
                        continue;
                    }

                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    let (_maybe_live, maybe_dead) = self.init_data.maybe_live_dead(parent);
                    if maybe_dead {
                        self.tcx.dcx().span_delayed_bug(
                            terminator.source_info.span,
                            format!(
                                "drop of untracked, uninitialized value {bb:?}, place {place:?} ({path:?})"
                            ),
                        );
                    }
                }
            };
        }
    }

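    /// Rewrites every `Drop` terminator: a drop of a place that needs no dropping becomes a plain
    /// goto, a drop with an exactly-tracked move path is elaborated (consulting drop flags where
    /// needed), and a drop only tracked through a parent path is left as an unconditional drop.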
    fn elaborate_drops(&mut self) {
        // This function should mirror what `collect_drop_flags` does.
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let TerminatorKind::Drop { place, target, unwind, replace } = terminator.kind else {
                continue;
            };

            // This place does not need dropping. It does not have an associated move-path, so the
            // match below will conservatively keep an unconditional drop. As that drop is useless,
            // just remove it here and now.
            if !place
                .ty(&self.body.local_decls, self.tcx)
                .ty
                .needs_drop(self.tcx, self.env.param_env)
            {
                self.patch.patch_terminator(bb, TerminatorKind::Goto { target });
                continue;
            }

            let path = self.move_data().rev_lookup.find(place.as_ref());
            match path {
                LookupResult::Exact(path) => {
                    let unwind = match unwind {
                        _ if data.is_cleanup => Unwind::InCleanup,
                        UnwindAction::Cleanup(cleanup) => Unwind::To(cleanup),
                        UnwindAction::Continue => Unwind::To(self.patch.resume_block()),
                        UnwindAction::Unreachable => {
                            Unwind::To(self.patch.unreachable_cleanup_block())
                        }
                        UnwindAction::Terminate(reason) => {
                            debug_assert_ne!(
                                reason,
                                UnwindTerminateReason::InCleanup,
                                "we are not in a cleanup block, InCleanup reason should be impossible"
                            );
                            Unwind::To(self.patch.terminate_block(reason))
                        }
                    };
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    elaborate_drop(
                        &mut Elaborator { ctxt: self },
                        terminator.source_info,
                        place,
                        path,
                        target,
                        unwind,
                        bb,
                    )
                }
                LookupResult::Parent(None) => {}
                LookupResult::Parent(Some(_)) => {
                    if !replace {
                        self.tcx.dcx().span_bug(
                            terminator.source_info.span,
                            format!("drop of untracked value {bb:?}"),
                        );
                    }
                    // A drop and replace behind a pointer/array/whatever.
                    // The borrow checker requires that these locations are initialized before the assignment,
                    // so we just leave an unconditional drop.
                    assert!(!data.is_cleanup);
                }
            }
        }
    }

    fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
        Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::from_bool(self.tcx, val),
        })))
    }

    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(flag) = self.drop_flags[path] {
            let span = self.patch.source_info_for_location(self.body, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Place::from(flag), val);
        }
    }

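    /// Initializes every drop flag to `false` at the start of the function.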
    fn drop_flags_on_init(&mut self) {
        let loc = Location::START;
        let span = self.patch.source_info_for_location(self.body, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.iter().flatten() {
            self.patch.add_assign(loc, Place::from(*flag), false_.clone());
        }
    }

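    /// For calls that unwind to a cleanup block, marks the call's destination as initialized at
    /// the start of the return block. Calls without a cleanup edge are handled in
    /// `drop_flags_for_locs` instead, before the terminator, to cope with critical edges.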
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            if let TerminatorKind::Call {
                destination,
                target: Some(tgt),
                unwind: UnwindAction::Cleanup(_),
                ..
            } = data.terminator().kind
            {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: tgt, statement_index: 0 };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }

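    /// Marks the function's arguments as initialized on entry.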
    fn drop_flags_for_args(&mut self) {
        let loc = Location::START;
        rustc_mir_dataflow::drop_flag_effects_for_function_entry(self.body, self.env, |path, ds| {
            self.set_drop_flag(loc, path, ds);
        })
    }

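    /// Translates each location's initialization effects (moves and assignments) in the
    /// pre-existing basic blocks into updates of the corresponding drop flags.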
    fn drop_flags_for_locs(&mut self) {
        // We intentionally iterate only over the *old* basic blocks.
        //
        // Basic blocks created by drop elaboration update their
        // drop flags by themselves, to avoid the drop flags being
        // clobbered before they are read.

        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            debug!("drop_flags_for_locs({:?})", data);
            for i in 0..(data.statements.len() + 1) {
                debug!("drop_flag_for_locs: stmt {}", i);
                if i == data.statements.len() {
                    match data.terminator().kind {
                        TerminatorKind::Drop { .. } => {
                            // drop elaboration should handle that by itself
                            continue;
                        }
                        TerminatorKind::UnwindResume => {
                            // It is possible for `Resume` to be patched
                            // (in particular it can be patched to be replaced with
                            // a Goto; see `MirPatch::new`).
                        }
                        _ => {
                            assert!(!self.patch.is_patched(bb));
                        }
                    }
                }
                let loc = Location { block: bb, statement_index: i };
                rustc_mir_dataflow::drop_flag_effects_for_location(
                    self.body,
                    self.env,
                    loc,
                    |path, ds| self.set_drop_flag(loc, path, ds),
                )
            }

            // There may be a critical edge after this call,
            // so mark the return as initialized *before* the
            // call.
            if let TerminatorKind::Call {
                destination,
                target: Some(_),
                unwind:
                    UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate(_),
                ..
            } = data.terminator().kind
            {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: bb, statement_index: data.statements.len() };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }
}