
Change `ChunkedBitSet<MovePathIndex>`s to `MixedBitSet`.

It's a performance win because `MixedBitSet` is faster and uses less
memory than `ChunkedBitSet`.

Also reflow some overlong comment lines in
`lint_tail_expr_drop_order.rs`.
Nicholas Nethercote 2024-12-05 14:20:05 +11:00
parent 6ee1a7aaa0
commit a06547508a
4 changed files with 42 additions and 39 deletions
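
For context on the performance claim: `MixedBitSet` is, roughly, a hybrid that uses a flat dense bit set while the domain is small and falls back to a chunked/sparse representation only for large domains, so the common case avoids `ChunkedBitSet`'s per-chunk bookkeeping. The following is a minimal, self-contained sketch of that idea; `ToyMixedBitSet`, the `SMALL_DOMAIN_LIMIT` cutoff, and the `BTreeSet` stand-in for the chunked variant are illustrative assumptions, not the actual `rustc_index` implementation.

// Sketch only: a toy hybrid in the spirit of `MixedBitSet`. The real type lives
// in `rustc_index::bit_set`; the cutoff and the `BTreeSet` fallback here are
// illustrative assumptions.
use std::collections::BTreeSet;

const SMALL_DOMAIN_LIMIT: usize = 2048; // assumed cutoff, for illustration only

enum ToyMixedBitSet {
    // Dense words: compact and cache-friendly when the domain is small.
    Small(Vec<u64>),
    // Stand-in for a chunked/sparse representation used for huge domains.
    Large(BTreeSet<usize>),
}

impl ToyMixedBitSet {
    fn new_empty(domain_size: usize) -> Self {
        if domain_size <= SMALL_DOMAIN_LIMIT {
            ToyMixedBitSet::Small(vec![0; domain_size.div_ceil(64)])
        } else {
            ToyMixedBitSet::Large(BTreeSet::new())
        }
    }

    fn insert(&mut self, elem: usize) {
        match self {
            ToyMixedBitSet::Small(words) => words[elem / 64] |= 1u64 << (elem % 64),
            ToyMixedBitSet::Large(set) => {
                set.insert(elem);
            }
        }
    }

    fn contains(&self, elem: usize) -> bool {
        match self {
            ToyMixedBitSet::Small(words) => ((words[elem / 64] >> (elem % 64)) & 1) == 1,
            ToyMixedBitSet::Large(set) => set.contains(&elem),
        }
    }
}

The operations the hunks below rely on (`new_empty` sized to `move_data.move_paths.len()`, `insert`, `contains`, `union`, `iter`, `domain_size`) all stay on the dense path whenever the move-path count is small, which is presumably where the speed and memory win over an always-chunked set comes from.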

@@ -26,7 +26,7 @@ use rustc_data_structures::graph::dominators::Dominators;
 use rustc_errors::Diag;
 use rustc_hir as hir;
 use rustc_hir::def_id::LocalDefId;
-use rustc_index::bit_set::{BitSet, ChunkedBitSet};
+use rustc_index::bit_set::{BitSet, MixedBitSet};
 use rustc_index::{IndexSlice, IndexVec};
 use rustc_infer::infer::{
     InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin, TyCtxtInferExt,
@@ -1797,7 +1797,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
         location: Location,
         desired_action: InitializationRequiringAction,
         place_span: (PlaceRef<'tcx>, Span),
-        maybe_uninits: &ChunkedBitSet<MovePathIndex>,
+        maybe_uninits: &MixedBitSet<MovePathIndex>,
         from: u64,
         to: u64,
     ) {

@@ -1,7 +1,7 @@
 use std::assert_matches::assert_matches;
 
 use rustc_index::Idx;
-use rustc_index::bit_set::{BitSet, ChunkedBitSet};
+use rustc_index::bit_set::{BitSet, MixedBitSet};
 use rustc_middle::bug;
 use rustc_middle::mir::{self, Body, CallReturnPlaces, Location, TerminatorEdges};
 use rustc_middle::ty::{self, TyCtxt};
@@ -70,7 +70,7 @@ impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
     pub fn is_unwind_dead(
         &self,
         place: mir::Place<'tcx>,
-        state: &MaybeReachable<ChunkedBitSet<MovePathIndex>>,
+        state: &MaybeReachable<MixedBitSet<MovePathIndex>>,
     ) -> bool {
         if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) {
             let mut maybe_live = false;
@@ -244,8 +244,8 @@ impl<'tcx> MaybeUninitializedPlaces<'_, 'tcx> {
 impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
     /// There can be many more `MovePathIndex` than there are locals in a MIR body.
-    /// We use a chunked bitset to avoid paying too high a memory footprint.
-    type Domain = MaybeReachable<ChunkedBitSet<MovePathIndex>>;
+    /// We use a mixed bitset to avoid paying too high a memory footprint.
+    type Domain = MaybeReachable<MixedBitSet<MovePathIndex>>;
 
     const NAME: &'static str = "maybe_init";
@@ -256,7 +256,7 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
     fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
         *state =
-            MaybeReachable::Reachable(ChunkedBitSet::new_empty(self.move_data().move_paths.len()));
+            MaybeReachable::Reachable(MixedBitSet::new_empty(self.move_data().move_paths.len()));
         drop_flag_effects_for_function_entry(self.body, self.move_data, |path, s| {
             assert!(s == DropFlagState::Present);
             state.gen_(path);
@@ -371,14 +371,14 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
 impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
     /// There can be many more `MovePathIndex` than there are locals in a MIR body.
-    /// We use a chunked bitset to avoid paying too high a memory footprint.
-    type Domain = ChunkedBitSet<MovePathIndex>;
+    /// We use a mixed bitset to avoid paying too high a memory footprint.
+    type Domain = MixedBitSet<MovePathIndex>;
 
     const NAME: &'static str = "maybe_uninit";
 
     fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
         // bottom = initialized (start_block_effect counters this at outset)
-        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
+        MixedBitSet::new_empty(self.move_data().move_paths.len())
     }
 
     // sets on_entry bits for Arg places
@@ -492,14 +492,14 @@ impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
 impl<'tcx> Analysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
     /// There can be many more `InitIndex` than there are locals in a MIR body.
-    /// We use a chunked bitset to avoid paying too high a memory footprint.
-    type Domain = ChunkedBitSet<InitIndex>;
+    /// We use a mixed bitset to avoid paying too high a memory footprint.
+    type Domain = MixedBitSet<InitIndex>;
 
     const NAME: &'static str = "ever_init";
 
     fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
         // bottom = no initialized variables by default
-        ChunkedBitSet::new_empty(self.move_data().inits.len())
+        MixedBitSet::new_empty(self.move_data().inits.len())
     }
 
     fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
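
One detail worth noting in the `maybe_init` hunks above: the domain is wrapped in `MaybeReachable`, so a block's facts are only consulted once the block is known to be reachable. A rough, self-contained sketch of that shape follows; a plain `BTreeSet<usize>` stands in for `MixedBitSet<MovePathIndex>`, and the function names are illustrative, not rustc's.

use std::collections::BTreeSet;

enum MaybeReachable<S> {
    Unreachable,
    Reachable(S),
}

// Dataflow bottom: the block has not been shown reachable, so it carries no facts.
fn bottom_value() -> MaybeReachable<BTreeSet<usize>> {
    MaybeReachable::Unreachable
}

// Mirrors `initialize_start_block`: the start block is reachable with an empty
// set, and the function-entry effects then gen the argument places.
fn start_block_state() -> MaybeReachable<BTreeSet<usize>> {
    let mut state = MaybeReachable::Reachable(BTreeSet::new());
    if let MaybeReachable::Reachable(maybe_init) = &mut state {
        maybe_init.insert(0); // pretend move path 0 is an argument place
    }
    state
}

// Mirrors the `Reachable(maybe_init) if maybe_init.contains(idx)` checks in
// `is_unwind_dead` and in the tail-expr-drop-order lint: unreachable blocks
// simply answer "no".
fn is_maybe_init(state: &MaybeReachable<BTreeSet<usize>>, path: usize) -> bool {
    match state {
        MaybeReachable::Unreachable => false,
        MaybeReachable::Reachable(maybe_init) => maybe_init.contains(&path),
    }
}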

@@ -7,7 +7,7 @@ use rustc_data_structures::unord::{UnordMap, UnordSet};
 use rustc_errors::Subdiagnostic;
 use rustc_hir::CRATE_HIR_ID;
 use rustc_hir::def_id::{DefId, LocalDefId};
-use rustc_index::bit_set::ChunkedBitSet;
+use rustc_index::bit_set::MixedBitSet;
 use rustc_index::{IndexSlice, IndexVec};
 use rustc_macros::{LintDiagnostic, Subdiagnostic};
 use rustc_middle::bug;
@@ -49,24 +49,24 @@ struct DropsReachable<'a, 'mir, 'tcx> {
     move_data: &'a MoveData<'tcx>,
     maybe_init: &'a mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
     block_drop_value_info: &'a mut IndexSlice<BasicBlock, MovePathIndexAtBlock>,
-    collected_drops: &'a mut ChunkedBitSet<MovePathIndex>,
-    visited: FxHashMap<BasicBlock, Rc<RefCell<ChunkedBitSet<MovePathIndex>>>>,
+    collected_drops: &'a mut MixedBitSet<MovePathIndex>,
+    visited: FxHashMap<BasicBlock, Rc<RefCell<MixedBitSet<MovePathIndex>>>>,
 }
 
 impl<'a, 'mir, 'tcx> DropsReachable<'a, 'mir, 'tcx> {
     fn visit(&mut self, block: BasicBlock) {
         let move_set_size = self.move_data.move_paths.len();
-        let make_new_path_set = || Rc::new(RefCell::new(ChunkedBitSet::new_empty(move_set_size)));
+        let make_new_path_set = || Rc::new(RefCell::new(MixedBitSet::new_empty(move_set_size)));
 
         let data = &self.body.basic_blocks[block];
         let Some(terminator) = &data.terminator else { return };
-        // Given that we observe these dropped locals here at `block` so far,
-        // we will try to update the successor blocks.
-        // An occupied entry at `block` in `self.visited` signals that we have visited `block` before.
+        // Given that we observe these dropped locals here at `block` so far, we will try to update
+        // the successor blocks. An occupied entry at `block` in `self.visited` signals that we
+        // have visited `block` before.
         let dropped_local_here =
             Rc::clone(self.visited.entry(block).or_insert_with(make_new_path_set));
-        // We could have invoked reverse lookup for a `MovePathIndex` every time, but unfortunately it is expensive.
-        // Let's cache them in `self.block_drop_value_info`.
+        // We could have invoked reverse lookup for a `MovePathIndex` every time, but unfortunately
+        // it is expensive. Let's cache them in `self.block_drop_value_info`.
         match self.block_drop_value_info[block] {
             MovePathIndexAtBlock::Some(dropped) => {
                 dropped_local_here.borrow_mut().insert(dropped);
@@ -76,23 +76,24 @@ impl<'a, 'mir, 'tcx> DropsReachable<'a, 'mir, 'tcx> {
                     && let LookupResult::Exact(idx) | LookupResult::Parent(Some(idx)) =
                         self.move_data.rev_lookup.find(place.as_ref())
                 {
-                    // Since we are working with MIRs at a very early stage,
-                    // observing a `drop` terminator is not indicative enough that
-                    // the drop will definitely happen.
-                    // That is decided in the drop elaboration pass instead.
-                    // Therefore, we need to consult with the maybe-initialization information.
+                    // Since we are working with MIRs at a very early stage, observing a `drop`
+                    // terminator is not indicative enough that the drop will definitely happen.
+                    // That is decided in the drop elaboration pass instead. Therefore, we need to
+                    // consult with the maybe-initialization information.
                     self.maybe_init.seek_before_primary_effect(Location {
                         block,
                         statement_index: data.statements.len(),
                     });
 
-                    // Check if the drop of `place` under inspection is really in effect.
-                    // This is true only when `place` may have been initialized along a control flow path from a BID to the drop program point today.
-                    // In other words, this is where the drop of `place` will happen in the future instead.
+                    // Check if the drop of `place` under inspection is really in effect. This is
+                    // true only when `place` may have been initialized along a control flow path
+                    // from a BID to the drop program point today. In other words, this is where
+                    // the drop of `place` will happen in the future instead.
                     if let MaybeReachable::Reachable(maybe_init) = self.maybe_init.get()
                         && maybe_init.contains(idx)
                     {
-                        // We also cache the drop information, so that we do not need to check on data-flow cursor again
+                        // We also cache the drop information, so that we do not need to check on
+                        // data-flow cursor again.
                         self.block_drop_value_info[block] = MovePathIndexAtBlock::Some(idx);
                         dropped_local_here.borrow_mut().insert(idx);
                     } else {
@@ -139,8 +140,9 @@ impl<'a, 'mir, 'tcx> DropsReachable<'a, 'mir, 'tcx> {
                 // Let's check the observed dropped places in.
                 self.collected_drops.union(&*dropped_local_there.borrow());
                 if self.drop_span.is_none() {
-                    // FIXME(@dingxiangfei2009): it turns out that `self.body.source_scopes` are still a bit wonky.
-                    // There is a high chance that this span still points to a block rather than a statement semicolon.
+                    // FIXME(@dingxiangfei2009): it turns out that `self.body.source_scopes` are
+                    // still a bit wonky. There is a high chance that this span still points to a
+                    // block rather than a statement semicolon.
                     *self.drop_span = Some(terminator.source_info.span);
                 }
                 // Now we have discovered a simple control flow path from a future drop point
@@ -394,10 +396,10 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body<
     for (&block, candidates) in &bid_per_block {
         // We will collect drops on locals on paths between BID points to their actual drop locations
        // into `all_locals_dropped`.
-        let mut all_locals_dropped = ChunkedBitSet::new_empty(move_data.move_paths.len());
+        let mut all_locals_dropped = MixedBitSet::new_empty(move_data.move_paths.len());
         let mut drop_span = None;
         for &(_, place) in candidates.iter() {
-            let mut collected_drops = ChunkedBitSet::new_empty(move_data.move_paths.len());
+            let mut collected_drops = MixedBitSet::new_empty(move_data.move_paths.len());
             // ## On detecting change in relative drop order ##
             // Iterate through each BID-containing block `block`.
             // If the place `P` targeted by the BID is "maybe initialized",
@@ -425,8 +427,9 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body<
 
         // We shall now exclude some local bindings for the following cases.
         {
-            let mut to_exclude = ChunkedBitSet::new_empty(all_locals_dropped.domain_size());
-            // We will now do subtraction from the candidate dropped locals, because of the following reasons.
+            let mut to_exclude = MixedBitSet::new_empty(all_locals_dropped.domain_size());
+            // We will now do subtraction from the candidate dropped locals, because of the
+            // following reasons.
             for path_idx in all_locals_dropped.iter() {
                 let move_path = &move_data.move_paths[path_idx];
                 let dropped_local = move_path.place.local;
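
Two patterns in the hunk above carry most of the logic: every visited block caches its drop set behind `Rc<RefCell<...>>` so the expensive reverse lookup happens at most once per block, and a successor's cached set is unioned into the caller's accumulator. A compact, self-contained sketch of that pattern follows; `record_block_drops`, `DropSet`, and the plain `HashMap`/`BTreeSet` types are illustrative stand-ins for the compiler's own types.

use std::cell::RefCell;
use std::collections::{BTreeSet, HashMap};
use std::rc::Rc;

type DropSet = BTreeSet<usize>; // stands in for MixedBitSet<MovePathIndex>

fn record_block_drops(
    block: usize,
    dropped_path: Option<usize>,
    visited: &mut HashMap<usize, Rc<RefCell<DropSet>>>,
    collected_drops: &mut DropSet,
) {
    // An occupied entry signals that `block` was visited before; otherwise a
    // fresh, empty set is created for it (cf. `make_new_path_set`).
    let dropped_here = Rc::clone(
        visited.entry(block).or_insert_with(|| Rc::new(RefCell::new(DropSet::new()))),
    );
    // Cache the move path dropped at this block so later visits can skip the
    // expensive reverse lookup (cf. `block_drop_value_info`).
    if let Some(path) = dropped_path {
        dropped_here.borrow_mut().insert(path);
    }
    // Union this block's drops into the caller's accumulator
    // (cf. `self.collected_drops.union(&*dropped_local_there.borrow())`).
    collected_drops.extend(dropped_here.borrow().iter().copied());
}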

@@ -1,5 +1,5 @@
 use rustc_abi::FieldIdx;
-use rustc_index::bit_set::ChunkedBitSet;
+use rustc_index::bit_set::MixedBitSet;
 use rustc_middle::mir::{Body, TerminatorKind};
 use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, VariantDef};
 use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
@@ -67,7 +67,7 @@ impl<'tcx> crate::MirPass<'tcx> for RemoveUninitDrops {
 fn is_needs_drop_and_init<'tcx>(
     tcx: TyCtxt<'tcx>,
     typing_env: ty::TypingEnv<'tcx>,
-    maybe_inits: &ChunkedBitSet<MovePathIndex>,
+    maybe_inits: &MixedBitSet<MovePathIndex>,
     move_data: &MoveData<'tcx>,
     ty: Ty<'tcx>,
     mpi: MovePathIndex,