Rollup merge of #136053 - Zalathar:defer-counters, r=saethlin

coverage: Defer part of counter-creation until codegen

Follow-up to #135481 and #135873.

One of the pleasant properties of the new counter-assignment algorithm is that we can stop partway through the process, store the intermediate state in MIR, and then resume the rest of the algorithm during codegen. This lets it take into account which parts of the control-flow graph were eliminated by MIR opts, resulting in fewer physical counters and simpler counter expressions.
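As a rough sketch of that two-phase split (using simplified stand-in types and functions, not the actual compiler code), the MIR pass computes and stores some per-function data up front, and codegen later finishes counter assignment using only the nodes whose code survived optimization:

```rust
// Minimal sketch of the two-phase idea. `BcbCountersData`, `prepare`, and
// `transcribe` are simplified stand-ins, not the real rustc types.

/// Intermediate state computed by the MIR pass and stored alongside the body.
struct BcbCountersData {
    /// Coverage-graph nodes, in the order they should be considered for
    /// physical counters.
    priority_list: Vec<usize>,
}

/// Phase 1 (MIR pass): analyze the full coverage graph while it still exists.
fn prepare(num_nodes: usize) -> BcbCountersData {
    BcbCountersData { priority_list: (0..num_nodes).collect() }
}

/// Phase 2 (codegen): finish counter assignment, skipping nodes whose code
/// was removed by MIR opts, so they never receive a physical counter.
fn transcribe(data: &BcbCountersData, survived: &[bool]) -> Vec<Option<u32>> {
    let mut counters = vec![None; survived.len()];
    let mut next_id = 0u32;
    for &node in &data.priority_list {
        if survived[node] {
            counters[node] = Some(next_id);
            next_id += 1;
        }
        // Optimized-out nodes are treated as having an execution count of zero.
    }
    counters
}

fn main() {
    let data = prepare(4);
    // Suppose MIR opts eliminated node 2 entirely.
    let counters = transcribe(&data, &[true, true, false, true]);
    assert_eq!(counters, vec![Some(0), Some(1), None, Some(2)]);
}
```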

Those improvements completely obsolete much larger chunks of code that were previously responsible for cleaning up the coverage metadata after MIR opts, while also doing a more thorough cleanup job.
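To illustrate why the old cleanup becomes unnecessary (again a simplified sketch, with hypothetical `Term`/`Op` stand-ins rather than the real `CovTerm` machinery): when a node's count is expressed as a sum/difference of other nodes' counts, any term that refers to an optimized-out node can simply be dropped during the deferred transcription, because its count is known to be zero:

```rust
// Hypothetical simplified types; the real compiler uses BCB indices and CovTerm.
#[derive(Clone, Copy)]
enum Op {
    Add,
    Subtract,
}

#[derive(Clone, Copy)]
struct Term {
    node: usize,
    op: Op,
}

/// Split a node's counter terms into positive and negative parts, dropping
/// any term whose node was removed by MIR opts (its count is zero anyway).
fn split_surviving_terms(terms: &[Term], survived: &[bool]) -> (Vec<usize>, Vec<usize>) {
    let mut pos = Vec::new();
    let mut neg = Vec::new();
    for term in terms.iter().filter(|t| survived[t.node]) {
        match term.op {
            Op::Add => pos.push(term.node),
            Op::Subtract => neg.push(term.node),
        }
    }
    // If no positive term survives, the node's count degenerates to zero.
    (pos, neg)
}

fn main() {
    // count = count(1) + count(2) - count(3), but node 2 was optimized out.
    let terms = [
        Term { node: 1, op: Op::Add },
        Term { node: 2, op: Op::Add },
        Term { node: 3, op: Op::Subtract },
    ];
    let survived = [true, true, false, true];
    let (pos, neg) = split_surviving_terms(&terms, &survived);
    assert_eq!((pos, neg), (vec![1], vec![3]));
}
```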

(That change also unlocks some further simplifications that I've kept out of this PR to limit its scope.)
Commit 7f8108afc8, authored by Jubilee on 2025-02-10 00:51:49 -08:00, committed by GitHub.
28 changed files with 356 additions and 665 deletions


@ -2,7 +2,6 @@ use std::cmp::Ordering;
use either::Either;
use itertools::Itertools;
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_data_structures::graph::DirectedGraph;
use rustc_index::IndexVec;
@ -11,31 +10,35 @@ use rustc_middle::mir::coverage::{CounterId, CovTerm, Expression, ExpressionId,
use crate::coverage::counters::balanced_flow::BalancedFlowGraph;
use crate::coverage::counters::node_flow::{
CounterTerm, NodeCounters, make_node_counters, node_flow_data_for_balanced_graph,
CounterTerm, NodeCounters, NodeFlowData, node_flow_data_for_balanced_graph,
};
use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
mod balanced_flow;
mod node_flow;
pub(crate) mod node_flow;
mod union_find;
/// Ensures that each BCB node needing a counter has one, by creating physical
/// counters or counter expressions for nodes as required.
pub(super) fn make_bcb_counters(
graph: &CoverageGraph,
bcb_needs_counter: &DenseBitSet<BasicCoverageBlock>,
) -> CoverageCounters {
/// Struct containing the results of [`prepare_bcb_counters_data`].
pub(crate) struct BcbCountersData {
pub(crate) node_flow_data: NodeFlowData<BasicCoverageBlock>,
pub(crate) priority_list: Vec<BasicCoverageBlock>,
}
/// Analyzes the coverage graph to create intermediate data structures that
/// will later be used (during codegen) to create physical counters or counter
/// expressions for each BCB node that needs one.
pub(crate) fn prepare_bcb_counters_data(graph: &CoverageGraph) -> BcbCountersData {
// Create the derived graphs that are necessary for subsequent steps.
let balanced_graph = BalancedFlowGraph::for_graph(graph, |n| !graph[n].is_out_summable);
let node_flow_data = node_flow_data_for_balanced_graph(&balanced_graph);
// Use those graphs to determine which nodes get physical counters, and how
// to compute the execution counts of other nodes from those counters.
// Also create a "priority list" of coverage graph nodes, to help determine
// which ones get physical counters or counter expressions. This needs to
// be done now, because the later parts of the counter-creation process
// won't have access to the original coverage graph.
let priority_list = make_node_flow_priority_list(graph, balanced_graph);
let node_counters = make_node_counters(&node_flow_data, &priority_list);
// Convert the counters into a form suitable for embedding into MIR.
transcribe_counters(&node_counters, bcb_needs_counter)
BcbCountersData { node_flow_data, priority_list }
}
/// Arranges the nodes in `balanced_graph` into a list, such that earlier nodes
@ -74,31 +77,33 @@ fn make_node_flow_priority_list(
}
// Converts node counters into a form suitable for embedding into MIR.
fn transcribe_counters(
pub(crate) fn transcribe_counters(
old: &NodeCounters<BasicCoverageBlock>,
bcb_needs_counter: &DenseBitSet<BasicCoverageBlock>,
bcbs_seen: &DenseBitSet<BasicCoverageBlock>,
) -> CoverageCounters {
let mut new = CoverageCounters::with_num_bcbs(bcb_needs_counter.domain_size());
for bcb in bcb_needs_counter.iter() {
if !bcbs_seen.contains(bcb) {
// This BCB's code was removed by MIR opts, so its counter is always zero.
new.set_node_counter(bcb, CovTerm::Zero);
continue;
}
// Our counter-creation algorithm doesn't guarantee that a node's list
// of terms starts or ends with a positive term, so partition the
// counters into "positive" and "negative" lists for easier handling.
let (mut pos, mut neg): (Vec<_>, Vec<_>) =
old.counter_terms[bcb].iter().partition_map(|&CounterTerm { node, op }| match op {
let (mut pos, mut neg): (Vec<_>, Vec<_>) = old.counter_terms[bcb]
.iter()
// Filter out any BCBs that were removed by MIR opts;
// this treats them as having an execution count of 0.
.filter(|term| bcbs_seen.contains(term.node))
.partition_map(|&CounterTerm { node, op }| match op {
Op::Add => Either::Left(node),
Op::Subtract => Either::Right(node),
});
if pos.is_empty() {
// If we somehow end up with no positive terms, fall back to
// creating a physical counter. There's no known way for this
// to happen, but we can avoid an ICE if it does.
debug_assert!(false, "{bcb:?} has no positive counter terms");
pos = vec![bcb];
neg = vec![];
}
// These intermediate sorts are not strictly necessary, but were helpful
// in reducing churn when switching to the current counter-creation scheme.
// They also help to slightly decrease the overall size of the expression
@ -116,7 +121,7 @@ fn transcribe_counters(
pos.sort();
neg.sort();
let pos_counter = new.make_sum(&pos).expect("`pos` should not be empty");
let pos_counter = new.make_sum(&pos).unwrap_or(CovTerm::Zero);
let new_counter = new.make_subtracted_sum(pos_counter, &neg);
new.set_node_counter(bcb, new_counter);
}
@ -129,15 +134,15 @@ fn transcribe_counters(
pub(super) struct CoverageCounters {
/// List of places where a counter-increment statement should be injected
/// into MIR, each with its corresponding counter ID.
phys_counter_for_node: FxIndexMap<BasicCoverageBlock, CounterId>,
next_counter_id: CounterId,
pub(crate) phys_counter_for_node: FxIndexMap<BasicCoverageBlock, CounterId>,
pub(crate) next_counter_id: CounterId,
/// Coverage counters/expressions that are associated with individual BCBs.
node_counters: IndexVec<BasicCoverageBlock, Option<CovTerm>>,
pub(crate) node_counters: IndexVec<BasicCoverageBlock, Option<CovTerm>>,
/// Table of expression data, associating each expression ID with its
/// corresponding operator (+ or -) and its LHS/RHS operands.
expressions: IndexVec<ExpressionId, Expression>,
pub(crate) expressions: IndexVec<ExpressionId, Expression>,
/// Remember expressions that have already been created (or simplified),
/// so that we don't create unnecessary duplicates.
expressions_memo: FxHashMap<Expression, CovTerm>,
@ -188,12 +193,6 @@ impl CoverageCounters {
self.make_expression(lhs, Op::Subtract, rhs_sum)
}
pub(super) fn num_counters(&self) -> usize {
let num_counters = self.phys_counter_for_node.len();
assert_eq!(num_counters, self.next_counter_id.as_usize());
num_counters
}
fn set_node_counter(&mut self, bcb: BasicCoverageBlock, counter: CovTerm) -> CovTerm {
let existing = self.node_counters[bcb].replace(counter);
assert!(
@ -202,34 +201,4 @@ impl CoverageCounters {
);
counter
}
pub(super) fn term_for_bcb(&self, bcb: BasicCoverageBlock) -> Option<CovTerm> {
self.node_counters[bcb]
}
/// Returns an iterator over all the nodes in the coverage graph that
/// should have a counter-increment statement injected into MIR, along with
/// each site's corresponding counter ID.
pub(super) fn counter_increment_sites(
&self,
) -> impl Iterator<Item = (CounterId, BasicCoverageBlock)> + Captures<'_> {
self.phys_counter_for_node.iter().map(|(&site, &id)| (id, site))
}
/// Returns an iterator over the subset of BCB nodes that have been associated
/// with a counter *expression*, along with the ID of that expression.
pub(super) fn bcb_nodes_with_coverage_expressions(
&self,
) -> impl Iterator<Item = (BasicCoverageBlock, ExpressionId)> + Captures<'_> {
self.node_counters.iter_enumerated().filter_map(|(bcb, &counter)| match counter {
// Yield the BCB along with its associated expression ID.
Some(CovTerm::Expression(id)) => Some((bcb, id)),
// This BCB is associated with a counter or nothing, so skip it.
Some(CovTerm::Counter { .. } | CovTerm::Zero) | None => None,
})
}
pub(super) fn into_expressions(self) -> IndexVec<ExpressionId, Expression> {
self.expressions
}
}


@ -9,6 +9,7 @@
use rustc_data_structures::graph;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{Idx, IndexSlice, IndexVec};
pub(crate) use rustc_middle::mir::coverage::NodeFlowData;
use rustc_middle::mir::coverage::Op;
use crate::coverage::counters::union_find::UnionFind;
@ -16,30 +17,6 @@ use crate::coverage::counters::union_find::UnionFind;
#[cfg(test)]
mod tests;
/// Data representing a view of some underlying graph, in which each node's
/// successors have been merged into a single "supernode".
///
/// The resulting supernodes have no obvious meaning on their own.
/// However, merging successor nodes means that a node's out-edges can all
/// be combined into a single out-edge, whose flow is the same as the flow
/// (execution count) of its corresponding node in the original graph.
///
With all node flows in the original graph now represented as edge flows
/// in the merged graph, it becomes possible to analyze the original node flows
/// using techniques for analyzing edge flows.
#[derive(Debug)]
pub(crate) struct NodeFlowData<Node: Idx> {
/// Maps each node to the supernode that contains it, indicated by some
/// arbitrary "root" node that is part of that supernode.
supernodes: IndexVec<Node, Node>,
/// For each node, stores the single supernode that all of its successors
/// have been merged into.
///
/// (Note that each node in a supernode can potentially have a _different_
/// successor supernode from its peers.)
succ_supernodes: IndexVec<Node, Node>,
}
/// Creates a "merged" view of an underlying graph.
///
/// The given graph is assumed to have [“balanced flow”](balanced-flow),


@ -8,6 +8,7 @@ use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::graph::{self, DirectedGraph, StartNode};
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
pub(crate) use rustc_middle::mir::coverage::{BasicCoverageBlock, START_BCB};
use rustc_middle::mir::{self, BasicBlock, Terminator, TerminatorKind};
use tracing::debug;
@ -269,15 +270,6 @@ impl graph::Predecessors for CoverageGraph {
}
}
rustc_index::newtype_index! {
/// A node in the control-flow graph of CoverageGraph.
#[orderable]
#[debug_format = "bcb{}"]
pub(crate) struct BasicCoverageBlock {
const START_BCB = 0;
}
}
/// `BasicCoverageBlockData` holds the data indexed by a `BasicCoverageBlock`.
///
/// A `BasicCoverageBlock` (BCB) represents the maximal-length sequence of MIR `BasicBlock`s without


@ -1,9 +1,7 @@
use std::collections::BTreeSet;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::graph::DirectedGraph;
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::mir::coverage::{
BlockMarkerId, BranchSpan, ConditionId, ConditionInfo, CoverageInfoHi, CoverageKind,
};
@ -63,10 +61,6 @@ const MCDC_MAX_BITMAP_SIZE: usize = i32::MAX as usize;
#[derive(Default)]
pub(super) struct ExtractedMappings {
/// Store our own copy of [`CoverageGraph::num_nodes`], so that we don't
/// need access to the whole graph when allocating per-BCB data. This is
/// only public so that other code can still use exhaustive destructuring.
pub(super) num_bcbs: usize,
pub(super) code_mappings: Vec<CodeMapping>,
pub(super) branch_pairs: Vec<BranchPair>,
pub(super) mcdc_bitmap_bits: usize,
@ -118,7 +112,6 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>(
);
ExtractedMappings {
num_bcbs: graph.num_nodes(),
code_mappings,
branch_pairs,
mcdc_bitmap_bits,
@ -127,60 +120,6 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>(
}
}
impl ExtractedMappings {
pub(super) fn all_bcbs_with_counter_mappings(&self) -> DenseBitSet<BasicCoverageBlock> {
// Fully destructure self to make sure we don't miss any fields that have mappings.
let Self {
num_bcbs,
code_mappings,
branch_pairs,
mcdc_bitmap_bits: _,
mcdc_degraded_branches,
mcdc_mappings,
} = self;
// Identify which BCBs have one or more mappings.
let mut bcbs_with_counter_mappings = DenseBitSet::new_empty(*num_bcbs);
let mut insert = |bcb| {
bcbs_with_counter_mappings.insert(bcb);
};
for &CodeMapping { span: _, bcb } in code_mappings {
insert(bcb);
}
for &BranchPair { true_bcb, false_bcb, .. } in branch_pairs {
insert(true_bcb);
insert(false_bcb);
}
for &MCDCBranch { true_bcb, false_bcb, .. } in mcdc_degraded_branches
.iter()
.chain(mcdc_mappings.iter().map(|(_, branches)| branches.into_iter()).flatten())
{
insert(true_bcb);
insert(false_bcb);
}
// MC/DC decisions refer to BCBs, but don't require those BCBs to have counters.
if bcbs_with_counter_mappings.is_empty() {
debug_assert!(
mcdc_mappings.is_empty(),
"A function with no counter mappings shouldn't have any decisions: {mcdc_mappings:?}",
);
}
bcbs_with_counter_mappings
}
/// Returns the set of BCBs that have one or more `Code` mappings.
pub(super) fn bcbs_with_ordinary_code_mappings(&self) -> DenseBitSet<BasicCoverageBlock> {
let mut bcbs = DenseBitSet::new_empty(self.num_bcbs);
for &CodeMapping { span: _, bcb } in &self.code_mappings {
bcbs.insert(bcb);
}
bcbs
}
}
fn resolve_block_markers(
coverage_info_hi: &CoverageInfoHi,
mir_body: &mir::Body<'_>,


@ -21,7 +21,7 @@ use rustc_span::Span;
use rustc_span::def_id::LocalDefId;
use tracing::{debug, debug_span, trace};
use crate::coverage::counters::CoverageCounters;
use crate::coverage::counters::BcbCountersData;
use crate::coverage::graph::CoverageGraph;
use crate::coverage::mappings::ExtractedMappings;
@ -82,28 +82,21 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir:
let extracted_mappings =
mappings::extract_all_mapping_info_from_mir(tcx, mir_body, &hir_info, &graph);
////////////////////////////////////////////////////
// Create an optimized mix of `Counter`s and `Expression`s for the `CoverageGraph`. Ensure
// every coverage span has a `Counter` or `Expression` assigned to its `BasicCoverageBlock`
// and all `Expression` dependencies (operands) are also generated, for any other
// `BasicCoverageBlock`s not already associated with a coverage span.
let bcbs_with_counter_mappings = extracted_mappings.all_bcbs_with_counter_mappings();
if bcbs_with_counter_mappings.is_empty() {
// No relevant spans were found in MIR, so skip instrumenting this function.
return;
}
let coverage_counters = counters::make_bcb_counters(&graph, &bcbs_with_counter_mappings);
let mappings = create_mappings(&extracted_mappings, &coverage_counters);
let mappings = create_mappings(&extracted_mappings);
if mappings.is_empty() {
// No spans could be converted into valid mappings, so skip this function.
debug!("no spans could be converted into valid mappings; skipping");
return;
}
inject_coverage_statements(mir_body, &graph, &extracted_mappings, &coverage_counters);
// Use the coverage graph to prepare intermediate data that will eventually
// be used to assign physical counters and counter expressions to points in
// the control-flow graph
let BcbCountersData { node_flow_data, priority_list } =
counters::prepare_bcb_counters_data(&graph);
// Inject coverage statements into MIR.
inject_coverage_statements(mir_body, &graph);
inject_mcdc_statements(mir_body, &graph, &extracted_mappings);
let mcdc_num_condition_bitmaps = extracted_mappings
@ -116,29 +109,25 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir:
mir_body.function_coverage_info = Some(Box::new(FunctionCoverageInfo {
function_source_hash: hir_info.function_source_hash,
body_span: hir_info.body_span,
num_counters: coverage_counters.num_counters(),
mcdc_bitmap_bits: extracted_mappings.mcdc_bitmap_bits,
expressions: coverage_counters.into_expressions(),
node_flow_data,
priority_list,
mappings,
mcdc_bitmap_bits: extracted_mappings.mcdc_bitmap_bits,
mcdc_num_condition_bitmaps,
}));
}
/// For each coverage span extracted from MIR, create a corresponding
/// mapping.
/// For each coverage span extracted from MIR, create a corresponding mapping.
///
/// Precondition: All BCBs corresponding to those spans have been given
/// coverage counters.
fn create_mappings(
extracted_mappings: &ExtractedMappings,
coverage_counters: &CoverageCounters,
) -> Vec<Mapping> {
let term_for_bcb =
|bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters");
/// FIXME(Zalathar): This used to be where BCBs in the extracted mappings were
/// resolved to a `CovTerm`. But that is now handled elsewhere, so this
/// function can potentially be simplified even further.
fn create_mappings(extracted_mappings: &ExtractedMappings) -> Vec<Mapping> {
// Fully destructure the mappings struct to make sure we don't miss any kinds.
let ExtractedMappings {
num_bcbs: _,
code_mappings,
branch_pairs,
mcdc_bitmap_bits: _,
@ -150,23 +139,18 @@ fn create_mappings(
mappings.extend(code_mappings.iter().map(
// Ordinary code mappings are the simplest kind.
|&mappings::CodeMapping { span, bcb }| {
let kind = MappingKind::Code(term_for_bcb(bcb));
let kind = MappingKind::Code { bcb };
Mapping { kind, span }
},
));
mappings.extend(branch_pairs.iter().map(
|&mappings::BranchPair { span, true_bcb, false_bcb }| {
let true_term = term_for_bcb(true_bcb);
let false_term = term_for_bcb(false_bcb);
let kind = MappingKind::Branch { true_term, false_term };
let kind = MappingKind::Branch { true_bcb, false_bcb };
Mapping { kind, span }
},
));
let term_for_bcb =
|bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters");
// MCDC branch mappings are appended with their decisions in case decisions were ignored.
mappings.extend(mcdc_degraded_branches.iter().map(
|&mappings::MCDCBranch {
@ -176,11 +160,7 @@ fn create_mappings(
condition_info: _,
true_index: _,
false_index: _,
}| {
let true_term = term_for_bcb(true_bcb);
let false_term = term_for_bcb(false_bcb);
Mapping { kind: MappingKind::Branch { true_term, false_term }, span }
},
}| { Mapping { kind: MappingKind::Branch { true_bcb, false_bcb }, span } },
));
for (decision, branches) in mcdc_mappings {
@ -201,12 +181,10 @@ fn create_mappings(
true_index: _,
false_index: _,
}| {
let true_term = term_for_bcb(true_bcb);
let false_term = term_for_bcb(false_bcb);
Mapping {
kind: MappingKind::MCDCBranch {
true_term,
false_term,
true_bcb,
false_bcb,
mcdc_params: condition_info,
},
span,
@ -227,41 +205,11 @@ fn create_mappings(
mappings
}
/// For each BCB node or BCB edge that has an associated coverage counter,
/// inject any necessary coverage statements into MIR.
fn inject_coverage_statements<'tcx>(
mir_body: &mut mir::Body<'tcx>,
graph: &CoverageGraph,
extracted_mappings: &ExtractedMappings,
coverage_counters: &CoverageCounters,
) {
// Inject counter-increment statements into MIR.
for (id, bcb) in coverage_counters.counter_increment_sites() {
let target_bb = graph[bcb].leader_bb();
inject_statement(mir_body, CoverageKind::CounterIncrement { id }, target_bb);
}
// For each counter expression that is directly associated with at least one
// span, we inject an "expression-used" statement, so that coverage codegen
// can check whether the injected statement survived MIR optimization.
// (BCB edges can't have spans, so we only need to process BCB nodes here.)
//
// We only do this for ordinary `Code` mappings, because branch and MC/DC
// mappings might have expressions that don't correspond to any single
// point in the control-flow graph.
//
// See the code in `rustc_codegen_llvm::coverageinfo::map_data` that deals
// with "expressions seen" and "zero terms".
let eligible_bcbs = extracted_mappings.bcbs_with_ordinary_code_mappings();
for (bcb, expression_id) in coverage_counters
.bcb_nodes_with_coverage_expressions()
.filter(|&(bcb, _)| eligible_bcbs.contains(bcb))
{
inject_statement(
mir_body,
CoverageKind::ExpressionUsed { id: expression_id },
graph[bcb].leader_bb(),
);
/// Inject any necessary coverage statements into MIR, so that they influence codegen.
fn inject_coverage_statements<'tcx>(mir_body: &mut mir::Body<'tcx>, graph: &CoverageGraph) {
for (bcb, data) in graph.iter_enumerated() {
let target_bb = data.leader_bb();
inject_statement(mir_body, CoverageKind::VirtualCounter { bcb }, target_bb);
}
}


@ -1,10 +1,7 @@
use rustc_data_structures::captures::Captures;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::coverage::{
CounterId, CovTerm, CoverageIdsInfo, CoverageKind, Expression, ExpressionId,
FunctionCoverageInfo, MappingKind, Op,
};
use rustc_middle::mir::coverage::{BasicCoverageBlock, CoverageIdsInfo, CoverageKind, MappingKind};
use rustc_middle::mir::{Body, Statement, StatementKind};
use rustc_middle::ty::{self, TyCtxt};
use rustc_middle::util::Providers;
@ -12,6 +9,9 @@ use rustc_span::def_id::LocalDefId;
use rustc_span::sym;
use tracing::trace;
use crate::coverage::counters::node_flow::make_node_counters;
use crate::coverage::counters::{CoverageCounters, transcribe_counters};
/// Registers query/hook implementations related to coverage.
pub(crate) fn provide(providers: &mut Providers) {
providers.hooks.is_eligible_for_coverage = is_eligible_for_coverage;
@ -89,39 +89,57 @@ fn coverage_ids_info<'tcx>(
let mir_body = tcx.instance_mir(instance_def);
let fn_cov_info = mir_body.function_coverage_info.as_deref()?;
let mut counters_seen = DenseBitSet::new_empty(fn_cov_info.num_counters);
let mut expressions_seen = DenseBitSet::new_filled(fn_cov_info.expressions.len());
// For each expression ID that is directly used by one or more mappings,
// mark it as not-yet-seen. This indicates that we expect to see a
// corresponding `ExpressionUsed` statement during MIR traversal.
for mapping in fn_cov_info.mappings.iter() {
// Currently we only worry about ordinary code mappings.
// For branch and MC/DC mappings, expressions might not correspond
// to any particular point in the control-flow graph.
// (Keep this in sync with the injection of `ExpressionUsed`
// statements in the `InstrumentCoverage` MIR pass.)
if let MappingKind::Code(CovTerm::Expression(id)) = mapping.kind {
expressions_seen.remove(id);
}
}
// Scan through the final MIR to see which BCBs survived MIR opts.
// Any BCB not in this set was optimized away.
let mut bcbs_seen = DenseBitSet::new_empty(fn_cov_info.priority_list.len());
for kind in all_coverage_in_mir_body(mir_body) {
match *kind {
CoverageKind::CounterIncrement { id } => {
counters_seen.insert(id);
}
CoverageKind::ExpressionUsed { id } => {
expressions_seen.insert(id);
CoverageKind::VirtualCounter { bcb } => {
bcbs_seen.insert(bcb);
}
_ => {}
}
}
let zero_expressions =
identify_zero_expressions(fn_cov_info, &counters_seen, &expressions_seen);
// Determine the set of BCBs that are referred to by mappings, and therefore
// need a counter. Any node not in this set will only get a counter if it
// is part of the counter expression for a node that is in the set.
let mut bcb_needs_counter =
DenseBitSet::<BasicCoverageBlock>::new_empty(fn_cov_info.priority_list.len());
for mapping in &fn_cov_info.mappings {
match mapping.kind {
MappingKind::Code { bcb } => {
bcb_needs_counter.insert(bcb);
}
MappingKind::Branch { true_bcb, false_bcb } => {
bcb_needs_counter.insert(true_bcb);
bcb_needs_counter.insert(false_bcb);
}
MappingKind::MCDCBranch { true_bcb, false_bcb, mcdc_params: _ } => {
bcb_needs_counter.insert(true_bcb);
bcb_needs_counter.insert(false_bcb);
}
MappingKind::MCDCDecision(_) => {}
}
}
Some(CoverageIdsInfo { counters_seen, zero_expressions })
// FIXME(Zalathar): It should be possible to sort `priority_list[1..]` by
// `!bcbs_seen.contains(bcb)` to simplify the mappings even further, at the
// expense of some churn in the tests. When doing so, also consider removing
// the sorts in `transcribe_counters`.
let node_counters = make_node_counters(&fn_cov_info.node_flow_data, &fn_cov_info.priority_list);
let coverage_counters = transcribe_counters(&node_counters, &bcb_needs_counter, &bcbs_seen);
let CoverageCounters {
phys_counter_for_node, next_counter_id, node_counters, expressions, ..
} = coverage_counters;
Some(CoverageIdsInfo {
num_counters: next_counter_id.as_u32(),
phys_counter_for_node,
term_for_bcb: node_counters,
expressions,
})
}
fn all_coverage_in_mir_body<'a, 'tcx>(
@ -139,94 +157,3 @@ fn is_inlined(body: &Body<'_>, statement: &Statement<'_>) -> bool {
let scope_data = &body.source_scopes[statement.source_info.scope];
scope_data.inlined.is_some() || scope_data.inlined_parent_scope.is_some()
}
/// Identify expressions that will always have a value of zero, and note their
/// IDs in a `DenseBitSet`. Mappings that refer to a zero expression can instead
/// become mappings to a constant zero value.
///
/// This function mainly exists to preserve the simplifications that were
/// already being performed by the Rust-side expression renumbering, so that
/// the resulting coverage mappings don't get worse.
fn identify_zero_expressions(
fn_cov_info: &FunctionCoverageInfo,
counters_seen: &DenseBitSet<CounterId>,
expressions_seen: &DenseBitSet<ExpressionId>,
) -> DenseBitSet<ExpressionId> {
// The set of expressions that either were optimized out entirely, or
// have zero as both of their operands, and will therefore always have
// a value of zero. Other expressions that refer to these as operands
// can have those operands replaced with `CovTerm::Zero`.
let mut zero_expressions = DenseBitSet::new_empty(fn_cov_info.expressions.len());
// Simplify a copy of each expression based on lower-numbered expressions,
// and then update the set of always-zero expressions if necessary.
// (By construction, expressions can only refer to other expressions
// that have lower IDs, so one pass is sufficient.)
for (id, expression) in fn_cov_info.expressions.iter_enumerated() {
if !expressions_seen.contains(id) {
// If an expression was not seen, it must have been optimized away,
// so any operand that refers to it can be replaced with zero.
zero_expressions.insert(id);
continue;
}
// We don't need to simplify the actual expression data in the
// expressions list; we can just simplify a temporary copy and then
// use that to update the set of always-zero expressions.
let Expression { mut lhs, op, mut rhs } = *expression;
// If an expression has an operand that is also an expression, the
// operand's ID must be strictly lower. This is what lets us find
// all zero expressions in one pass.
let assert_operand_expression_is_lower = |operand_id: ExpressionId| {
assert!(
operand_id < id,
"Operand {operand_id:?} should be less than {id:?} in {expression:?}",
)
};
// If an operand refers to a counter or expression that is always
// zero, then that operand can be replaced with `CovTerm::Zero`.
let maybe_set_operand_to_zero = |operand: &mut CovTerm| {
if let CovTerm::Expression(id) = *operand {
assert_operand_expression_is_lower(id);
}
if is_zero_term(&counters_seen, &zero_expressions, *operand) {
*operand = CovTerm::Zero;
}
};
maybe_set_operand_to_zero(&mut lhs);
maybe_set_operand_to_zero(&mut rhs);
// Coverage counter values cannot be negative, so if an expression
// involves subtraction from zero, assume that its RHS must also be zero.
// (Do this after simplifications that could set the LHS to zero.)
if lhs == CovTerm::Zero && op == Op::Subtract {
rhs = CovTerm::Zero;
}
// After the above simplifications, if both operands are zero, then
// we know that this expression is always zero too.
if lhs == CovTerm::Zero && rhs == CovTerm::Zero {
zero_expressions.insert(id);
}
}
zero_expressions
}
/// Returns `true` if the given term is known to have a value of zero, taking
/// into account knowledge of which counters are unused and which expressions
/// are always zero.
fn is_zero_term(
counters_seen: &DenseBitSet<CounterId>,
zero_expressions: &DenseBitSet<ExpressionId>,
term: CovTerm,
) -> bool {
match term {
CovTerm::Zero => true,
CovTerm::Counter(id) => !counters_seen.contains(id),
CovTerm::Expression(id) => zero_expressions.contains(id),
}
}


@ -137,8 +137,7 @@ fn filtered_statement_span(statement: &Statement<'_>) -> Option<Span> {
// These coverage statements should not exist prior to coverage instrumentation.
StatementKind::Coverage(
CoverageKind::CounterIncrement { .. }
| CoverageKind::ExpressionUsed { .. }
CoverageKind::VirtualCounter { .. }
| CoverageKind::CondBitmapUpdate { .. }
| CoverageKind::TestVectorBitmapUpdate { .. },
) => bug!(