//! Metadata from source code coverage analysis and instrumentation.

use std::fmt::{self, Debug, Formatter};

use rustc_index::IndexVec;
use rustc_index::bit_set::BitSet;
use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
use rustc_span::Span;

rustc_index::newtype_index! {
    /// Used by [`CoverageKind::BlockMarker`] to mark blocks during THIR-to-MIR
    /// lowering, so that those blocks can be identified later.
    #[derive(HashStable)]
    #[encodable]
    #[debug_format = "BlockMarkerId({})"]
    pub struct BlockMarkerId {}
}

rustc_index::newtype_index! {
    /// ID of a coverage counter. Values ascend from 0.
    ///
    /// Before MIR inlining, counter IDs are local to their enclosing function.
    /// After MIR inlining, coverage statements may have been inlined into
    /// another function, so use the statement's source-scope to find which
    /// function/instance its IDs are meaningful for.
    ///
    /// Note that LLVM handles counter IDs as `uint32_t`, so there is no need
    /// to use a larger representation on the Rust side.
    #[derive(HashStable)]
    #[encodable]
    #[orderable]
    #[debug_format = "CounterId({})"]
    pub struct CounterId {}
}

rustc_index::newtype_index! {
    /// ID of a coverage-counter expression. Values ascend from 0.
    ///
    /// Before MIR inlining, expression IDs are local to their enclosing function.
    /// After MIR inlining, coverage statements may have been inlined into
    /// another function, so use the statement's source-scope to find which
    /// function/instance its IDs are meaningful for.
    ///
    /// Note that LLVM handles expression IDs as `uint32_t`, so there is no need
    /// to use a larger representation on the Rust side.
    #[derive(HashStable)]
    #[encodable]
    #[orderable]
    #[debug_format = "ExpressionId({})"]
    pub struct ExpressionId {}
}

rustc_index::newtype_index! {
    /// ID of an MC/DC condition. Used by LLVM to check MC/DC coverage.
    ///
    /// Note for the future: the max limit of 0xFFFF is probably too loose. LLVM does not
    /// actually support decisions with this many conditions (at most 6 in LLVM 18, though
    /// possibly hundreds in LLVM 19), and it represents condition IDs with `int16_t`. This
    /// max value may be changed once we can figure out an accurate limit.
    #[derive(HashStable)]
    #[encodable]
    #[orderable]
    #[max = 0xFFFF]
    #[debug_format = "ConditionId({})"]
    pub struct ConditionId {}
}

impl ConditionId {
    pub const START: Self = Self::from_usize(0);
}

/// Enum that can hold a constant zero value, the ID of a physical coverage
/// counter, or the ID of a coverage-counter expression.
///
/// This was originally only used for expression operands (and named `Operand`),
/// but the zero/counter/expression distinction is also useful for representing
/// the value of code/gap mappings, and the true/false arms of branch mappings.
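///
/// For example, `CovTerm::Counter(CounterId::from_u32(3))` refers to physical
/// counter 3, and `CovTerm::Expression(ExpressionId::from_u32(0))` refers to
/// coverage expression 0; the `Debug` impl below prints them as `Counter(3)`
/// and `Expression(0)`. (Illustrative values only.)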
#[derive(Copy, Clone, PartialEq, Eq)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub enum CovTerm {
    Zero,
    Counter(CounterId),
    Expression(ExpressionId),
}

impl Debug for CovTerm {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            Self::Zero => write!(f, "Zero"),
            Self::Counter(id) => f.debug_tuple("Counter").field(&id.as_u32()).finish(),
            Self::Expression(id) => f.debug_tuple("Expression").field(&id.as_u32()).finish(),
        }
    }
}

#[derive(Clone, PartialEq, TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub enum CoverageKind {
    /// Marks a span that might otherwise not be represented in MIR, so that
    /// coverage instrumentation can associate it with its enclosing block/BCB.
    ///
    /// Should be erased before codegen (at some point after `InstrumentCoverage`).
    SpanMarker,

    /// Marks its enclosing basic block with an ID that can be referred to by
    /// side data in [`CoverageInfoHi`].
    ///
    /// Should be erased before codegen (at some point after `InstrumentCoverage`).
    BlockMarker { id: BlockMarkerId },

    /// Marks the point in MIR control flow represented by a coverage counter.
    ///
    /// This is eventually lowered to `llvm.instrprof.increment` in LLVM IR.
    ///
    /// If this statement does not survive MIR optimizations, any mappings that
    /// refer to this counter can have those references simplified to zero.
    CounterIncrement { id: CounterId },

    /// Marks the point in MIR control flow represented by a coverage expression.
    ///
    /// If this statement does not survive MIR optimizations, any mappings that
    /// refer to this expression can have those references simplified to zero.
    ///
    /// (This is only inserted for expression IDs that are directly used by
    /// mappings. Intermediate expressions with no direct mappings are
    /// retained/zeroed based on whether they are transitively used.)
    ExpressionUsed { id: ExpressionId },

    /// Marks the point in MIR control flow represented by an evaluated condition.
    ///
    /// This is eventually lowered to instrumentation that updates the MC/DC
    /// temporary condition-bitmap variables.
    CondBitmapUpdate { index: u32, decision_depth: u16 },

    /// Marks the point in MIR control flow represented by an evaluated decision.
    ///
    /// This is eventually lowered to `llvm.instrprof.mcdc.tvbitmap.update` in LLVM IR.
    TestVectorBitmapUpdate { bitmap_idx: u32, decision_depth: u16 },
}

impl Debug for CoverageKind {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
        use CoverageKind::*;
        match self {
            SpanMarker => write!(fmt, "SpanMarker"),
            BlockMarker { id } => write!(fmt, "BlockMarker({:?})", id.index()),
            CounterIncrement { id } => write!(fmt, "CounterIncrement({:?})", id.index()),
            ExpressionUsed { id } => write!(fmt, "ExpressionUsed({:?})", id.index()),
            CondBitmapUpdate { index, decision_depth } => {
                write!(fmt, "CondBitmapUpdate(index={:?}, depth={:?})", index, decision_depth)
            }
            TestVectorBitmapUpdate { bitmap_idx, decision_depth } => {
                write!(fmt, "TestVectorUpdate({:?}, depth={:?})", bitmap_idx, decision_depth)
            }
        }
    }
}

#[derive(Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq, Eq, PartialOrd, Ord)]
#[derive(TypeFoldable, TypeVisitable)]
pub struct SourceRegion {
    pub start_line: u32,
    pub start_col: u32,
    pub end_line: u32,
    pub end_col: u32,
}

impl Debug for SourceRegion {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
        let &Self { start_line, start_col, end_line, end_col } = self;
        write!(fmt, "{start_line}:{start_col} - {end_line}:{end_col}")
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)]
#[derive(TyEncodable, TyDecodable, TypeFoldable, TypeVisitable)]
pub enum Op {
    Subtract,
    Add,
}

impl Op {
    pub fn is_add(&self) -> bool {
        matches!(self, Self::Add)
    }

    pub fn is_subtract(&self) -> bool {
        matches!(self, Self::Subtract)
    }
}

/// A coverage-counter expression: computes `lhs op rhs`, where each operand is
/// a counter, another expression, or zero.
#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct Expression {
    pub lhs: CovTerm,
    pub op: Op,
    pub rhs: CovTerm,
}
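
// Illustrative example (hypothetical IDs, not tied to any particular
// instrumentation output): a derived count such as "times the `if` was
// reached, minus times the `then` arm was taken" would be represented as
// `Expression { lhs: CovTerm::Counter(if_counter), op: Op::Subtract,
// rhs: CovTerm::Counter(then_counter) }`. Other expressions and mappings can
// then refer to the result via `CovTerm::Expression(its_id)`.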

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub enum MappingKind {
    /// Associates a normal region of code with a counter/expression/zero.
    Code(CovTerm),
    /// Associates a branch region with separate counters for true and false.
    Branch { true_term: CovTerm, false_term: CovTerm },
    /// Associates an MC/DC branch region with separate counters for true and
    /// false, plus the MC/DC parameters of its condition.
    MCDCBranch { true_term: CovTerm, false_term: CovTerm, mcdc_params: ConditionInfo },
    /// Associates a decision region with a bitmap and number of conditions.
    MCDCDecision(DecisionInfo),
}

impl MappingKind {
    /// Returns a copy of this mapping kind, in which all coverage terms have
    /// been replaced with ones returned by the given function.
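    ///
    /// For example, `kind.map_terms(|_| CovTerm::Zero)` (an illustrative call,
    /// not one made in this module) returns a copy of `kind` with every term
    /// replaced by `CovTerm::Zero`.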
    pub fn map_terms(&self, map_fn: impl Fn(CovTerm) -> CovTerm) -> Self {
        match *self {
            Self::Code(term) => Self::Code(map_fn(term)),
            Self::Branch { true_term, false_term } => {
                Self::Branch { true_term: map_fn(true_term), false_term: map_fn(false_term) }
            }
            Self::MCDCBranch { true_term, false_term, mcdc_params } => Self::MCDCBranch {
                true_term: map_fn(true_term),
                false_term: map_fn(false_term),
                mcdc_params,
            },
            Self::MCDCDecision(param) => Self::MCDCDecision(param),
        }
    }
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct Mapping {
    pub kind: MappingKind,
    pub source_region: SourceRegion,
}

/// Stores per-function coverage information attached to a `mir::Body`,
/// to be used in conjunction with the individual coverage statements injected
/// into the function's basic blocks.
#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct FunctionCoverageInfo {
    pub function_source_hash: u64,
    pub body_span: Span,
    pub num_counters: usize,
    pub mcdc_bitmap_bits: usize,
    pub expressions: IndexVec<ExpressionId, Expression>,
    pub mappings: Vec<Mapping>,
    /// The depth of the deepest decision is used to know how many temporary
    /// condition bitmaps should be allocated for the function.
    pub mcdc_num_condition_bitmaps: usize,
}

/// Coverage information for a function, recorded during MIR building and
/// attached to the corresponding `mir::Body`. Used by the `InstrumentCoverage`
/// MIR pass.
///
/// ("Hi" indicates that this is "high-level" information collected at the
/// THIR/MIR boundary, before the MIR-based coverage instrumentation pass.)
#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct CoverageInfoHi {
    /// 1 more than the highest-numbered [`CoverageKind::BlockMarker`] that was
    /// injected into the MIR body. This makes it possible to allocate per-ID
    /// data structures without having to scan the entire body first.
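    ///
    /// (For example, an `IndexVec<BlockMarkerId, T>` with `num_block_markers`
    /// entries has a slot for every injected marker, with no resizing needed.)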
    pub num_block_markers: usize,
    pub branch_spans: Vec<BranchSpan>,
    /// Branch spans generated by MC/DC. Due to limitations of the MC/DC builder,
    /// it gave up on the decisions containing these branches, so they are
    /// handled as normal branch spans instead.
    pub mcdc_degraded_branch_spans: Vec<MCDCBranchSpan>,
    pub mcdc_spans: Vec<(MCDCDecisionSpan, Vec<MCDCBranchSpan>)>,
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct BranchSpan {
    pub span: Span,
    pub true_marker: BlockMarkerId,
    pub false_marker: BlockMarkerId,
}

#[derive(Copy, Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct ConditionInfo {
    pub condition_id: ConditionId,
    pub true_next_id: Option<ConditionId>,
    pub false_next_id: Option<ConditionId>,
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct MCDCBranchSpan {
    pub span: Span,
    pub condition_info: ConditionInfo,
    pub true_marker: BlockMarkerId,
    pub false_marker: BlockMarkerId,
}

#[derive(Copy, Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct DecisionInfo {
    pub bitmap_idx: u32,
    pub num_conditions: u16,
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct MCDCDecisionSpan {
    pub span: Span,
    pub end_markers: Vec<BlockMarkerId>,
    pub decision_depth: u16,
    pub num_conditions: usize,
}

/// Summarizes coverage IDs inserted by the `InstrumentCoverage` MIR pass
/// (for compiler option `-Cinstrument-coverage`), after MIR optimizations
/// have had a chance to potentially remove some of them.
///
/// Used by the `coverage_ids_info` query.
#[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable)]
pub struct CoverageIdsInfo {
    pub counters_seen: BitSet<CounterId>,
    pub expressions_seen: BitSet<ExpressionId>,
}

impl CoverageIdsInfo {
    /// Coverage codegen needs to know how many coverage counters are ever
    /// incremented within a function, so that it can set the `num-counters`
    /// argument of the `llvm.instrprof.increment` intrinsic.
    ///
    /// This may be less than the highest counter ID emitted by the
    /// InstrumentCoverage MIR pass, if the highest-numbered counter increments
    /// were removed by MIR optimizations.
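    ///
    /// For example, if only counters 0, 2, and 5 survive MIR optimizations,
    /// the last set bit in `counters_seen` is 5 and this returns 6; counters
    /// 1, 3, and 4 remain allocated but are never incremented (see the FIXME
    /// below).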
    pub fn num_counters_after_mir_opts(&self) -> u32 {
        // FIXME(Zalathar): Currently this treats an unused counter as "used"
        // if its ID is less than that of the highest counter that really is
        // used. Fixing this would require adding a renumbering step somewhere.
        self.counters_seen.last_set_in(..).map_or(0, |max| max.as_u32() + 1)
    }
}