Rollup merge of #98868 - tmiasko:unreachable-coverage, r=wesleywiser

Fix unreachable coverage generation for inlined functions

To generate function coverage we need at least one coverage counter,
so coverage from unreachable blocks is retained only when some live
counters remain.

The previous implementation incorrectly retained unreachable coverage,
because it didn't account for the fact that those live counters can
belong to another function due to inlining.

Fixes #98833.
This commit is contained in:
Dylan DPC 2022-07-22 11:53:40 +05:30 committed by GitHub
commit 6e3dd69e36
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 111 additions and 43 deletions

View file

@ -9,11 +9,8 @@ use super::FunctionCx;
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
pub fn codegen_coverage(&self, bx: &mut Bx, coverage: Coverage, scope: SourceScope) { pub fn codegen_coverage(&self, bx: &mut Bx, coverage: Coverage, scope: SourceScope) {
// Determine the instance that coverage data was originally generated for. // Determine the instance that coverage data was originally generated for.
let scope_data = &self.mir.source_scopes[scope]; let instance = if let Some(inlined) = scope.inlined_instance(&self.mir.source_scopes) {
let instance = if let Some((inlined_instance, _)) = scope_data.inlined { self.monomorphize(inlined)
self.monomorphize(inlined_instance)
} else if let Some(inlined_scope) = scope_data.inlined_parent_scope {
self.monomorphize(self.mir.source_scopes[inlined_scope].inlined.unwrap().0)
} else { } else {
self.instance self.instance
}; };

View file

@ -1662,6 +1662,22 @@ impl SourceScope {
ClearCrossCrate::Clear => None, ClearCrossCrate::Clear => None,
} }
} }
/// The instance this source scope was inlined from, if any.
///
/// Walks at most one level: either this scope records the inlined
/// instance directly, or its inlined parent scope does.
#[inline]
pub fn inlined_instance<'tcx>(
self,
source_scopes: &IndexVec<SourceScope, SourceScopeData<'tcx>>,
) -> Option<ty::Instance<'tcx>> {
let scope_data = &source_scopes[self];
// Scope created directly by inlining: it stores the instance itself.
if let Some((inlined_instance, _)) = scope_data.inlined {
Some(inlined_instance)
// A scope nested inside an inlined body instead points at the nearest
// enclosing inlined scope, which is expected to carry the instance
// (hence the `unwrap` — NOTE(review): relies on that invariant holding
// for every `inlined_parent_scope` link).
} else if let Some(inlined_scope) = scope_data.inlined_parent_scope {
Some(source_scopes[inlined_scope].inlined.unwrap().0)
// Not part of any inlined body: coverage belongs to the body's own
// instance, so there is no inlined instance to report.
} else {
None
}
}
} }
#[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)] #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]

View file

@ -28,6 +28,7 @@
//! return. //! return.
use crate::MirPass; use crate::MirPass;
use rustc_data_structures::fx::FxHashSet;
use rustc_index::vec::{Idx, IndexVec}; use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::coverage::*; use rustc_middle::mir::coverage::*;
use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
@ -267,7 +268,8 @@ pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
return; return;
} }
let basic_blocks = body.basic_blocks_mut(); let basic_blocks = body.basic_blocks.as_mut();
let source_scopes = &body.source_scopes;
let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect(); let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect();
let mut used_blocks = 0; let mut used_blocks = 0;
for alive_index in reachable.iter() { for alive_index in reachable.iter() {
@ -282,7 +284,7 @@ pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
} }
if tcx.sess.instrument_coverage() { if tcx.sess.instrument_coverage() {
save_unreachable_coverage(basic_blocks, used_blocks); save_unreachable_coverage(basic_blocks, source_scopes, used_blocks);
} }
basic_blocks.raw.truncate(used_blocks); basic_blocks.raw.truncate(used_blocks);
@ -311,56 +313,62 @@ pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
/// `Unreachable` coverage statements. These are non-executable statements whose /// `Unreachable` coverage statements. These are non-executable statements whose
/// code regions are still recorded in the coverage map, representing regions /// code regions are still recorded in the coverage map, representing regions
/// with `0` executions. /// with `0` executions.
///
/// If there are no live `Counter` `Coverage` statements remaining, we remove
/// dead `Coverage` statements along with the dead blocks. Since at least one
/// counter per function is required by LLVM (and necessary, to add the
/// `function_hash` to the counter's call to the LLVM intrinsic
/// `instrprof.increment()`).
///
/// The `generator::StateTransform` MIR pass and MIR inlining can create
/// atypical conditions, where all live `Counter`s are dropped from the MIR.
///
/// With MIR inlining we can have coverage counters belonging to different
/// instances in a single body, so the strategy described above is applied to
/// coverage counters from each instance individually.
fn save_unreachable_coverage( fn save_unreachable_coverage(
basic_blocks: &mut IndexVec<BasicBlock, BasicBlockData<'_>>, basic_blocks: &mut IndexVec<BasicBlock, BasicBlockData<'_>>,
source_scopes: &IndexVec<SourceScope, SourceScopeData<'_>>,
first_dead_block: usize, first_dead_block: usize,
) { ) {
let has_live_counters = basic_blocks.raw[0..first_dead_block].iter().any(|live_block| { // Identify instances that still have some live coverage counters left.
live_block.statements.iter().any(|statement| { let mut live = FxHashSet::default();
if let StatementKind::Coverage(coverage) = &statement.kind { for basic_block in &basic_blocks.raw[0..first_dead_block] {
matches!(coverage.kind, CoverageKind::Counter { .. }) for statement in &basic_block.statements {
} else { let StatementKind::Coverage(coverage) = &statement.kind else { continue };
false let CoverageKind::Counter { .. } = coverage.kind else { continue };
} let instance = statement.source_info.scope.inlined_instance(source_scopes);
}) live.insert(instance);
}); }
if !has_live_counters { }
// If there are no live `Counter` `Coverage` statements anymore, don't
// move dead coverage to the `START_BLOCK`. Just allow the dead if live.is_empty() {
// `Coverage` statements to be dropped with the dead blocks.
//
// The `generator::StateTransform` MIR pass can create atypical
// conditions, where all live `Counter`s are dropped from the MIR.
//
// At least one Counter per function is required by LLVM (and necessary,
// to add the `function_hash` to the counter's call to the LLVM
// intrinsic `instrprof.increment()`).
return; return;
} }
// Retain coverage info for dead blocks, so coverage reports will still // Retain coverage for instances that still have some live counters left.
// report `0` executions for the uncovered code regions. let mut retained_coverage = Vec::new();
let mut dropped_coverage = Vec::new(); for dead_block in &basic_blocks.raw[first_dead_block..] {
for dead_block in basic_blocks.raw[first_dead_block..].iter() { for statement in &dead_block.statements {
for statement in dead_block.statements.iter() { let StatementKind::Coverage(coverage) = &statement.kind else { continue };
if let StatementKind::Coverage(coverage) = &statement.kind { let Some(code_region) = &coverage.code_region else { continue };
if let Some(code_region) = &coverage.code_region { let instance = statement.source_info.scope.inlined_instance(source_scopes);
dropped_coverage.push((statement.source_info, code_region.clone())); if live.contains(&instance) {
} retained_coverage.push((statement.source_info, code_region.clone()));
} }
} }
} }
let start_block = &mut basic_blocks[START_BLOCK]; let start_block = &mut basic_blocks[START_BLOCK];
for (source_info, code_region) in dropped_coverage { start_block.statements.extend(retained_coverage.into_iter().map(
start_block.statements.push(Statement { |(source_info, code_region)| Statement {
source_info, source_info,
kind: StatementKind::Coverage(Box::new(Coverage { kind: StatementKind::Coverage(Box::new(Coverage {
kind: CoverageKind::Unreachable, kind: CoverageKind::Unreachable,
code_region: Some(code_region), code_region: Some(code_region),
})), })),
}) },
} ));
} }
pub struct SimplifyLocals; pub struct SimplifyLocals;

View file

@ -98,6 +98,7 @@ mod merging;
use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync; use rustc_data_structures::sync;
use rustc_hir::def_id::DefIdSet; use rustc_hir::def_id::DefIdSet;
use rustc_middle::mir;
use rustc_middle::mir::mono::MonoItem; use rustc_middle::mir::mono::MonoItem;
use rustc_middle::mir::mono::{CodegenUnit, Linkage}; use rustc_middle::mir::mono::{CodegenUnit, Linkage};
use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::print::with_no_trimmed_paths;
@ -479,9 +480,14 @@ fn codegened_and_inlined_items<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> &'tcx DefIdSe
if !visited.insert(did) { if !visited.insert(did) {
continue; continue;
} }
for scope in &tcx.instance_mir(instance.def).source_scopes { let body = tcx.instance_mir(instance.def);
if let Some((ref inlined, _)) = scope.inlined { for block in body.basic_blocks() {
result.insert(inlined.def_id()); for statement in &block.statements {
let mir::StatementKind::Coverage(_) = statement.kind else { continue };
let scope = statement.source_info.scope;
if let Some(inlined) = scope.inlined_instance(&body.source_scopes) {
result.insert(inlined.def_id());
}
} }
} }
} }

View file

@ -0,0 +1,21 @@
1| |// Regression test for issue #98833.
2| |// compile-flags: -Zinline-mir
3| |
4| 1|fn main() {
5| 1| println!("{}", live::<false>());
6| 1|}
7| |
8| |#[inline]
9| 1|fn live<const B: bool>() -> u32 {
10| 1| if B {
11| 0| dead()
12| | } else {
13| 1| 0
14| | }
15| 1|}
16| |
17| |#[inline]
18| 0|fn dead() -> u32 {
19| 0| 42
20| 0|}

View file

@ -0,0 +1,20 @@
// Regression test for issue #98833.
// compile-flags: -Zinline-mir
fn main() {
println!("{}", live::<false>());
}
#[inline]
fn live<const B: bool>() -> u32 {
if B {
dead()
} else {
0
}
}
#[inline]
fn dead() -> u32 {
42
}