add spans to injected coverage counters

added code regions for counters and counter expressions.

Added codegen_llvm/coverageinfo mod for upcoming coverage map

Move coverage region collection to CodegenCx finalization

Moved from `query coverageinfo` (renamed from `query coverage_data`),
as discussed in the PR at:

https://github.com/rust-lang/rust/pull/73684#issuecomment-649882503

Address merge conflict in MIR instrument_coverage test

The MIR test output format changed for int types.

moved debug messages out of block.rs

This makes the block.rs calls to add coverage mapping data to the
CodegenCx much more concise and readable.

move coverage intrinsic handling into llvm impl

I realized that having half of the coverage intrinsic handling in
`rustc_codegen_ssa` and half in `rustc_codegen_llvm` meant that any
non-LLVM backend would be bound to the same decisions about how the
coverage-related MIR terminators should be handled.

To fix this, I moved the non-codegen portion of coverage intrinsic
handling into its own trait, and implemented it in `rustc_codegen_llvm`
alongside `codegen_intrinsic_call`.

I also added the (required?) stubs for the new intrinsics to
`InterpCx::emulate_intrinsic()`, to ensure calls to this function do
not fail when called with these new but known intrinsics.
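
In practice, the split works roughly like this (a simplified sketch of the
`rustc_codegen_ssa` hunk further below; only the control flow is shown):

    // rustc_codegen_ssa consults the backend before converting arguments:
    if !bx.is_codegen_intrinsic(intrinsic, &args, self.instance) {
        // Handled entirely at compile time (e.g., coverage map bookkeeping);
        // skip argument conversion and `codegen_intrinsic_call()`.
        return;
    }
    // Otherwise, proceed with normal backend code generation.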

address PR Feedback on 28 June 2020 2:48pm PDT
Rich Kadel 2020-06-21 23:29:08 -07:00
parent c977b8775d
commit 5239a68e72
27 changed files with 585 additions and 93 deletions

View file

@@ -1956,7 +1956,40 @@ extern "rust-intrinsic" {
/// generation.
#[cfg(not(bootstrap))]
#[lang = "count_code_region"]
pub fn count_code_region(index: u32);
pub fn count_code_region(index: u32, start_byte_pos: u32, end_byte_pos: u32);
/// Internal marker for code coverage expressions, injected into the MIR when the
/// "instrument-coverage" option is enabled. This intrinsic is not converted into a
/// backend intrinsic call, but its arguments are extracted during the production of a
/// "coverage map", which is injected into the generated code, as additional data.
/// This marker identifies a code region and two other counters or counter expressions
/// whose sum is the number of times the code region was executed.
#[cfg(not(bootstrap))]
pub fn coverage_counter_add(
index: u32,
left_index: u32,
right_index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
);
/// This marker identifies a code region and two other counters or counter expressions
/// whose difference is the number of times the code region was executed.
/// (See `coverage_counter_add` for more information.)
#[cfg(not(bootstrap))]
pub fn coverage_counter_subtract(
index: u32,
left_index: u32,
right_index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
);
/// This marker identifies a code region to be added to the "coverage map" to indicate source
/// code that can never be reached.
/// (See `coverage_counter_add` for more information.)
#[cfg(not(bootstrap))]
pub fn coverage_unreachable(start_byte_pos: u32, end_byte_pos: u32);
/// See documentation of `<*const T>::guaranteed_eq` for details.
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
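
To make the counter-expression semantics concrete, here is a hypothetical
illustration (not from this commit): with physical counters injected into the
two arms of an `if`, the region that follows needs no counter of its own:

    fn example(cond: bool) {
        if cond {
            // region counted by injected counter 0
        } else {
            // region counted by injected counter 1
        }
        // This region executes (counter 0 + counter 1) times, so it can be
        // described by, e.g., coverage_counter_add(2, 0, 1, start_byte_pos,
        // end_byte_pos) rather than by incrementing a third counter at runtime.
    }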

View file

@@ -150,6 +150,11 @@ pub fn compile_codegen_unit(
cx.create_used_variable()
}
// Finalize code coverage by injecting the coverage map
if cx.sess().opts.debugging_opts.instrument_coverage {
cx.coverageinfo_finalize();
}
// Finalize debuginfo
if cx.sess().opts.debuginfo != DebugInfo::None {
cx.debuginfo_finalize();

View file

@@ -1,5 +1,6 @@
use crate::attributes;
use crate::callee::get_fn;
use crate::coverageinfo;
use crate::debuginfo;
use crate::llvm;
use crate::llvm_util;
@@ -77,6 +78,7 @@ pub struct CodegenCx<'ll, 'tcx> {
pub pointee_infos: RefCell<FxHashMap<(Ty<'tcx>, Size), Option<PointeeInfo>>>,
pub isize_ty: &'ll Type,
pub coverage_cx: Option<coverageinfo::CrateCoverageContext<'tcx>>,
pub dbg_cx: Option<debuginfo::CrateDebugContext<'ll, 'tcx>>,
eh_personality: Cell<Option<&'ll Value>>,
@@ -256,6 +258,13 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
let (llcx, llmod) = (&*llvm_module.llcx, llvm_module.llmod());
let coverage_cx = if tcx.sess.opts.debugging_opts.instrument_coverage {
let covctx = coverageinfo::CrateCoverageContext::new();
Some(covctx)
} else {
None
};
let dbg_cx = if tcx.sess.opts.debuginfo != DebugInfo::None {
let dctx = debuginfo::CrateDebugContext::new(llmod);
debuginfo::metadata::compile_unit_metadata(tcx, &codegen_unit.name().as_str(), &dctx);
@@ -285,6 +294,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
scalar_lltypes: Default::default(),
pointee_infos: Default::default(),
isize_ty,
coverage_cx,
dbg_cx,
eh_personality: Cell::new(None),
rust_try_fn: Cell::new(None),
@@ -296,6 +306,11 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
crate fn statics_to_rauw(&self) -> &RefCell<Vec<(&'ll Value, &'ll Value)>> {
&self.statics_to_rauw
}
#[inline]
pub fn coverage_context(&'a self) -> &'a coverageinfo::CrateCoverageContext<'tcx> {
self.coverage_cx.as_ref().unwrap()
}
}
impl MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> {
@@ -749,8 +764,6 @@ impl CodegenCx<'b, 'tcx> {
ifn!("llvm.lifetime.start.p0i8", fn(t_i64, i8p) -> void);
ifn!("llvm.lifetime.end.p0i8", fn(t_i64, i8p) -> void);
ifn!("llvm.instrprof.increment", fn(i8p, t_i64, t_i32, t_i32) -> void);
ifn!("llvm.expect.i1", fn(i1, i1) -> i1);
ifn!("llvm.eh.typeid.for", fn(i8p) -> t_i32);
ifn!("llvm.localescape", fn(...) -> void);
@@ -765,6 +778,10 @@ impl CodegenCx<'b, 'tcx> {
ifn!("llvm.va_end", fn(i8p) -> void);
ifn!("llvm.va_copy", fn(i8p, i8p) -> void);
if self.sess().opts.debugging_opts.instrument_coverage {
ifn!("llvm.instrprof.increment", fn(i8p, t_i64, t_i32, t_i32) -> void);
}
if self.sess().opts.debuginfo != DebugInfo::None {
ifn!("llvm.dbg.declare", fn(self.type_metadata(), self.type_metadata()) -> void);
ifn!("llvm.dbg.value", fn(self.type_metadata(), t_i64, self.type_metadata()) -> void);

View file

@@ -0,0 +1,126 @@
use crate::builder::Builder;
use crate::common::CodegenCx;
use log::debug;
use rustc_codegen_ssa::coverageinfo::map::*;
use rustc_codegen_ssa::traits::{CoverageInfoBuilderMethods, CoverageInfoMethods};
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::ty::Instance;
use std::cell::RefCell;
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'tcx> {
// Coverage region data for each instrumented function, keyed by its monomorphized `Instance`.
pub(crate) coverage_regions: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverageRegions>>,
}
impl<'tcx> CrateCoverageContext<'tcx> {
pub fn new() -> Self {
Self { coverage_regions: Default::default() }
}
}
/// Generates and exports the Coverage Map.
// FIXME(richkadel): Actually generate and export the coverage map to LLVM.
// The current implementation only emits debug messages showing that the data is available.
pub fn finalize(cx: &CodegenCx<'_, '_>) {
let coverage_regions = &*cx.coverage_context().coverage_regions.borrow();
for instance in coverage_regions.keys() {
let coverageinfo = cx.tcx.coverageinfo(instance.def_id());
debug_assert!(coverageinfo.num_counters > 0);
debug!(
"Generate coverage map for: {:?}, hash: {}, num_counters: {}",
instance, coverageinfo.hash, coverageinfo.num_counters
);
let function_coverage_regions = &coverage_regions[instance];
for (index, region) in function_coverage_regions.indexed_regions() {
match region.kind {
CoverageKind::Counter => debug!(
" Counter {}, for {}..{}",
index, region.coverage_span.start_byte_pos, region.coverage_span.end_byte_pos
),
CoverageKind::CounterExpression(lhs, op, rhs) => debug!(
" CounterExpression {} = {} {:?} {}, for {}..{}",
index,
lhs,
op,
rhs,
region.coverage_span.start_byte_pos,
region.coverage_span.end_byte_pos
),
}
}
for unreachable in function_coverage_regions.unreachable_regions() {
debug!(
" Unreachable code region: {}..{}",
unreachable.start_byte_pos, unreachable.end_byte_pos
);
}
}
}
impl CoverageInfoMethods for CodegenCx<'ll, 'tcx> {
fn coverageinfo_finalize(&self) {
finalize(self)
}
}
impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
"adding counter to coverage map: instance={:?}, index={}, byte range {}..{}",
instance, index, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
coverage_regions.entry(instance).or_default().add_counter(
index,
start_byte_pos,
end_byte_pos,
);
}
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
index: u32,
lhs: u32,
op: CounterOp,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
"adding counter expression to coverage map: instance={:?}, index={}, {} {:?} {}, byte range {}..{}",
instance, index, lhs, op, rhs, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
coverage_regions.entry(instance).or_default().add_counter_expression(
index,
lhs,
op,
rhs,
start_byte_pos,
end_byte_pos,
);
}
fn add_unreachable_region(
&mut self,
instance: Instance<'tcx>,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
"adding unreachable code to coverage map: instance={:?}, byte range {}..{}",
instance, start_byte_pos, end_byte_pos,
);
let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
coverage_regions.entry(instance).or_default().add_unreachable(start_byte_pos, end_byte_pos);
}
}

View file

@@ -13,12 +13,15 @@ use rustc_ast::ast;
use rustc_codegen_ssa::base::{compare_simd_types, to_immediate, wants_msvc_seh};
use rustc_codegen_ssa::common::span_invalid_monomorphization_error;
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
use rustc_codegen_ssa::coverageinfo::CounterOp;
use rustc_codegen_ssa::glue;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::MemFlags;
use rustc_hir as hir;
use rustc_middle::mir::coverage;
use rustc_middle::mir::Operand;
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, span_bug};
@@ -81,6 +84,53 @@ fn get_simple_intrinsic(cx: &CodegenCx<'ll, '_>, name: &str) -> Option<&'ll Value> {
}
impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
fn is_codegen_intrinsic(
&mut self,
intrinsic: &str,
args: &Vec<Operand<'tcx>>,
caller_instance: ty::Instance<'tcx>,
) -> bool {
match intrinsic {
"count_code_region" => {
use coverage::count_code_region_args::*;
self.add_counter_region(
caller_instance,
op_to_u32(&args[COUNTER_INDEX]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
true // Also inject the counter increment in the backend
}
"coverage_counter_add" | "coverage_counter_subtract" => {
use coverage::coverage_counter_expression_args::*;
self.add_counter_expression_region(
caller_instance,
op_to_u32(&args[COUNTER_EXPRESSION_INDEX]),
op_to_u32(&args[LEFT_INDEX]),
if intrinsic == "coverage_counter_add" {
CounterOp::Add
} else {
CounterOp::Subtract
},
op_to_u32(&args[RIGHT_INDEX]),
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
false // Does not inject backend code
}
"coverage_unreachable" => {
use coverage::coverage_unreachable_args::*;
self.add_unreachable_region(
caller_instance,
op_to_u32(&args[START_BYTE_POS]),
op_to_u32(&args[END_BYTE_POS]),
);
false // Does not inject backend code
}
_ => true, // Unhandled intrinsics should be passed to `codegen_intrinsic_call()`
}
}
fn codegen_intrinsic_call(
&mut self,
instance: ty::Instance<'tcx>,
@@ -143,15 +193,16 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
// FIXME(richkadel): The current implementation assumes the MIR for the given
// caller_instance represents a single function. Validate and/or correct if inlining
// and/or monomorphization invalidates these assumptions.
let coverage_data = tcx.coverage_data(caller_instance.def_id());
let coverageinfo = tcx.coverageinfo(caller_instance.def_id());
let mangled_fn = tcx.symbol_name(caller_instance);
let (mangled_fn_name, _len_val) = self.const_str(mangled_fn.name);
let hash = self.const_u64(coverage_data.hash);
let num_counters = self.const_u32(coverage_data.num_counters);
let index = args[0].immediate();
let hash = self.const_u64(coverageinfo.hash);
let num_counters = self.const_u32(coverageinfo.num_counters);
use coverage::count_code_region_args::*;
let index = args[COUNTER_INDEX].immediate();
debug!(
"count_code_region to LLVM intrinsic instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
mangled_fn.name, hash, num_counters, index
mangled_fn.name, hash, num_counters, index,
);
self.instrprof_increment(mangled_fn_name, hash, num_counters, index)
}
@@ -2131,3 +2182,7 @@ fn float_type_width(ty: Ty<'_>) -> Option<u64> {
_ => None,
}
}
fn op_to_u32<'tcx>(op: &Operand<'tcx>) -> u32 {
Operand::scalar_from_const(op).to_u32().expect("Scalar is u32")
}

View file

@@ -55,6 +55,7 @@ mod callee;
mod common;
mod consts;
mod context;
mod coverageinfo;
mod debuginfo;
mod declare;
mod intrinsic;

View file

@@ -0,0 +1,83 @@
use rustc_data_structures::fx::FxHashMap;
use std::collections::hash_map;
use std::slice;
#[derive(Copy, Clone, Debug)]
pub enum CounterOp {
Add,
Subtract,
}
pub enum CoverageKind {
Counter,
CounterExpression(u32, CounterOp, u32),
}
pub struct CoverageSpan {
pub start_byte_pos: u32,
pub end_byte_pos: u32,
}
pub struct CoverageRegion {
pub kind: CoverageKind,
pub coverage_span: CoverageSpan,
}
/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
/// expressions (addition or subtraction), and (c) unreachable regions (always counted as zero),
/// for a given function. Counters and counter expressions are indexed because they can be operands
/// in an expression.
///
/// Note, it's important to distinguish the `unreachable` region type from what LLVM refers to as
/// a "gap region" (or "gap area"). A gap region is a code region within a counted region (either
/// counter or expression), but the line or lines in the gap region are not executable (such as
/// lines with only whitespace or comments). According to the LLVM Code Coverage Mapping
/// documentation, "A count for a gap area is only used as the line execution count if there are
/// no other regions on a line."
#[derive(Default)]
pub struct FunctionCoverageRegions {
indexed: FxHashMap<u32, CoverageRegion>,
unreachable: Vec<CoverageSpan>,
}
impl FunctionCoverageRegions {
pub fn add_counter(&mut self, index: u32, start_byte_pos: u32, end_byte_pos: u32) {
self.indexed.insert(
index,
CoverageRegion {
kind: CoverageKind::Counter,
coverage_span: CoverageSpan { start_byte_pos, end_byte_pos },
},
);
}
pub fn add_counter_expression(
&mut self,
index: u32,
lhs: u32,
op: CounterOp,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
self.indexed.insert(
index,
CoverageRegion {
kind: CoverageKind::CounterExpression(lhs, op, rhs),
coverage_span: CoverageSpan { start_byte_pos, end_byte_pos },
},
);
}
pub fn add_unreachable(&mut self, start_byte_pos: u32, end_byte_pos: u32) {
self.unreachable.push(CoverageSpan { start_byte_pos, end_byte_pos });
}
pub fn indexed_regions(&self) -> hash_map::Iter<'_, u32, CoverageRegion> {
self.indexed.iter()
}
pub fn unreachable_regions(&self) -> slice::Iter<'_, CoverageSpan> {
self.unreachable.iter()
}
}
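
A hedged usage sketch of this API (the indexes and byte positions are invented
for illustration):

    let mut regions = FunctionCoverageRegions::default();
    regions.add_counter(0, 100, 150); // e.g., the `then` arm of an `if`
    regions.add_counter(1, 160, 210); // e.g., the `else` arm
    // The region after the `if` executes (counter 0 + counter 1) times:
    regions.add_counter_expression(2, 0, CounterOp::Add, 1, 220, 260);
    regions.add_unreachable(270, 300);
    for (index, region) in regions.indexed_regions() {
        // ... encode each indexed region into the coverage map
    }
    for span in regions.unreachable_regions() {
        // ... encode each span with an execution count of zero
    }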

View file

@ -0,0 +1,3 @@
pub mod map;
pub use map::CounterOp;

View file

@@ -34,6 +34,7 @@ use std::path::{Path, PathBuf};
pub mod back;
pub mod base;
pub mod common;
pub mod coverageinfo;
pub mod debuginfo;
pub mod glue;
pub mod meth;

View file

@@ -651,6 +651,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
let intrinsic = intrinsic.unwrap();
// `is_codegen_intrinsic()` allows the backend implementation to perform compile-time
// operations before converting the `args` to backend values.
if !bx.is_codegen_intrinsic(intrinsic, &args, self.instance) {
// If the intrinsic call was fully addressed by the `is_codegen_intrinsic()` call
// (as a compile-time operation), return immediately. This avoids the need to
// convert the arguments, call `codegen_intrinsic_call()`, and handle the return
// value.
return;
}
let dest = match ret_dest {
_ if fn_abi.ret.is_indirect() => llargs[0],
ReturnDest::Nothing => {
@@ -670,7 +682,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// third argument must be constant. This is
// checked by const-qualification, which also
// promotes any complex rvalues to constants.
if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
if i == 2 && intrinsic.starts_with("simd_shuffle") {
if let mir::Operand::Constant(constant) = arg {
let c = self.eval_mir_constant(constant);
let (llval, ty) = self.simd_shuffle_indices(

View file

@@ -1,5 +1,6 @@
use super::abi::AbiBuilderMethods;
use super::asm::AsmBuilderMethods;
use super::coverageinfo::CoverageInfoBuilderMethods;
use super::debuginfo::DebugInfoBuilderMethods;
use super::intrinsic::IntrinsicCallMethods;
use super::type_::ArgAbiMethods;
@@ -29,6 +30,7 @@ pub enum OverflowOp {
pub trait BuilderMethods<'a, 'tcx>:
HasCodegen<'tcx>
+ CoverageInfoBuilderMethods<'tcx>
+ DebugInfoBuilderMethods
+ ArgAbiMethods<'tcx>
+ AbiBuilderMethods<'tcx>

View file

@@ -0,0 +1,35 @@
use super::BackendTypes;
use crate::coverageinfo::CounterOp;
use rustc_middle::ty::Instance;
pub trait CoverageInfoMethods: BackendTypes {
fn coverageinfo_finalize(&self);
}
pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes {
fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
);
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
index: u32,
lhs: u32,
op: CounterOp,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
);
fn add_unreachable_region(
&mut self,
instance: Instance<'tcx>,
start_byte_pos: u32,
end_byte_pos: u32,
);
}
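
Since `BuilderMethods` now requires `CoverageInfoBuilderMethods` (see the
builder hunk above), every backend must provide an implementation; a backend
without coverage support could presumably stub the methods out, e.g.
(hypothetical sketch; `MyBuilder` is not a type in this commit, and it would
also need the `BackendTypes` supertrait):

    impl<'tcx> CoverageInfoBuilderMethods<'tcx> for MyBuilder<'tcx> {
        fn add_counter_region(&mut self, _: Instance<'tcx>, _: u32, _: u32, _: u32) {
            // No-op: this backend ignores coverage instrumentation.
        }
        fn add_counter_expression_region(
            &mut self, _: Instance<'tcx>, _: u32, _: u32, _: CounterOp, _: u32, _: u32, _: u32,
        ) {}
        fn add_unreachable_region(&mut self, _: Instance<'tcx>, _: u32, _: u32) {}
    }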

View file

@@ -1,5 +1,6 @@
use super::BackendTypes;
use crate::mir::operand::OperandRef;
use rustc_middle::mir::Operand;
use rustc_middle::ty::{self, Ty};
use rustc_span::Span;
use rustc_target::abi::call::FnAbi;
@@ -18,6 +19,16 @@ pub trait IntrinsicCallMethods<'tcx>: BackendTypes {
caller_instance: ty::Instance<'tcx>,
);
/// Intrinsic-specific pre-codegen processing, if any is required. Some intrinsics are handled
/// at compile time and do not generate code. Returns `true` if codegen is required, or `false`
/// if the intrinsic does not need code generation.
fn is_codegen_intrinsic(
&mut self,
intrinsic: &str,
args: &Vec<Operand<'tcx>>,
caller_instance: ty::Instance<'tcx>,
) -> bool;
fn abort(&mut self);
fn assume(&mut self, val: Self::Value);
fn expect(&mut self, cond: Self::Value, expected: bool) -> Self::Value;

View file

@@ -19,6 +19,7 @@ mod asm;
mod backend;
mod builder;
mod consts;
mod coverageinfo;
mod debuginfo;
mod declare;
mod intrinsic;
@@ -32,6 +33,7 @@ pub use self::asm::{AsmBuilderMethods, AsmMethods, InlineAsmOperandRef};
pub use self::backend::{Backend, BackendTypes, CodegenBackend, ExtraBackendMethods};
pub use self::builder::{BuilderMethods, OverflowOp};
pub use self::consts::ConstMethods;
pub use self::coverageinfo::{CoverageInfoBuilderMethods, CoverageInfoMethods};
pub use self::debuginfo::{DebugInfoBuilderMethods, DebugInfoMethods};
pub use self::declare::{DeclareMethods, PreDefineMethods};
pub use self::intrinsic::IntrinsicCallMethods;
@@ -56,6 +58,7 @@ pub trait CodegenMethods<'tcx>:
+ MiscMethods<'tcx>
+ ConstMethods<'tcx>
+ StaticMethods
+ CoverageInfoMethods
+ DebugInfoMethods<'tcx>
+ DeclareMethods<'tcx>
+ AsmMethods
@@ -72,6 +75,7 @@ impl<'tcx, T> CodegenMethods<'tcx> for T where
+ MiscMethods<'tcx>
+ ConstMethods<'tcx>
+ StaticMethods
+ CoverageInfoMethods
+ DebugInfoMethods<'tcx>
+ DeclareMethods<'tcx>
+ AsmMethods

View file

@@ -536,7 +536,8 @@ impl<I: Idx, T> IndexVec<I, T> {
}
/// Create an `IndexVec` with `n` elements, where the value of each
/// element is the result of `func(i)`
/// element is the result of `func(i)`. (The underlying vector will
/// be allocated only once, with a capacity of at least `n`.)
#[inline]
pub fn from_fn_n(func: impl FnMut(I) -> T, n: usize) -> Self {
let indices = (0..n).map(I::new);
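
A quick usage sketch (`usize` implements `Idx`, so it serves as the index type
here):

    let squares: IndexVec<usize, usize> = IndexVec::from_fn_n(|i| i * i, 4);
    assert_eq!(squares[3], 9); // backing vector allocated once, capacity >= 4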

View file

@@ -0,0 +1,24 @@
//! Metadata from source code coverage analysis and instrumentation.
/// Positional arguments to `libcore::count_code_region()`
pub mod count_code_region_args {
pub const COUNTER_INDEX: usize = 0;
pub const START_BYTE_POS: usize = 1;
pub const END_BYTE_POS: usize = 2;
}
/// Positional arguments to `libcore::coverage_counter_add()` and
/// `libcore::coverage_counter_subtract()`
pub mod coverage_counter_expression_args {
pub const COUNTER_EXPRESSION_INDEX: usize = 0;
pub const LEFT_INDEX: usize = 1;
pub const RIGHT_INDEX: usize = 2;
pub const START_BYTE_POS: usize = 3;
pub const END_BYTE_POS: usize = 4;
}
/// Positional arguments to `libcore::coverage_unreachable()`
pub mod coverage_unreachable_args {
pub const START_BYTE_POS: usize = 0;
pub const END_BYTE_POS: usize = 1;
}
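
For example, the LLVM backend hunk earlier in this commit reads the injected
call's operands back out by these same positions, roughly:

    use count_code_region_args::*;
    // `args` are the MIR call's operands; `op_to_u32` extracts the constant scalars.
    let index = op_to_u32(&args[COUNTER_INDEX]);
    let start_byte_pos = op_to_u32(&args[START_BYTE_POS]);
    let end_byte_pos = op_to_u32(&args[END_BYTE_POS]);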

View file

@@ -40,6 +40,7 @@ use std::{iter, mem, option};
use self::predecessors::{PredecessorCache, Predecessors};
pub use self::query::*;
pub mod coverage;
pub mod interpret;
pub mod mono;
mod predecessors;
@@ -2307,6 +2308,18 @@ impl<'tcx> Operand<'tcx> {
})
}
/// Convenience helper to make a `Scalar` from the given `Operand`, assuming that `Operand`
/// wraps a constant literal value. Panics if this is not the case.
pub fn scalar_from_const(operand: &Operand<'tcx>) -> Scalar {
match operand {
Operand::Constant(constant) => match constant.literal.val.try_to_scalar() {
Some(scalar) => scalar,
_ => panic!("{:?}: Scalar value expected", constant.literal.val),
},
_ => panic!("{:?}: Constant expected", operand),
}
}
pub fn to_copy(&self) -> Self {
match *self {
Operand::Copy(_) | Operand::Constant(_) => self.clone(),
@@ -2980,18 +2993,3 @@ impl Location {
}
}
}
/// Coverage data associated with each function (MIR) instrumented with coverage counters, when
/// compiled with `-Zinstrument_coverage`. The query `tcx.coverage_data(DefId)` computes these
/// values on demand (during code generation). This query is only valid after executing the MIR pass
/// `InstrumentCoverage`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub struct CoverageData {
/// A hash value that can be used by the consumer of the coverage profile data to detect
/// changes to the instrumented source of the associated MIR body (typically, for an
/// individual function).
pub hash: u64,
/// The total number of coverage region counters added to the MIR `Body`.
pub num_counters: u32,
}

View file

@@ -309,3 +309,17 @@ pub struct DestructuredConst<'tcx> {
pub variant: Option<VariantIdx>,
pub fields: &'tcx [&'tcx ty::Const<'tcx>],
}
/// Coverage information summarized from a MIR if instrumented for source code coverage (see
/// compiler option `-Zinstrument-coverage`). This information is generated by the
/// `InstrumentCoverage` MIR pass and can be retrieved via the `coverageinfo` query.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub struct CoverageInfo {
/// A hash value that can be used by the consumer of the coverage profile data to detect
/// changes to the instrumented source of the associated MIR body (typically, for an
/// individual function).
pub hash: u64,
/// The total number of coverage region counters added to the MIR `Body`.
pub num_counters: u32,
}

View file

@@ -231,8 +231,10 @@ rustc_queries! {
cache_on_disk_if { key.is_local() }
}
query coverage_data(key: DefId) -> mir::CoverageData {
desc { |tcx| "retrieving coverage data from MIR for `{}`", tcx.def_path_str(key) }
/// Returns coverage summary info for a function, after executing the `InstrumentCoverage`
/// MIR pass (assuming the -Zinstrument-coverage option is enabled).
query coverageinfo(key: DefId) -> mir::CoverageInfo {
desc { |tcx| "retrieving coverage info from MIR for `{}`", tcx.def_path_str(key) }
storage(ArenaCacheSelector<'tcx>)
cache_on_disk_if { key.is_local() }
}

View file

@@ -410,7 +410,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.copy_op(self.operand_index(args[0], index)?, dest)?;
}
// FIXME(#73156): Handle source code coverage in const eval
sym::count_code_region => (),
sym::count_code_region
| sym::coverage_counter_add
| sym::coverage_counter_subtract
| sym::coverage_unreachable => (),
_ => return Ok(false),
}

View file

@@ -39,9 +39,16 @@ struct CallSite<'tcx> {
impl<'tcx> MirPass<'tcx> for Inline {
fn run_pass(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>) {
if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
if tcx.sess.opts.debugging_opts.instrument_coverage {
// The current implementation of source code coverage injects code region counters
// into the MIR, and assumes a 1-to-1 correspondence between MIR functions and
// source-code-based functions.
debug!("function inlining is disabled when compiling with `instrument_coverage`");
} else {
Inliner { tcx, source }.run_pass(body);
}
}
}
}
struct Inliner<'tcx> {

View file

@@ -5,65 +5,71 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::lang_items;
use rustc_middle::hir;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir::interpret::{ConstValue, Scalar};
use rustc_middle::mir::coverage::*;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::CoverageInfo;
use rustc_middle::mir::{
self, traversal, BasicBlock, BasicBlockData, CoverageData, Operand, Place, SourceInfo,
StatementKind, Terminator, TerminatorKind, START_BLOCK,
self, traversal, BasicBlock, BasicBlockData, Operand, Place, SourceInfo, StatementKind,
Terminator, TerminatorKind, START_BLOCK,
};
use rustc_middle::ty;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::FnDef;
use rustc_middle::ty::TyCtxt;
use rustc_middle::ty::{ConstKind, FnDef};
use rustc_span::def_id::DefId;
use rustc_span::Span;
use rustc_span::{Pos, Span};
/// Inserts a call to `count_code_region()` as a placeholder to be replaced during code generation
/// with the intrinsic `llvm.instrprof.increment`.
pub struct InstrumentCoverage;
/// The `query` provider for `CoverageData`, requested by `codegen_intrinsic_call()` when
/// The `query` provider for `CoverageInfo`, requested by `codegen_intrinsic_call()` when
/// constructing the arguments for `llvm.instrprof.increment`.
pub(crate) fn provide(providers: &mut Providers<'_>) {
providers.coverage_data = |tcx, def_id| {
let mir_body = tcx.optimized_mir(def_id);
providers.coverageinfo = |tcx, def_id| coverageinfo_from_mir(tcx, def_id);
}
fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> CoverageInfo {
let mir_body = tcx.optimized_mir(mir_def_id);
// FIXME(richkadel): The current implementation assumes the MIR for the given DefId
// represents a single function. Validate and/or correct if inlining and/or monomorphization
// invalidates these assumptions.
let count_code_region_fn =
tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None);
let mut num_counters: u32 = 0;
// represents a single function. Validate and/or correct if inlining (which should be disabled
// if -Zinstrument-coverage is enabled) and/or monomorphization invalidates these assumptions.
let count_code_region_fn = tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None);
// The `num_counters` argument to `llvm.instrprof.increment` is the number of injected
// counters, with each counter having an index from `0..num_counters-1`. MIR optimization
// may split and duplicate some BasicBlock sequences. Simply counting the calls may not
// work; but computing `num_counters` by adding `1` to the highest index (for a given
// instrumented function) is valid.
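// For example (an illustration added here, not in the original diff): if the
// preorder traversal sees counter indexes [0, 2, 2, 1] because the block with
// counter 2 was duplicated, counting calls would give 4, but the correct
// `num_counters` is max(index) + 1 = 3.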
for (_, data) in traversal::preorder(mir_body) {
if let Some(terminator) = &data.terminator {
if let TerminatorKind::Call { func: Operand::Constant(func), args, .. } =
&terminator.kind
let mut num_counters: u32 = 0;
for terminator in traversal::preorder(mir_body)
.map(|(_, data)| (data, count_code_region_fn))
.filter_map(terminators_that_call_given_fn)
{
if let FnDef(called_fn_def_id, _) = func.literal.ty.kind {
if called_fn_def_id == count_code_region_fn {
if let Operand::Constant(constant) =
args.get(0).expect("count_code_region has at least one arg")
{
if let ConstKind::Value(ConstValue::Scalar(value)) =
constant.literal.val
{
let index = value
.to_u32()
.expect("count_code_region index at arg0 is u32");
if let TerminatorKind::Call { args, .. } = &terminator.kind {
let index_arg = args.get(count_code_region_args::COUNTER_INDEX).expect("arg found");
let index =
mir::Operand::scalar_from_const(index_arg).to_u32().expect("index arg is u32");
num_counters = std::cmp::max(num_counters, index + 1);
}
}
let hash = if num_counters > 0 { hash_mir_source(tcx, mir_def_id) } else { 0 };
CoverageInfo { num_counters, hash }
}
fn terminators_that_call_given_fn(
(data, fn_def_id): (&'tcx BasicBlockData<'tcx>, DefId),
) -> Option<&'tcx Terminator<'tcx>> {
if let Some(terminator) = &data.terminator {
if let TerminatorKind::Call { func: Operand::Constant(func), .. } = &terminator.kind {
if let FnDef(called_fn_def_id, _) = func.literal.ty.kind {
if called_fn_def_id == fn_def_id {
return Some(&terminator);
}
}
}
}
}
let hash = if num_counters > 0 { hash_mir_source(tcx, def_id) } else { 0 };
CoverageData { num_counters, hash }
};
None
}
struct Instrumentor<'tcx> {
@@ -102,17 +108,16 @@ impl<'tcx> Instrumentor<'tcx> {
fn inject_counters(&mut self, mir_body: &mut mir::Body<'tcx>) {
// FIXME(richkadel): As a first step, counters are only injected at the top of each
// function. The complete solution will inject counters at each conditional code branch.
let top_of_function = START_BLOCK;
let entire_function = mir_body.span;
self.inject_counter(mir_body, top_of_function, entire_function);
let code_region = mir_body.span;
let next_block = START_BLOCK;
self.inject_counter(mir_body, code_region, next_block);
}
fn inject_counter(
&mut self,
mir_body: &mut mir::Body<'tcx>,
next_block: BasicBlock,
code_region: Span,
next_block: BasicBlock,
) {
let injection_point = code_region.shrink_to_lo();
@@ -121,12 +126,20 @@ impl<'tcx> Instrumentor<'tcx> {
self.tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
injection_point,
);
let counter_index = Operand::const_from_scalar(
self.tcx,
self.tcx.types.u32,
Scalar::from_u32(self.next_counter()),
injection_point,
);
let index = self.next_counter();
let mut args = Vec::new();
use count_code_region_args::*;
debug_assert_eq!(COUNTER_INDEX, args.len());
args.push(self.const_u32(index, injection_point));
debug_assert_eq!(START_BYTE_POS, args.len());
args.push(self.const_u32(code_region.lo().to_u32(), injection_point));
debug_assert_eq!(END_BYTE_POS, args.len());
args.push(self.const_u32(code_region.hi().to_u32(), injection_point));
let mut patch = MirPatch::new(mir_body);
@@ -136,7 +149,7 @@ impl<'tcx> Instrumentor<'tcx> {
new_block,
TerminatorKind::Call {
func: count_code_region_fn,
args: vec![counter_index],
args,
// new_block will be swapped with the next_block, after applying the patch
destination: Some((Place::from(temp), new_block)),
cleanup: None,
@@ -154,6 +167,10 @@ impl<'tcx> Instrumentor<'tcx> {
// `next_block`), just swap the indexes, leaving the rest of the graph unchanged.
mir_body.basic_blocks_mut().swap(next_block, new_block);
}
fn const_u32(&self, value: u32, span: Span) -> Operand<'tcx> {
Operand::const_from_scalar(self.tcx, self.tcx.types.u32, Scalar::from_u32(value), span)
}
}
fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {

View file

@@ -881,8 +881,9 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
(default: no)"),
instrument_coverage: bool = (false, parse_bool, [TRACKED],
"instrument the generated code with LLVM code region counters to (in the \
future) generate coverage reports (default: no; note, the compiler build \
config must include `profiler = true`)"),
future) generate coverage reports; disables/overrides some optimization \
options (note, the compiler build config must include `profiler = true`) \
(default: no)"),
instrument_mcount: bool = (false, parse_bool, [TRACKED],
"insert function instrument code for mcount-based tracing (default: no)"),
keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED],

View file

@@ -242,6 +242,9 @@ symbols! {
core,
core_intrinsics,
count_code_region,
coverage_counter_add,
coverage_counter_subtract,
coverage_unreachable,
crate_id,
crate_in_paths,
crate_local,

View file

@@ -352,7 +352,17 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
return;
}
"count_code_region" => (0, vec![tcx.types.u32], tcx.mk_unit()),
"count_code_region" => {
(0, vec![tcx.types.u32, tcx.types.u32, tcx.types.u32], tcx.mk_unit())
}
"coverage_counter_add" | "coverage_counter_subtract" => (
0,
vec![tcx.types.u32, tcx.types.u32, tcx.types.u32, tcx.types.u32, tcx.types.u32],
tcx.mk_unit(),
),
"coverage_unreachable" => (0, vec![tcx.types.u32, tcx.types.u32], tcx.mk_unit()),
ref other => {
struct_span_err!(

View file

@@ -7,19 +7,31 @@
bb0: {
+ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
+ _1 = const std::intrinsics::count_code_region(const 0_u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
+ _1 = const std::intrinsics::count_code_region(const 0_u32, const 484_u32, const 513_u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
+ // ty::Const
+ // + ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region}
+ // + ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}
+ // + val: Value(Scalar(<ZST>))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000000))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x000001e4))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001e4)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000201))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000201)) }
+ }
+
+ bb1 (cleanup): {

View file

@@ -11,19 +11,31 @@
bb0: {
- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6
+ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
+ _4 = const std::intrinsics::count_code_region(const 0_u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
+ _4 = const std::intrinsics::count_code_region(const 0_u32, const 387_u32, const 465_u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
+ // ty::Const
+ // + ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region}
+ // + ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}
+ // + val: Value(Scalar(<ZST>))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000000))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000183))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000183)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x000001d1))
+ // mir::Constant
+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001d1)) }
}
bb1: {