
code coverage foundation for hash and num_counters

Replaced dummy values for hash and num_counters with computed values,
and refactored InstrumentCoverage pass to simplify injecting more
counters per function in upcoming versions.

Improved usage documentation and error messaging.
Rich Kadel 2020-06-18 13:29:43 -07:00
parent 791ccccddc
commit 8c7c84b4e8
11 changed files with 225 additions and 81 deletions

View file

@@ -16,6 +16,7 @@ use rustc_codegen_ssa::common::TypeKind;
use rustc_codegen_ssa::glue;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::mir::FunctionCx;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::MemFlags;
use rustc_hir as hir;
@@ -23,7 +24,6 @@ use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, span_bug};
use rustc_span::Span;
use rustc_span::Symbol;
use rustc_target::abi::{self, HasDataLayout, LayoutOf, Primitive};
use rustc_target::spec::PanicStrategy;
@@ -82,14 +82,14 @@ fn get_simple_intrinsic(cx: &CodegenCx<'ll, '_>, name: &str) -> Option<&'ll Valu
}
impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
fn codegen_intrinsic_call(
fn codegen_intrinsic_call<'b, Bx: BuilderMethods<'b, 'tcx>>(
&mut self,
fx: &FunctionCx<'b, 'tcx, Bx>,
instance: ty::Instance<'tcx>,
fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
args: &[OperandRef<'tcx, &'ll Value>],
llresult: &'ll Value,
span: Span,
caller_instance: ty::Instance<'tcx>,
) {
let tcx = self.tcx;
let callee_ty = instance.monomorphic_ty(tcx);
@@ -141,26 +141,17 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
self.call(llfn, &[], None)
}
"count_code_region" => {
if let ty::InstanceDef::Item(fn_def_id) = caller_instance.def {
let caller_fn_path = tcx.def_path_str(fn_def_id);
debug!(
"count_code_region to llvm.instrprof.increment(fn_name={})",
caller_fn_path
);
// FIXME(richkadel): (1) Replace raw function name with mangled function name;
// (2) Replace hardcoded `1234` in `hash` with a computed hash (as discussed in
// the MCP (compiler-team/issues/278)); and replace the hardcoded `1` for
// `num_counters` with the actual number of counters per function (when the
// changes are made to inject more than one counter per function).
let (fn_name, _len_val) = self.const_str(Symbol::intern(&caller_fn_path));
let coverage_data = fx.mir.coverage_data.as_ref().unwrap();
let mangled_fn = tcx.symbol_name(fx.instance);
let (mangled_fn_name, _len_val) = self.const_str(mangled_fn.name);
let hash = self.const_u64(coverage_data.hash);
let index = args[0].immediate();
let hash = self.const_u64(1234);
let num_counters = self.const_u32(1);
self.instrprof_increment(fn_name, hash, num_counters, index)
} else {
bug!("intrinsic count_code_region: no src.instance");
}
let num_counters = self.const_u32(coverage_data.num_counters as u32);
debug!(
"count_code_region to LLVM intrinsic instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
mangled_fn.name, hash, index, num_counters
);
self.instrprof_increment(mangled_fn_name, hash, num_counters, index)
}
"va_start" => self.va_start(args[0].immediate()),
"va_end" => self.va_end(args[0].immediate()),

View file

@@ -688,12 +688,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
.collect();
bx.codegen_intrinsic_call(
self,
*instance.as_ref().unwrap(),
&fn_abi,
&args,
dest,
terminator.source_info.span,
self.instance,
);
if let ReturnDest::IndirectOperand(dst, _) = ret_dest {

View file

@@ -21,9 +21,9 @@ use self::operand::{OperandRef, OperandValue};
/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
instance: Instance<'tcx>,
pub instance: Instance<'tcx>,
mir: &'tcx mir::Body<'tcx>,
pub mir: &'tcx mir::Body<'tcx>,
debug_context: Option<FunctionDebugContext<Bx::DIScope>>,

View file

@@ -1,5 +1,7 @@
use super::BackendTypes;
use crate::mir::operand::OperandRef;
use crate::mir::FunctionCx;
use crate::traits::BuilderMethods;
use rustc_middle::ty::{self, Ty};
use rustc_span::Span;
use rustc_target::abi::call::FnAbi;
@@ -8,14 +10,14 @@ pub trait IntrinsicCallMethods<'tcx>: BackendTypes {
/// Remember to add all intrinsics here, in librustc_typeck/check/mod.rs,
/// and in libcore/intrinsics.rs; if you need access to any llvm intrinsics,
/// add them to librustc_codegen_llvm/context.rs
fn codegen_intrinsic_call(
fn codegen_intrinsic_call<'a, Bx: BuilderMethods<'a, 'tcx>>(
&mut self,
fx: &FunctionCx<'a, 'tcx, Bx>,
instance: ty::Instance<'tcx>,
fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
args: &[OperandRef<'tcx, Self::Value>],
llresult: Self::Value,
span: Span,
caller_instance: ty::Instance<'tcx>,
);
fn abort(&mut self);

View file

@@ -488,6 +488,8 @@ impl<'a> CrateLocator<'a> {
&& self.triple != TargetTriple::from_triple(config::host_triple())
{
err.note(&format!("the `{}` target may not be installed", self.triple));
} else if self.crate_name == sym::profiler_builtins {
err.note(&"the compiler may have been built without `profiler = true`");
}
err.span_label(self.span, "can't find crate");
err

View file

@@ -67,13 +67,15 @@ impl<'a> StableHashingContext<'a> {
/// Don't use it for anything else or you'll run the risk of
/// leaking data out of the tracking system.
#[inline]
pub fn new(
fn new_with_or_without_spans(
sess: &'a Session,
krate: &'a hir::Crate<'a>,
definitions: &'a Definitions,
cstore: &'a dyn CrateStore,
always_ignore_spans: bool,
) -> Self {
let hash_spans_initial = !sess.opts.debugging_opts.incremental_ignore_spans;
let hash_spans_initial =
!always_ignore_spans && !sess.opts.debugging_opts.incremental_ignore_spans;
StableHashingContext {
sess,
@@ -88,6 +90,33 @@ impl<'a> StableHashingContext<'a> {
}
}
#[inline]
pub fn new(
sess: &'a Session,
krate: &'a hir::Crate<'a>,
definitions: &'a Definitions,
cstore: &'a dyn CrateStore,
) -> Self {
Self::new_with_or_without_spans(
sess,
krate,
definitions,
cstore,
/*always_ignore_spans=*/ false,
)
}
#[inline]
pub fn ignore_spans(
sess: &'a Session,
krate: &'a hir::Crate<'a>,
definitions: &'a Definitions,
cstore: &'a dyn CrateStore,
) -> Self {
let always_ignore_spans = true;
Self::new_with_or_without_spans(sess, krate, definitions, cstore, always_ignore_spans)
}
#[inline]
pub fn sess(&self) -> &'a Session {
self.sess

View file

@@ -88,6 +88,19 @@ impl MirPhase {
}
}
/// Coverage data computed by the `InstrumentCoverage` MIR pass, when compiling with
/// `-Zinstrument_coverage`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable, TypeFoldable)]
pub struct CoverageData {
/// A hash value that can be used by the consumer of the coverage profile data to detect
/// changes to the instrumented source of the associated MIR body (typically, for an
/// individual function).
pub hash: u64,
/// The total number of coverage region counters added to this MIR Body.
pub num_counters: usize,
}
/// The lowered representation of a single function.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable, TypeFoldable)]
pub struct Body<'tcx> {
@@ -164,13 +177,17 @@ pub struct Body<'tcx> {
/// The user may be writing e.g. `&[(SOME_CELL, 42)][i].1` and this would get promoted, because
/// we'd statically know that no thing with interior mutability will ever be available to the
/// user without some serious unsafe code. Now this means that our promoted is actually
/// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because the
/// index may be a runtime value. Such a promoted value is illegal because it has reachable
/// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because
/// the index may be a runtime value. Such a promoted value is illegal because it has reachable
/// interior mutability. This flag just makes this situation very obvious where the previous
/// implementation without the flag hid this situation silently.
/// FIXME(oli-obk): rewrite the promoted during promotion to eliminate the cell components.
pub ignore_interior_mut_in_const_validation: bool,
/// If compiling with `-Zinstrument_coverage`, the `InstrumentCoverage` pass stores summary
/// information associated with the MIR, used in code generation of the coverage counters.
pub coverage_data: Option<CoverageData>,
predecessor_cache: PredecessorCache,
}
@@ -211,6 +228,7 @@ impl<'tcx> Body<'tcx> {
required_consts: Vec::new(),
ignore_interior_mut_in_const_validation: false,
control_flow_destroyed,
coverage_data: None,
predecessor_cache: PredecessorCache::new(),
}
}
@@ -238,6 +256,7 @@ impl<'tcx> Body<'tcx> {
generator_kind: None,
var_debug_info: Vec::new(),
ignore_interior_mut_in_const_validation: false,
coverage_data: None,
predecessor_cache: PredecessorCache::new(),
}
}
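
Taken together with the intrinsic.rs hunk above, the intent is that the InstrumentCoverage pass fills in `coverage_data` and codegen reads it back when lowering `count_code_region`. A minimal sketch of that hand-off, using simplified stand-in types rather than the real `CoverageData` and `Body`:

// Simplified stand-ins, shown only to illustrate the hand-off.
struct CoverageData { hash: u64, num_counters: usize }
struct Body { coverage_data: Option<CoverageData> }

// MIR pass side: record the computed hash and the number of injected counters.
fn run_instrument_coverage_pass(body: &mut Body, hash: u64, num_counters: usize) {
    body.coverage_data = Some(CoverageData { hash, num_counters });
}

// Codegen side: the values handed to llvm.instrprof.increment for counter `index`.
fn increment_args(body: &Body, mangled_name: &str, index: u32) -> (String, u64, u32, u32) {
    let data = body.coverage_data.as_ref().expect("InstrumentCoverage pass ran first");
    (mangled_name.to_string(), data.hash, data.num_counters as u32, index)
}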

View file

@@ -1284,6 +1284,13 @@ impl<'tcx> TyCtxt<'tcx> {
StableHashingContext::new(self.sess, krate, self.definitions, &*self.cstore)
}
#[inline(always)]
pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> {
let krate = self.gcx.untracked_crate;
StableHashingContext::ignore_spans(self.sess, krate, self.definitions, &*self.cstore)
}
// This method makes sure that we have a DepNode and a Fingerprint for
// every upstream crate. It needs to be called once right after the tcx is
// created.
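
`StableHashingContext::ignore_spans` and this `create_no_span_stable_hashing_context` wrapper are what keep `CoverageData::hash` stable when only source positions move. A hedged usage sketch, mirroring the `hash_mir_source`/`hash` helpers added in the InstrumentCoverage pass below (compiles only inside rustc, where these crates are available):

use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::BodyId;
use rustc_middle::ty::TyCtxt;

// Sketch: hash a function's HIR body while ignoring spans, yielding the u64
// stored in CoverageData::hash. Formatting-only edits leave the result unchanged.
fn span_independent_hash(tcx: TyCtxt<'_>, body_id: BodyId) -> u64 {
    let hir_body = tcx.hir().body(body_id);
    let mut hcx = tcx.create_no_span_stable_hashing_context();
    let mut hasher = StableHasher::new();
    hir_body.value.hash_stable(&mut hcx, &mut hasher);
    let fingerprint: Fingerprint = hasher.finish();
    fingerprint.to_smaller_hash()
}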

View file

@@ -1,8 +1,15 @@
use crate::transform::{MirPass, MirSource};
use crate::util::patch::MirPatch;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::lang_items;
use rustc_hir::*;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::*;
use rustc_middle::mir::{
self, BasicBlock, BasicBlockData, CoverageData, Operand, Place, SourceInfo, StatementKind,
Terminator, TerminatorKind, START_BLOCK,
};
use rustc_middle::ty;
use rustc_middle::ty::TyCtxt;
use rustc_span::def_id::DefId;
@@ -12,42 +19,82 @@ use rustc_span::Span;
/// the intrinsic llvm.instrprof.increment.
pub struct InstrumentCoverage;
struct Instrumentor<'tcx> {
tcx: TyCtxt<'tcx>,
num_counters: usize,
}
impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &mut mir::Body<'tcx>) {
if tcx.sess.opts.debugging_opts.instrument_coverage {
debug!("instrumenting {:?}", src.def_id());
instrument_coverage(tcx, body);
// If the InstrumentCoverage pass is called on promoted MIRs, skip them.
// See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601
if src.promoted.is_none() {
assert!(mir_body.coverage_data.is_none());
let hash = hash_mir_source(tcx, &src);
debug!(
"instrumenting {:?}, hash: {}, span: {}",
src.def_id(),
hash,
tcx.sess.source_map().span_to_string(mir_body.span)
);
let num_counters = Instrumentor::new(tcx).inject_counters(mir_body);
mir_body.coverage_data = Some(CoverageData { hash, num_counters });
}
}
}
}
// The first counter (start of the function) is index zero.
const INIT_FUNCTION_COUNTER: u32 = 0;
impl<'tcx> Instrumentor<'tcx> {
fn new(tcx: TyCtxt<'tcx>) -> Self {
Self { tcx, num_counters: 0 }
}
/// Injects calls to placeholder function `count_code_region()`.
// FIXME(richkadel): As a first step, counters are only injected at the top of each function.
// The complete solution will inject counters at each conditional code branch.
pub fn instrument_coverage<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let span = body.span.shrink_to_lo();
fn next_counter(&mut self) -> u32 {
let next = self.num_counters as u32;
self.num_counters += 1;
next
}
fn inject_counters(&mut self, mir_body: &mut mir::Body<'tcx>) -> usize {
// FIXME(richkadel): As a first step, counters are only injected at the top of each
// function. The complete solution will inject counters at each conditional code branch.
let top_of_function = START_BLOCK;
let entire_function = mir_body.span;
self.inject_counter(mir_body, top_of_function, entire_function);
self.num_counters
}
fn inject_counter(
&mut self,
mir_body: &mut mir::Body<'tcx>,
next_block: BasicBlock,
code_region: Span,
) {
let injection_point = code_region.shrink_to_lo();
let count_code_region_fn = function_handle(
tcx,
tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
span,
self.tcx,
self.tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
injection_point,
);
let counter_index = Operand::const_from_scalar(
tcx,
tcx.types.u32,
Scalar::from_u32(INIT_FUNCTION_COUNTER),
span,
self.tcx,
self.tcx.types.u32,
Scalar::from_u32(self.next_counter()),
injection_point,
);
let mut patch = MirPatch::new(body);
let mut patch = MirPatch::new(mir_body);
let new_block = patch.new_block(placeholder_block(SourceInfo::outermost(body.span)));
let next_block = START_BLOCK;
let temp = patch.new_temp(tcx.mk_unit(), body.span);
let temp = patch.new_temp(self.tcx.mk_unit(), code_region);
let new_block = patch.new_block(placeholder_block(code_region));
patch.patch_terminator(
new_block,
TerminatorKind::Call {
@@ -57,19 +104,19 @@ pub fn instrument_coverage<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
destination: Some((Place::from(temp), new_block)),
cleanup: None,
from_hir_call: false,
fn_span: span,
fn_span: injection_point,
},
);
patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp));
patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp));
patch.apply(body);
patch.apply(mir_body);
// To insert the `new_block` in front of the first block in the counted branch (for example,
// the START_BLOCK, at the top of the function), just swap the indexes, leaving the rest of the
// graph unchanged.
body.basic_blocks_mut().swap(next_block, new_block);
// To insert the `new_block` in front of the first block in the counted branch (the
// `next_block`), just swap the indexes, leaving the rest of the graph unchanged.
mir_body.basic_blocks_mut().swap(next_block, new_block);
}
}
fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {
@@ -79,14 +126,59 @@ fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Ope
Operand::function_handle(tcx, fn_def_id, substs, span)
}
fn placeholder_block<'tcx>(source_info: SourceInfo) -> BasicBlockData<'tcx> {
fn placeholder_block(span: Span) -> BasicBlockData<'tcx> {
BasicBlockData {
statements: vec![],
terminator: Some(Terminator {
source_info,
source_info: SourceInfo::outermost(span),
// this gets overwritten by the counter Call
kind: TerminatorKind::Unreachable,
}),
is_cleanup: false,
}
}
fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, src: &MirSource<'tcx>) -> u64 {
let fn_body_id = match tcx.hir().get_if_local(src.def_id()) {
Some(node) => match associated_body(node) {
Some(body_id) => body_id,
_ => bug!("instrumented MirSource does not include a function body: {:?}", node),
},
None => bug!("instrumented MirSource is not local: {:?}", src),
};
let hir_body = tcx.hir().body(fn_body_id);
let mut hcx = tcx.create_no_span_stable_hashing_context();
hash(&mut hcx, &hir_body.value).to_smaller_hash()
}
fn hash(
hcx: &mut StableHashingContext<'tcx>,
node: &impl HashStable<StableHashingContext<'tcx>>,
) -> Fingerprint {
let mut stable_hasher = StableHasher::new();
node.hash_stable(hcx, &mut stable_hasher);
stable_hasher.finish()
}
fn associated_body<'hir>(node: Node<'hir>) -> Option<BodyId> {
match node {
Node::Item(Item {
kind: ItemKind::Const(_, body) | ItemKind::Static(.., body) | ItemKind::Fn(.., body),
..
})
| Node::TraitItem(TraitItem {
kind:
TraitItemKind::Const(_, Some(body)) | TraitItemKind::Fn(_, TraitFn::Provided(body)),
..
})
| Node::ImplItem(ImplItem {
kind: ImplItemKind::Const(_, body) | ImplItemKind::Fn(_, body),
..
})
| Node::Expr(Expr { kind: ExprKind::Closure(.., body, _, _), .. }) => Some(*body),
Node::AnonConst(constant) => Some(constant.body),
_ => None,
}
}
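
At the source level, the effect of `inject_counters` in this first version is roughly equivalent to calling a counter function at the very top of the instrumented function, before its original first block runs. A hypothetical sketch (the free function below is only a stand-in for the real `count_code_region` lang item, which codegen lowers to `llvm.instrprof.increment`):

// Hypothetical stand-in for the count_code_region lang item / intrinsic.
fn count_code_region(index: u32) {
    // In the real compiler this call is injected as MIR and later lowered to
    // llvm.instrprof.increment; `index` selects a counter within the function.
    let _ = index;
}

// What an instrumented function conceptually looks like after the pass:
fn instrumented_example(x: u32) -> u32 {
    count_code_region(0); // the injected new_block, swapped with the old START_BLOCK
    x + 1                 // original function body
}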

View file

@@ -877,8 +877,9 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
(such as entering an empty infinite loop) by inserting llvm.sideeffect \
(default: no)"),
instrument_coverage: bool = (false, parse_bool, [TRACKED],
"instrument the generated code with LLVM code region counters to \
(in the future) generate coverage reports (experimental; default: no)"),
"instrument the generated code with LLVM code region counters to (in the \
future) generate coverage reports (default: no; note, the compiler build \
config must include `profiler = true`)"),
instrument_mcount: bool = (false, parse_bool, [TRACKED],
"insert function instrument code for mcount-based tracing (default: no)"),
keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED],

View file

@@ -587,6 +587,7 @@ symbols! {
proc_macro_mod,
proc_macro_non_items,
proc_macro_path_invoc,
profiler_builtins,
profiler_runtime,
ptr_offset_from,
pub_restricted,