
code coverage foundation for hash and num_counters

Replaced dummy values for hash and num_counters with computed values,
and refactored InstrumentCoverage pass to simplify injecting more
counters per function in upcoming versions.

Improved usage documentation and error messaging.
Rich Kadel 2020-06-18 13:29:43 -07:00
parent 791ccccddc
commit 8c7c84b4e8
11 changed files with 225 additions and 81 deletions


@@ -16,6 +16,7 @@ use rustc_codegen_ssa::common::TypeKind;
 use rustc_codegen_ssa::glue;
 use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
 use rustc_codegen_ssa::mir::place::PlaceRef;
+use rustc_codegen_ssa::mir::FunctionCx;
 use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::MemFlags;
 use rustc_hir as hir;
@@ -23,7 +24,6 @@ use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
 use rustc_middle::ty::{self, Ty};
 use rustc_middle::{bug, span_bug};
 use rustc_span::Span;
-use rustc_span::Symbol;
 use rustc_target::abi::{self, HasDataLayout, LayoutOf, Primitive};
 use rustc_target::spec::PanicStrategy;
@@ -82,14 +82,14 @@ fn get_simple_intrinsic(cx: &CodegenCx<'ll, '_>, name: &str) -> Option<&'ll Value> {
 }
 
 impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
-    fn codegen_intrinsic_call(
+    fn codegen_intrinsic_call<'b, Bx: BuilderMethods<'b, 'tcx>>(
         &mut self,
+        fx: &FunctionCx<'b, 'tcx, Bx>,
         instance: ty::Instance<'tcx>,
         fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
         args: &[OperandRef<'tcx, &'ll Value>],
         llresult: &'ll Value,
         span: Span,
-        caller_instance: ty::Instance<'tcx>,
     ) {
         let tcx = self.tcx;
         let callee_ty = instance.monomorphic_ty(tcx);
@@ -141,26 +141,17 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
                 self.call(llfn, &[], None)
             }
             "count_code_region" => {
-                if let ty::InstanceDef::Item(fn_def_id) = caller_instance.def {
-                    let caller_fn_path = tcx.def_path_str(fn_def_id);
-                    debug!(
-                        "count_code_region to llvm.instrprof.increment(fn_name={})",
-                        caller_fn_path
-                    );
-
-                    // FIXME(richkadel): (1) Replace raw function name with mangled function name;
-                    // (2) Replace hardcoded `1234` in `hash` with a computed hash (as discussed in)
-                    // the MCP (compiler-team/issues/278); and replace the hardcoded `1` for
-                    // `num_counters` with the actual number of counters per function (when the
-                    // changes are made to inject more than one counter per function).
-                    let (fn_name, _len_val) = self.const_str(Symbol::intern(&caller_fn_path));
-                    let index = args[0].immediate();
-                    let hash = self.const_u64(1234);
-                    let num_counters = self.const_u32(1);
-                    self.instrprof_increment(fn_name, hash, num_counters, index)
-                } else {
-                    bug!("intrinsic count_code_region: no src.instance");
-                }
+                let coverage_data = fx.mir.coverage_data.as_ref().unwrap();
+                let mangled_fn = tcx.symbol_name(fx.instance);
+                let (mangled_fn_name, _len_val) = self.const_str(mangled_fn.name);
+                let hash = self.const_u64(coverage_data.hash);
+                let index = args[0].immediate();
+                let num_counters = self.const_u32(coverage_data.num_counters as u32);
+                debug!(
+                    "count_code_region to LLVM intrinsic instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
+                    mangled_fn.name, hash, index, num_counters
+                );
+                self.instrprof_increment(mangled_fn_name, hash, num_counters, index)
             }
             "va_start" => self.va_start(args[0].immediate()),
             "va_end" => self.va_end(args[0].immediate()),


@@ -688,12 +688,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     .collect();
 
                 bx.codegen_intrinsic_call(
+                    self,
                     *instance.as_ref().unwrap(),
                     &fn_abi,
                     &args,
                     dest,
                     terminator.source_info.span,
-                    self.instance,
                 );
 
                 if let ReturnDest::IndirectOperand(dst, _) = ret_dest {


@@ -21,9 +21,9 @@ use self::operand::{OperandRef, OperandValue};
 
 /// Master context for codegenning from MIR.
 pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
-    instance: Instance<'tcx>,
+    pub instance: Instance<'tcx>,
 
-    mir: &'tcx mir::Body<'tcx>,
+    pub mir: &'tcx mir::Body<'tcx>,
 
     debug_context: Option<FunctionDebugContext<Bx::DIScope>>,


@@ -1,5 +1,7 @@
 use super::BackendTypes;
 use crate::mir::operand::OperandRef;
+use crate::mir::FunctionCx;
+use crate::traits::BuilderMethods;
 use rustc_middle::ty::{self, Ty};
 use rustc_span::Span;
 use rustc_target::abi::call::FnAbi;
@@ -8,14 +10,14 @@ pub trait IntrinsicCallMethods<'tcx>: BackendTypes {
     /// Remember to add all intrinsics here, in librustc_typeck/check/mod.rs,
     /// and in libcore/intrinsics.rs; if you need access to any llvm intrinsics,
     /// add them to librustc_codegen_llvm/context.rs
-    fn codegen_intrinsic_call(
+    fn codegen_intrinsic_call<'a, Bx: BuilderMethods<'a, 'tcx>>(
         &mut self,
+        fx: &FunctionCx<'a, 'tcx, Bx>,
         instance: ty::Instance<'tcx>,
         fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
         args: &[OperandRef<'tcx, Self::Value>],
         llresult: Self::Value,
         span: Span,
-        caller_instance: ty::Instance<'tcx>,
     );
 
     fn abort(&mut self);
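
The signature change threads the whole per-function context through a generic trait-method parameter instead of adding fields like `caller_instance` one at a time. A minimal sketch of that pattern — all names invented for illustration, not rustc's actual traits:

/// Stand-in for the per-function codegen context.
struct FunctionCx<B> {
    mir_info: String, // models access to `fx.mir.coverage_data`, `fx.instance`, etc.
    _backend: std::marker::PhantomData<B>,
}

trait BuilderMethods {
    fn backend_name() -> &'static str;
}

trait IntrinsicCallMethods {
    // The trait method is generic over the builder, so each call site can
    // hand its own `FunctionCx` to the backend implementation.
    fn codegen_intrinsic_call<B: BuilderMethods>(&mut self, fx: &FunctionCx<B>, name: &str);
}

struct LlvmBuilder;

impl BuilderMethods for LlvmBuilder {
    fn backend_name() -> &'static str {
        "llvm"
    }
}

impl IntrinsicCallMethods for LlvmBuilder {
    fn codegen_intrinsic_call<B: BuilderMethods>(&mut self, fx: &FunctionCx<B>, name: &str) {
        // The implementation can read function-level data directly from `fx`
        // instead of requiring each field to be threaded through separately.
        println!("{}: lowering `{}` with context `{}`", B::backend_name(), name, fx.mir_info);
    }
}

fn main() {
    let fx = FunctionCx::<LlvmBuilder> {
        mir_info: "coverage_data { hash, num_counters }".to_string(),
        _backend: std::marker::PhantomData,
    };
    LlvmBuilder.codegen_intrinsic_call(&fx, "count_code_region");
}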


@@ -488,6 +488,8 @@ impl<'a> CrateLocator<'a> {
                 && self.triple != TargetTriple::from_triple(config::host_triple())
             {
                 err.note(&format!("the `{}` target may not be installed", self.triple));
+            } else if self.crate_name == sym::profiler_builtins {
+                err.note(&"the compiler may have been built without `profiler = true`");
             }
             err.span_label(self.span, "can't find crate");
             err


@@ -67,13 +67,15 @@ impl<'a> StableHashingContext<'a> {
     /// Don't use it for anything else or you'll run the risk of
     /// leaking data out of the tracking system.
     #[inline]
-    pub fn new(
+    fn new_with_or_without_spans(
         sess: &'a Session,
         krate: &'a hir::Crate<'a>,
         definitions: &'a Definitions,
         cstore: &'a dyn CrateStore,
+        always_ignore_spans: bool,
     ) -> Self {
-        let hash_spans_initial = !sess.opts.debugging_opts.incremental_ignore_spans;
+        let hash_spans_initial =
+            !always_ignore_spans && !sess.opts.debugging_opts.incremental_ignore_spans;
 
         StableHashingContext {
             sess,
@@ -88,6 +90,33 @@ impl<'a> StableHashingContext<'a> {
         }
     }
 
+    #[inline]
+    pub fn new(
+        sess: &'a Session,
+        krate: &'a hir::Crate<'a>,
+        definitions: &'a Definitions,
+        cstore: &'a dyn CrateStore,
+    ) -> Self {
+        Self::new_with_or_without_spans(
+            sess,
+            krate,
+            definitions,
+            cstore,
+            /*always_ignore_spans=*/ false,
+        )
+    }
+
+    #[inline]
+    pub fn ignore_spans(
+        sess: &'a Session,
+        krate: &'a hir::Crate<'a>,
+        definitions: &'a Definitions,
+        cstore: &'a dyn CrateStore,
+    ) -> Self {
+        let always_ignore_spans = true;
+        Self::new_with_or_without_spans(sess, krate, definitions, cstore, always_ignore_spans)
+    }
+
     #[inline]
     pub fn sess(&self) -> &'a Session {
         self.sess
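
`ignore_spans` exists so callers can hash a body independent of its source positions: the hash should change when the code changes, not when it merely moves within a file. A self-contained sketch of that idea using std hashing (rustc uses its own `StableHasher`, not `DefaultHasher`; the types here are invented):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// A toy AST node: some content plus a source position ("span").
struct Node {
    code: String,
    line: u32, // models a Span: changes when code moves, not when it changes
}

/// Models a hashing context constructed via `ignore_spans`: positions are
/// skipped, so only the content feeds the hash.
fn hash_ignoring_spans(node: &Node) -> u64 {
    let mut hasher = DefaultHasher::new();
    node.code.hash(&mut hasher);
    // node.line is deliberately NOT hashed
    hasher.finish()
}

fn main() {
    let original = Node { code: "fn f() { 1 + 1 }".into(), line: 10 };
    let moved = Node { code: "fn f() { 1 + 1 }".into(), line: 42 };
    let edited = Node { code: "fn f() { 2 + 2 }".into(), line: 10 };

    // Moving the function does not change the hash; editing its body does.
    assert_eq!(hash_ignoring_spans(&original), hash_ignoring_spans(&moved));
    assert_ne!(hash_ignoring_spans(&original), hash_ignoring_spans(&edited));
    println!("span-independent hashes verified");
}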


@@ -88,6 +88,19 @@ impl MirPhase {
     }
 }
 
+/// Coverage data computed by the `InstrumentCoverage` MIR pass, when compiling with
+/// `-Zinstrument_coverage`.
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable, TypeFoldable)]
+pub struct CoverageData {
+    /// A hash value that can be used by the consumer of the coverage profile data to detect
+    /// changes to the instrumented source of the associated MIR body (typically, for an
+    /// individual function).
+    pub hash: u64,
+
+    /// The total number of coverage region counters added to this MIR Body.
+    pub num_counters: usize,
+}
+
 /// The lowered representation of a single function.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable, TypeFoldable)]
 pub struct Body<'tcx> {
@@ -164,13 +177,17 @@ pub struct Body<'tcx> {
     /// The user may be writing e.g. `&[(SOME_CELL, 42)][i].1` and this would get promoted, because
     /// we'd statically know that no thing with interior mutability will ever be available to the
-    /// user without some serious unsafe code. Now this means that our promoted is actually
-    /// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because the
-    /// index may be a runtime value. Such a promoted value is illegal because it has reachable
+    /// user without some serious unsafe code. Now this means that our promoted is actually
+    /// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because
+    /// the index may be a runtime value. Such a promoted value is illegal because it has reachable
     /// interior mutability. This flag just makes this situation very obvious where the previous
     /// implementation without the flag hid this situation silently.
     /// FIXME(oli-obk): rewrite the promoted during promotion to eliminate the cell components.
     pub ignore_interior_mut_in_const_validation: bool,
 
+    /// If compiling with `-Zinstrument_coverage`, the `InstrumentCoverage` pass stores summary
+    /// information associated with the MIR, used in code generation of the coverage counters.
+    pub coverage_data: Option<CoverageData>,
+
     predecessor_cache: PredecessorCache,
 }
@@ -211,6 +228,7 @@ impl<'tcx> Body<'tcx> {
             required_consts: Vec::new(),
             ignore_interior_mut_in_const_validation: false,
             control_flow_destroyed,
+            coverage_data: None,
             predecessor_cache: PredecessorCache::new(),
         }
     }
@@ -238,6 +256,7 @@ impl<'tcx> Body<'tcx> {
             generator_kind: None,
             var_debug_info: Vec::new(),
             ignore_interior_mut_in_const_validation: false,
+            coverage_data: None,
             predecessor_cache: PredecessorCache::new(),
         }
    }
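
The new field follows the usual optional-analysis-result shape: `None` at construction, filled exactly once by the pass, read later by codegen. A stripped-down sketch of that flow, with invented types mirroring `CoverageData` and `mir::Body`:

/// Mirrors the shape of the new `CoverageData` summary.
#[derive(Debug, Clone)]
struct CoverageData {
    hash: u64,
    num_counters: usize,
}

/// A stand-in for `mir::Body`: construction leaves the summary empty.
struct Body {
    coverage_data: Option<CoverageData>,
}

impl Body {
    fn new() -> Self {
        Body { coverage_data: None } // matches `coverage_data: None` above
    }
}

/// Models `InstrumentCoverage::run_pass`: computes the summary exactly once.
fn run_instrument_coverage(body: &mut Body, hash: u64, num_counters: usize) {
    assert!(body.coverage_data.is_none(), "pass must not run twice on one body");
    body.coverage_data = Some(CoverageData { hash, num_counters });
}

fn main() {
    let mut body = Body::new();
    run_instrument_coverage(&mut body, 0xfeed_beef, 1);
    // Codegen later unwraps the summary, as the intrinsic lowering above does.
    let data = body.coverage_data.as_ref().unwrap();
    assert_eq!(data.num_counters, 1);
    println!("coverage data: {:?}", data);
}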


@@ -1284,6 +1284,13 @@ impl<'tcx> TyCtxt<'tcx> {
         StableHashingContext::new(self.sess, krate, self.definitions, &*self.cstore)
     }
 
+    #[inline(always)]
+    pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> {
+        let krate = self.gcx.untracked_crate;
+        StableHashingContext::ignore_spans(self.sess, krate, self.definitions, &*self.cstore)
+    }
+
     // This method makes sure that we have a DepNode and a Fingerprint for
     // every upstream crate. It needs to be called once right after the tcx is
     // created.


@@ -1,8 +1,15 @@
 use crate::transform::{MirPass, MirSource};
 use crate::util::patch::MirPatch;
+use rustc_data_structures::fingerprint::Fingerprint;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_hir::lang_items;
+use rustc_hir::*;
+use rustc_middle::ich::StableHashingContext;
 use rustc_middle::mir::interpret::Scalar;
-use rustc_middle::mir::*;
+use rustc_middle::mir::{
+    self, BasicBlock, BasicBlockData, CoverageData, Operand, Place, SourceInfo, StatementKind,
+    Terminator, TerminatorKind, START_BLOCK,
+};
 use rustc_middle::ty;
 use rustc_middle::ty::TyCtxt;
 use rustc_span::def_id::DefId;
@@ -12,64 +19,104 @@ use rustc_span::Span;
 /// the intrinsic llvm.instrprof.increment.
 pub struct InstrumentCoverage;
 
+struct Instrumentor<'tcx> {
+    tcx: TyCtxt<'tcx>,
+    num_counters: usize,
+}
+
 impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
-    fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
+    fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &mut mir::Body<'tcx>) {
         if tcx.sess.opts.debugging_opts.instrument_coverage {
-            debug!("instrumenting {:?}", src.def_id());
-            instrument_coverage(tcx, body);
+            // If the InstrumentCoverage pass is called on promoted MIRs, skip them.
+            // See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601
+            if src.promoted.is_none() {
+                assert!(mir_body.coverage_data.is_none());
+
+                let hash = hash_mir_source(tcx, &src);
+
+                debug!(
+                    "instrumenting {:?}, hash: {}, span: {}",
+                    src.def_id(),
+                    hash,
+                    tcx.sess.source_map().span_to_string(mir_body.span)
+                );
+
+                let num_counters = Instrumentor::new(tcx).inject_counters(mir_body);
+
+                mir_body.coverage_data = Some(CoverageData { hash, num_counters });
+            }
         }
     }
 }
 
-// The first counter (start of the function) is index zero.
-const INIT_FUNCTION_COUNTER: u32 = 0;
+impl<'tcx> Instrumentor<'tcx> {
+    fn new(tcx: TyCtxt<'tcx>) -> Self {
+        Self { tcx, num_counters: 0 }
+    }
 
-/// Injects calls to placeholder function `count_code_region()`.
-// FIXME(richkadel): As a first step, counters are only injected at the top of each function.
-// The complete solution will inject counters at each conditional code branch.
-pub fn instrument_coverage<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
-    let span = body.span.shrink_to_lo();
+    fn next_counter(&mut self) -> u32 {
+        let next = self.num_counters as u32;
+        self.num_counters += 1;
+        next
+    }
 
-    let count_code_region_fn = function_handle(
-        tcx,
-        tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
-        span,
-    );
-    let counter_index = Operand::const_from_scalar(
-        tcx,
-        tcx.types.u32,
-        Scalar::from_u32(INIT_FUNCTION_COUNTER),
-        span,
-    );
+    fn inject_counters(&mut self, mir_body: &mut mir::Body<'tcx>) -> usize {
+        // FIXME(richkadel): As a first step, counters are only injected at the top of each
+        // function. The complete solution will inject counters at each conditional code branch.
+        let top_of_function = START_BLOCK;
+        let entire_function = mir_body.span;
 
-    let mut patch = MirPatch::new(body);
+        self.inject_counter(mir_body, top_of_function, entire_function);
 
-    let new_block = patch.new_block(placeholder_block(SourceInfo::outermost(body.span)));
-    let next_block = START_BLOCK;
+        self.num_counters
+    }
 
-    let temp = patch.new_temp(tcx.mk_unit(), body.span);
-    patch.patch_terminator(
-        new_block,
-        TerminatorKind::Call {
-            func: count_code_region_fn,
-            args: vec![counter_index],
-            // new_block will swapped with the next_block, after applying patch
-            destination: Some((Place::from(temp), new_block)),
-            cleanup: None,
-            from_hir_call: false,
-            fn_span: span,
-        },
-    );
+    fn inject_counter(
+        &mut self,
+        mir_body: &mut mir::Body<'tcx>,
+        next_block: BasicBlock,
+        code_region: Span,
+    ) {
+        let injection_point = code_region.shrink_to_lo();
+
+        let count_code_region_fn = function_handle(
+            self.tcx,
+            self.tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
+            injection_point,
+        );
+        let counter_index = Operand::const_from_scalar(
+            self.tcx,
+            self.tcx.types.u32,
+            Scalar::from_u32(self.next_counter()),
+            injection_point,
+        );
+
+        let mut patch = MirPatch::new(mir_body);
+
+        let temp = patch.new_temp(self.tcx.mk_unit(), code_region);
+        let new_block = patch.new_block(placeholder_block(code_region));
+        patch.patch_terminator(
+            new_block,
+            TerminatorKind::Call {
+                func: count_code_region_fn,
+                args: vec![counter_index],
+                // new_block will swapped with the next_block, after applying patch
+                destination: Some((Place::from(temp), new_block)),
+                cleanup: None,
+                from_hir_call: false,
+                fn_span: injection_point,
+            },
+        );
 
-    patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp));
-    patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp));
+        patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp));
+        patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp));
 
-    patch.apply(body);
+        patch.apply(mir_body);
 
-    // To insert the `new_block` in front of the first block in the counted branch (for example,
-    // the START_BLOCK, at the top of the function), just swap the indexes, leaving the rest of the
-    // graph unchanged.
-    body.basic_blocks_mut().swap(next_block, new_block);
+        // To insert the `new_block` in front of the first block in the counted branch (the
+        // `next_block`), just swap the indexes, leaving the rest of the graph unchanged.
+        mir_body.basic_blocks_mut().swap(next_block, new_block);
+    }
 }
 
 fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {
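
The key invariant of the `Instrumentor` refactoring is dense counter allocation: each `next_counter()` call hands out the next index, so the final `num_counters` always bounds every emitted index. A reduced sketch of that bookkeeping (illustrative names):

/// Reduced model of the `Instrumentor`'s counter bookkeeping.
struct CounterAllocator {
    num_counters: usize,
}

impl CounterAllocator {
    fn new() -> Self {
        CounterAllocator { num_counters: 0 }
    }

    /// Same shape as `Instrumentor::next_counter`: hand out the next dense
    /// index and grow the count, so indices are always 0, 1, 2, ...
    fn next_counter(&mut self) -> u32 {
        let next = self.num_counters as u32;
        self.num_counters += 1;
        next
    }
}

fn main() {
    let mut alloc = CounterAllocator::new();
    // Today only one counter is injected (top of the function); the loop
    // stands in for the planned per-branch injection.
    let indices: Vec<u32> = (0..3).map(|_| alloc.next_counter()).collect();
    assert_eq!(indices, vec![0, 1, 2]);
    // The total recorded in CoverageData stays in sync with the indices used.
    assert_eq!(alloc.num_counters, 3);
}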
@@ -79,14 +126,59 @@ fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {
     Operand::function_handle(tcx, fn_def_id, substs, span)
 }
 
-fn placeholder_block<'tcx>(source_info: SourceInfo) -> BasicBlockData<'tcx> {
+fn placeholder_block(span: Span) -> BasicBlockData<'tcx> {
     BasicBlockData {
         statements: vec![],
         terminator: Some(Terminator {
-            source_info,
+            source_info: SourceInfo::outermost(span),
             // this gets overwritten by the counter Call
             kind: TerminatorKind::Unreachable,
         }),
         is_cleanup: false,
     }
 }
+
+fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, src: &MirSource<'tcx>) -> u64 {
+    let fn_body_id = match tcx.hir().get_if_local(src.def_id()) {
+        Some(node) => match associated_body(node) {
+            Some(body_id) => body_id,
+            _ => bug!("instrumented MirSource does not include a function body: {:?}", node),
+        },
+        None => bug!("instrumented MirSource is not local: {:?}", src),
+    };
+    let hir_body = tcx.hir().body(fn_body_id);
+    let mut hcx = tcx.create_no_span_stable_hashing_context();
+    hash(&mut hcx, &hir_body.value).to_smaller_hash()
+}
+
+fn hash(
+    hcx: &mut StableHashingContext<'tcx>,
+    node: &impl HashStable<StableHashingContext<'tcx>>,
+) -> Fingerprint {
+    let mut stable_hasher = StableHasher::new();
+    node.hash_stable(hcx, &mut stable_hasher);
+    stable_hasher.finish()
+}
+
+fn associated_body<'hir>(node: Node<'hir>) -> Option<BodyId> {
+    match node {
+        Node::Item(Item {
+            kind: ItemKind::Const(_, body) | ItemKind::Static(.., body) | ItemKind::Fn(.., body),
+            ..
+        })
+        | Node::TraitItem(TraitItem {
+            kind:
+                TraitItemKind::Const(_, Some(body)) | TraitItemKind::Fn(_, TraitFn::Provided(body)),
+            ..
+        })
+        | Node::ImplItem(ImplItem {
+            kind: ImplItemKind::Const(_, body) | ImplItemKind::Fn(_, body),
+            ..
+        })
+        | Node::Expr(Expr { kind: ExprKind::Closure(.., body, _, _), .. }) => Some(*body),
+        Node::AnonConst(constant) => Some(constant.body),
+        _ => None,
+    }
+}
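
`associated_body` funnels the body IDs of several HIR node kinds through one or-pattern match; only nodes that actually own a body can be instrumented and hashed. A reduced model of that dispatch (invented enum, not rustc's HIR):

/// Different node kinds carry their executable body in different places;
/// or-patterns funnel every body-carrying variant into a single Some arm.
enum Node {
    Fn { body: u32 },
    Const { body: u32 },
    ProvidedTraitFn { body: u32 },
    RequiredTraitFn, // declaration only: nothing to instrument or hash
}

fn associated_body(node: &Node) -> Option<u32> {
    match node {
        // Mirrors the Item/TraitItem/ImplItem/Expr or-pattern arm above.
        Node::Fn { body } | Node::Const { body } | Node::ProvidedTraitFn { body } => Some(*body),
        Node::RequiredTraitFn => None,
    }
}

fn main() {
    assert_eq!(associated_body(&Node::Fn { body: 7 }), Some(7));
    assert_eq!(associated_body(&Node::Const { body: 3 }), Some(3));
    assert_eq!(associated_body(&Node::RequiredTraitFn), None);
    println!("only nodes with bodies get instrumented and hashed");
}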


@@ -877,8 +877,9 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
         (such as entering an empty infinite loop) by inserting llvm.sideeffect \
         (default: no)"),
     instrument_coverage: bool = (false, parse_bool, [TRACKED],
-        "instrument the generated code with LLVM code region counters to \
-        (in the future) generate coverage reports (experimental; default: no)"),
+        "instrument the generated code with LLVM code region counters to (in the \
+        future) generate coverage reports (default: no; note, the compiler build \
+        config must include `profiler = true`)"),
     instrument_mcount: bool = (false, parse_bool, [TRACKED],
         "insert function instrument code for mcount-based tracing (default: no)"),
     keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED],


@@ -587,6 +587,7 @@ symbols! {
         proc_macro_mod,
         proc_macro_non_items,
         proc_macro_path_invoc,
+        profiler_builtins,
         profiler_runtime,
         ptr_offset_from,
         pub_restricted,