Use measureme in self-profiler

Related to #58372
Related to #58967
Wesley Wiser 2019-04-04 19:41:49 -04:00
parent 99da733f7f
commit 56e434d84d
16 changed files with 208 additions and 474 deletions


@@ -1474,6 +1474,16 @@ dependencies = [
"toml-query 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "measureme"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "memchr"
version = "2.1.1"

@@ -2326,6 +2336,7 @@ dependencies = [
"jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"polonius-engine 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -4106,6 +4117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
"checksum mdbook 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "90b5a8d7e341ceee5db3882a06078d42661ddcfa2b3687319cc5da76ec4e782f"
"checksum mdbook 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0ba0d44cb4089c741b9a91f3e5218298a40699c2f3a070a85014eed290c60819"
"checksum measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "36bb2b263a6795d352035024d6b30ce465bb79a5e5280d74c3b5f8464c657bcc"
"checksum memchr 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0a3eb002f0535929f1199681417029ebea04aadc0c7a4224b46be99c7f5d6a16"
"checksum memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e2ffa2c986de11a9df78620c01eeaaf27d94d3ff02bf81bfcca953102dd0c6ff"
"checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3"


@@ -36,6 +36,7 @@ byteorder = { version = "1.1", features = ["i128"]}
chalk-engine = { version = "0.9.0", default-features=false }
rustc_fs_util = { path = "../librustc_fs_util" }
smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
measureme = "0.2.1"

# Note that these dependencies are a lie, they're just here to get linkage to
# work.


@@ -46,8 +46,6 @@ use std::path::PathBuf;
use std::time::Duration;
use std::sync::{Arc, mpsc};
use parking_lot::Mutex as PlMutex;

mod code_stats;
pub mod config;
pub mod filesearch;

@@ -130,7 +128,7 @@ pub struct Session {
pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,
/// Used by -Z self-profile
pub self_profiling: Option<Arc<PlMutex<SelfProfiler>>>,
pub self_profiling: Option<Arc<SelfProfiler>>,
/// Some measurements that are being gathered during compilation.
pub perf_stats: PerfStats,

@@ -838,19 +836,17 @@ impl Session {
#[inline(never)]
#[cold]
fn profiler_active<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
fn profiler_active<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
match &self.self_profiling {
None => bug!("profiler_active() called but there was no profiler active"),
Some(profiler) => {
let mut p = profiler.lock();
f(&mut p);
f(&profiler);
}
}
}

#[inline(always)]
pub fn profiler<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
pub fn profiler<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
if unlikely!(self.self_profiling.is_some()) {
self.profiler_active(f)
}

@@ -1138,7 +1134,19 @@ fn build_session_(
driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
) -> Session {
let self_profiler =
if sopts.debugging_opts.self_profile { Some(Arc::new(PlMutex::new(SelfProfiler::new()))) }
if sopts.debugging_opts.self_profile {
let profiler = SelfProfiler::new();
match profiler {
Ok(profiler) => {
crate::ty::query::QueryName::register_with_profiler(&profiler);
Some(Arc::new(profiler))
},
Err(e) => {
early_warn(sopts.error_format, &format!("failed to create profiler: {}", e));
None
}
}
}
else { None };
let host_triple = TargetTriple::from_triple(config::host_triple());
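The profiler()/profiler_active() pair above is a deliberate hot-path/cold-path split: the always-inlined profiler() only performs the cheap self_profiling.is_some() test (wrapped in rustc's internal unlikely! hint), and the never-inlined, #[cold] profiler_active() does the actual dispatch. A minimal standalone sketch of the same pattern, using simplified stand-in Profiler and Session types and a plain if in place of unlikely!:

    use std::sync::Arc;

    struct Profiler;

    impl Profiler {
        fn record(&self, event: &str) {
            println!("profiler event: {}", event);
        }
    }

    struct Session {
        // None in the common case, so the hot path is just an is_some() check.
        self_profiling: Option<Arc<Profiler>>,
    }

    impl Session {
        #[inline(never)]
        #[cold]
        fn profiler_active<F: FnOnce(&Profiler)>(&self, f: F) {
            match &self.self_profiling {
                None => unreachable!("profiler_active() called but no profiler is active"),
                Some(profiler) => f(profiler),
            }
        }

        #[inline(always)]
        fn profiler<F: FnOnce(&Profiler)>(&self, f: F) {
            // rustc wraps this test in unlikely!() to tell the optimizer the
            // branch is almost never taken.
            if self.self_profiling.is_some() {
                self.profiler_active(f)
            }
        }
    }

    fn main() {
        let sess = Session { self_profiling: Some(Arc::new(Profiler)) };
        sess.profiler(|p| p.record("example event"));
    }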


@@ -3,7 +3,7 @@ use crate::dep_graph::DepNode;
use crate::hir::def_id::{CrateNum, DefId};
use crate::ty::TyCtxt;
use crate::ty::query::queries;
use crate::ty::query::Query;
use crate::ty::query::{Query, QueryName};
use crate::ty::query::QueryCache;
use crate::ty::query::plumbing::CycleError;
use crate::util::profiling::ProfileCategory;

@@ -18,7 +18,7 @@ use crate::ich::StableHashingContext;
// Query configuration and description traits.
pub trait QueryConfig<'tcx> {
const NAME: &'static str;
const NAME: QueryName;
const CATEGORY: ProfileCategory;
type Key: Eq + Hash + Clone + Debug;


@@ -41,7 +41,6 @@ use crate::ty::subst::SubstsRef;
use crate::util::nodemap::{DefIdSet, DefIdMap, ItemLocalSet};
use crate::util::common::{ErrorReported};
use crate::util::profiling::ProfileCategory::*;
use crate::session::Session;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::bit_set::BitSet;


@@ -114,7 +114,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
let mut lock = cache.borrow_mut();
if let Some(value) = lock.results.get(key) {
profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
tcx.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
tcx.sess.profiler(|p| p.record_query_hit(Q::NAME));
let result = (value.value.clone(), value.index);
#[cfg(debug_assertions)]
{

@@ -130,7 +130,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
//in another thread has completed. Record how long we wait in the
//self-profiler
#[cfg(parallel_compiler)]
tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME, Q::CATEGORY));
tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME));
job.clone()
},

@@ -172,7 +172,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
#[cfg(parallel_compiler)]
{
let result = job.r#await(tcx, span);
tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME, Q::CATEGORY));
tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME));
if let Err(cycle) = result {
return TryGetJob::Cycle(Q::handle_cycle_error(tcx, cycle));

@@ -358,14 +358,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
key: Q::Key)
-> Q::Value {
debug!("ty::query::get_query<{}>(key={:?}, span={:?})",
Q::NAME,
Q::NAME.as_str(),
key,
span);
profq_msg!(self,
ProfileQueriesMsg::QueryBegin(
span.data(),
profq_query_msg!(Q::NAME, self, key),
profq_query_msg!(Q::NAME.as_str(), self, key),
)
);

@@ -389,7 +389,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
if dep_node.kind.is_anon() {
profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.start_query(Q::NAME));
let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
self.start_query(job.job.clone(), diagnostics, |tcx| {

@@ -399,7 +399,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
})
});
self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.end_query(Q::NAME));
profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
self.dep_graph.read_index(dep_node_index);

@@ -474,14 +474,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
let result = if let Some(result) = result {
profq_msg!(self, ProfileQueriesMsg::CacheHit);
self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.record_query_hit(Q::NAME));
result
} else {
// We could not load a result from the on-disk cache, so
// recompute.
self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.start_query(Q::NAME));
// The dep-graph for this computation is already in
// place

@@ -489,7 +489,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
Q::compute(self, key)
});
self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.end_query(Q::NAME));
result
};

@@ -552,7 +552,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
key, dep_node);
profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.start_query(Q::NAME));
let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
self.start_query(job.job.clone(), diagnostics, |tcx| {

@@ -572,7 +572,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
})
});
self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.end_query(Q::NAME));
profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) {

@@ -619,7 +619,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
let _ = self.get_query::<Q>(DUMMY_SP, key);
} else {
profq_msg!(self, ProfileQueriesMsg::CacheHit);
self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
self.sess.profiler(|p| p.record_query_hit(Q::NAME));
}
}

@@ -632,7 +632,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
) {
profq_msg!(
self,
ProfileQueriesMsg::QueryBegin(span.data(), profq_query_msg!(Q::NAME, self, key))
ProfileQueriesMsg::QueryBegin(span.data(),
profq_query_msg!(Q::NAME.as_str(), self, key))
);
// We may be concurrently trying both execute and force a query

@@ -725,18 +726,6 @@ macro_rules! define_queries_inner {
}
}
pub fn record_computed_queries(&self, sess: &Session) {
sess.profiler(|p| {
$(
p.record_computed_queries(
<queries::$name<'_> as QueryConfig<'_>>::NAME,
<queries::$name<'_> as QueryConfig<'_>>::CATEGORY,
self.$name.lock().results.len()
);
)*
});
}
#[cfg(parallel_compiler)]
pub fn collect_active_jobs(&self) -> Vec<Lrc<QueryJob<$tcx>>> {
let mut jobs = Vec::new();

@@ -854,6 +843,24 @@ macro_rules! define_queries_inner {
}
}
#[allow(nonstandard_style)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum QueryName {
$($name),*
}
impl QueryName {
pub fn register_with_profiler(profiler: &crate::util::profiling::SelfProfiler) {
$(profiler.register_query_name(QueryName::$name);)*
}
pub fn as_str(&self) -> &'static str {
match self {
$(QueryName::$name => stringify!($name),)*
}
}
}
#[allow(nonstandard_style)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Query<$tcx> {

@@ -894,6 +901,12 @@ macro_rules! define_queries_inner {
$(Query::$name(key) => key.default_span(tcx),)*
}
}
pub fn query_name(&self) -> QueryName {
match self {
$(Query::$name(_) => QueryName::$name,)*
}
}
}

impl<'a, $tcx> HashStable<StableHashingContext<'a>> for Query<$tcx> {

@@ -930,7 +943,7 @@ macro_rules! define_queries_inner {
type Key = $K;
type Value = $V;
const NAME: &'static str = stringify!($name);
const NAME: QueryName = QueryName::$name;
const CATEGORY: ProfileCategory = $category;
}
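For illustration only, here is roughly what the QueryName pieces generated by define_queries_inner! above boil down to. The two query names (type_of, optimized_mir) are just example stand-ins; the real macro expands over every query the compiler defines:

    // Hand-expanded sketch, not the actual macro output.
    #[allow(nonstandard_style)]
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    pub enum QueryName {
        type_of,
        optimized_mir,
    }

    impl QueryName {
        pub fn as_str(&self) -> &'static str {
            match self {
                QueryName::type_of => "type_of",
                QueryName::optimized_mir => "optimized_mir",
            }
        }
    }

    fn main() {
        println!("{}", QueryName::type_of.as_str());
        println!("{}", QueryName::optimized_mir.as_str());
    }

Because the enum is Copy and carries no payload, handing Q::NAME to the profiler now means passing a small discriminant instead of a &'static str plus a ProfileCategory.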


@@ -1,14 +1,20 @@
use std::borrow::Cow;
use std::fs;
use std::error::Error;
use std::io::{BufWriter, Write};
use std::mem::{self, Discriminant};
use std::mem;
use std::process;
use std::thread::ThreadId;
use std::time::{Duration, Instant, SystemTime};
use std::u32;

use crate::session::config::Options;
use crate::ty::query::QueryName;
use rustc_data_structures::fx::FxHashMap;
use measureme::{StringId, TimestampKind};

/// MmapSerializationSink is faster on macOS and Linux
/// but FileSerializationSink is faster on Windows
#[cfg(not(windows))]
type Profiler = measureme::Profiler<measureme::MmapSerializationSink>;
#[cfg(windows)]
type Profiler = measureme::Profiler<measureme::FileSerializationSink>;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)]
pub enum ProfileCategory {

@@ -35,409 +41,129 @@ pub enum ProfilerEvent {
QueryBlockedEnd { query_name: &'static str, category: ProfileCategory, time: u64 },
}
impl ProfilerEvent {
fn timestamp(&self) -> u64 {
use self::ProfilerEvent::*;
match self {
QueryStart { time, .. } |
QueryEnd { time, .. } |
GenericActivityStart { time, .. } |
GenericActivityEnd { time, .. } |
QueryCacheHit { time, .. } |
QueryCount { time, .. } |
IncrementalLoadResultStart { time, .. } |
IncrementalLoadResultEnd { time, .. } |
QueryBlockedStart { time, .. } |
QueryBlockedEnd { time, .. } => *time
}
}
}
fn thread_id_to_u64(tid: ThreadId) -> u64 {
unsafe { mem::transmute::<ThreadId, u64>(tid) }
}

pub struct SelfProfiler {
events: FxHashMap<ThreadId, Vec<ProfilerEvent>>,
start_time: SystemTime,
start_instant: Instant,
profiler: Profiler,
query_event_kind: StringId,
generic_activity_event_kind: StringId,
incremental_load_result_event_kind: StringId,
query_blocked_event_kind: StringId,
query_cache_hit_event_kind: StringId,
}
impl SelfProfiler {
pub fn new() -> SelfProfiler {
let profiler = SelfProfiler {
events: Default::default(),
start_time: SystemTime::now(),
start_instant: Instant::now(),
};
profiler
}

pub fn new() -> Result<SelfProfiler, Box<dyn Error>> {
let filename = format!("pid-{}.rustc_profile", process::id());
let path = std::path::Path::new(&filename);
let profiler = Profiler::new(path)?;

let query_event_kind = profiler.alloc_string("Query");
let generic_activity_event_kind = profiler.alloc_string("GenericActivity");
let incremental_load_result_event_kind = profiler.alloc_string("IncrementalLoadResult");
let query_blocked_event_kind = profiler.alloc_string("QueryBlocked");
let query_cache_hit_event_kind = profiler.alloc_string("QueryCacheHit");

Ok(SelfProfiler {
profiler,
query_event_kind,
generic_activity_event_kind,
incremental_load_result_event_kind,
query_blocked_event_kind,
query_cache_hit_event_kind,
})
}

fn get_query_name_string_id(query_name: QueryName) -> StringId {
let discriminant = unsafe {
mem::transmute::<Discriminant<QueryName>, u64>(mem::discriminant(&query_name))
};
StringId::reserved(discriminant as u32)
}

pub fn register_query_name(&self, query_name: QueryName) {
let id = SelfProfiler::get_query_name_string_id(query_name);
self.profiler.alloc_string_with_reserved_id(id, query_name.as_str());
}
#[inline]
pub fn start_activity(
&mut self,
&self,
category: ProfileCategory,
label: impl Into<Cow<'static, str>>,
) {
self.record(ProfilerEvent::GenericActivityStart {
category,
label: label.into(),
time: self.get_time_from_start(),
})
self.record(&label.into(), self.generic_activity_event_kind, TimestampKind::Start);
}

#[inline]
pub fn end_activity(
&mut self,
&self,
category: ProfileCategory,
label: impl Into<Cow<'static, str>>,
) {
self.record(ProfilerEvent::GenericActivityEnd {
category,
label: label.into(),
time: self.get_time_from_start(),
})
self.record(&label.into(), self.generic_activity_event_kind, TimestampKind::End);
}

#[inline]
pub fn record_computed_queries(
&mut self,
query_name: &'static str,
category: ProfileCategory,
count: usize)
{
self.record(ProfilerEvent::QueryCount {
query_name,
category,
count,
time: self.get_time_from_start(),
})
}

#[inline]
pub fn record_query_hit(&self, query_name: QueryName) {
self.record_query(query_name, self.query_cache_hit_event_kind, TimestampKind::Instant);
}

#[inline]
pub fn record_query_hit(&mut self, query_name: &'static str, category: ProfileCategory) {
self.record(ProfilerEvent::QueryCacheHit {
query_name,
category,
time: self.get_time_from_start(),
})
}

#[inline]
pub fn start_query(&self, query_name: QueryName) {
self.record_query(query_name, self.query_event_kind, TimestampKind::Start);
}

#[inline]
pub fn start_query(&mut self, query_name: &'static str, category: ProfileCategory) {
self.record(ProfilerEvent::QueryStart {
query_name,
category,
time: self.get_time_from_start(),
});
}

#[inline]
pub fn end_query(&self, query_name: QueryName) {
self.record_query(query_name, self.query_event_kind, TimestampKind::End);
}

#[inline]
pub fn end_query(&mut self, query_name: &'static str, category: ProfileCategory) {
self.record(ProfilerEvent::QueryEnd {
query_name,
category,
time: self.get_time_from_start(),
})
}

#[inline]
pub fn incremental_load_result_start(&self, query_name: QueryName) {
self.record_query(
query_name,
self.incremental_load_result_event_kind,
TimestampKind::Start
);
}

#[inline]
pub fn incremental_load_result_start(&mut self, query_name: &'static str) {
self.record(ProfilerEvent::IncrementalLoadResultStart {
query_name,
time: self.get_time_from_start(),
})
}

#[inline]
pub fn incremental_load_result_end(&self, query_name: QueryName) {
self.record_query(query_name, self.incremental_load_result_event_kind, TimestampKind::End);
}

#[inline]
pub fn incremental_load_result_end(&mut self, query_name: &'static str) {
self.record(ProfilerEvent::IncrementalLoadResultEnd {
query_name,
time: self.get_time_from_start(),
})
}

#[inline]
pub fn query_blocked_start(&self, query_name: QueryName) {
self.record_query(query_name, self.query_blocked_event_kind, TimestampKind::Start);
}

#[inline]
pub fn query_blocked_start(&mut self, query_name: &'static str, category: ProfileCategory) {
self.record(ProfilerEvent::QueryBlockedStart {
query_name,
category,
time: self.get_time_from_start(),
})
}

#[inline]
pub fn query_blocked_end(&self, query_name: QueryName) {
self.record_query(query_name, self.query_blocked_event_kind, TimestampKind::End);
}

#[inline]
pub fn query_blocked_end(&mut self, query_name: &'static str, category: ProfileCategory) {
self.record(ProfilerEvent::QueryBlockedEnd {
query_name,
category,
time: self.get_time_from_start(),
})
}

#[inline]
fn record(&self, event_id: &str, event_kind: StringId, timestamp_kind: TimestampKind) {
let thread_id = thread_id_to_u64(std::thread::current().id());
let event_id = self.profiler.alloc_string(event_id);
self.profiler.record_event(event_kind, event_id, thread_id, timestamp_kind);
}

#[inline]
fn record(&mut self, event: ProfilerEvent) {
let thread_id = std::thread::current().id();
let events = self.events.entry(thread_id).or_default();
events.push(event);
}

#[inline]
fn get_time_from_start(&self) -> u64 {
let duration = Instant::now() - self.start_instant;
duration.as_nanos() as u64
}

#[inline]
fn record_query(
&self,
query_name: QueryName,
event_kind: StringId,
timestamp_kind: TimestampKind,
) {
let dep_node_name = SelfProfiler::get_query_name_string_id(query_name);
let thread_id = thread_id_to_u64(std::thread::current().id());
self.profiler.record_event(event_kind, dep_node_name, thread_id, timestamp_kind);
}
pub fn dump_raw_events(&self, opts: &Options) {
use self::ProfilerEvent::*;
let pid = process::id();
let filename =
format!("{}.profile_events.json", opts.crate_name.clone().unwrap_or_default());
let mut file = BufWriter::new(fs::File::create(filename).unwrap());
let threads: Vec<_> =
self.events
.keys()
.into_iter()
.map(|tid| format!("{}", thread_id_to_u64(*tid)))
.collect();
write!(file,
"{{\
\"processes\": {{\
\"{}\": {{\
\"threads\": [{}],\
\"crate_name\": \"{}\",\
\"opt_level\": \"{:?}\",\
\"incremental\": {}\
}}\
}},\
\"events\": [\
",
pid,
threads.join(","),
opts.crate_name.clone().unwrap_or_default(),
opts.optimize,
if opts.incremental.is_some() { "true" } else { "false" },
).unwrap();
let mut is_first = true;
for (thread_id, events) in &self.events {
let thread_id = thread_id_to_u64(*thread_id);
for event in events {
if is_first {
is_first = false;
} else {
writeln!(file, ",").unwrap();
}
let (secs, nanos) = {
let time = self.start_time + Duration::from_nanos(event.timestamp());
let time_since_unix =
time.duration_since(SystemTime::UNIX_EPOCH).unwrap_or_default();
(time_since_unix.as_secs(), time_since_unix.subsec_nanos())
};
match event {
QueryStart { query_name, category, time: _ } =>
write!(file,
"{{ \
\"QueryStart\": {{ \
\"query_name\": \"{}\",\
\"category\": \"{:?}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
query_name,
category,
secs,
nanos,
thread_id,
).unwrap(),
QueryEnd { query_name, category, time: _ } =>
write!(file,
"{{\
\"QueryEnd\": {{\
\"query_name\": \"{}\",\
\"category\": \"{:?}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
query_name,
category,
secs,
nanos,
thread_id,
).unwrap(),
GenericActivityStart { category, label, time: _ } =>
write!(file,
"{{
\"GenericActivityStart\": {{\
\"category\": \"{:?}\",\
\"label\": \"{}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
category,
label,
secs,
nanos,
thread_id,
).unwrap(),
GenericActivityEnd { category, label, time: _ } =>
write!(file,
"{{\
\"GenericActivityEnd\": {{\
\"category\": \"{:?}\",\
\"label\": \"{}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
category,
label,
secs,
nanos,
thread_id,
).unwrap(),
QueryCacheHit { query_name, category, time: _ } =>
write!(file,
"{{\
\"QueryCacheHit\": {{\
\"query_name\": \"{}\",\
\"category\": \"{:?}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
query_name,
category,
secs,
nanos,
thread_id,
).unwrap(),
QueryCount { query_name, category, count, time: _ } =>
write!(file,
"{{\
\"QueryCount\": {{\
\"query_name\": \"{}\",\
\"category\": \"{:?}\",\
\"count\": {},\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
query_name,
category,
count,
secs,
nanos,
thread_id,
).unwrap(),
IncrementalLoadResultStart { query_name, time: _ } =>
write!(file,
"{{\
\"IncrementalLoadResultStart\": {{\
\"query_name\": \"{}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
query_name,
secs,
nanos,
thread_id,
).unwrap(),
IncrementalLoadResultEnd { query_name, time: _ } =>
write!(file,
"{{\
\"IncrementalLoadResultEnd\": {{\
\"query_name\": \"{}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
query_name,
secs,
nanos,
thread_id,
).unwrap(),
QueryBlockedStart { query_name, category, time: _ } =>
write!(file,
"{{\
\"QueryBlockedStart\": {{\
\"query_name\": \"{}\",\
\"category\": \"{:?}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
query_name,
category,
secs,
nanos,
thread_id,
).unwrap(),
QueryBlockedEnd { query_name, category, time: _ } =>
write!(file,
"{{\
\"QueryBlockedEnd\": {{\
\"query_name\": \"{}\",\
\"category\": \"{:?}\",\
\"time\": {{\
\"secs\": {},\
\"nanos\": {}\
}},\
\"thread_id\": {}\
}}\
}}",
query_name,
category,
secs,
nanos,
thread_id,
).unwrap(),
}
}
}
write!(file, "] }}").unwrap();
} }
}
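The reserved-StringId scheme in get_query_name_string_id above hinges on reading an enum's discriminant out as a plain integer. A self-contained sketch of just that mapping (the enum and its two variants are placeholders; the transmute is copied from the code above and, like it, assumes Discriminant<QueryName> is exactly 64 bits wide):

    use std::mem::{self, Discriminant};

    #[allow(non_camel_case_types)]
    #[derive(Clone, Copy)]
    enum QueryName {
        type_of,
        optimized_mir,
    }

    // Same trick as SelfProfiler::get_query_name_string_id: expose the
    // discriminant of a fieldless enum as a u64.
    fn discriminant_as_u64(q: QueryName) -> u64 {
        unsafe { mem::transmute::<Discriminant<QueryName>, u64>(mem::discriminant(&q)) }
    }

    fn main() {
        // In the profiler this number becomes a reserved measureme string id
        // via StringId::reserved(discriminant as u32); register_query_name
        // then binds that id to the query's human-readable name once, up front.
        println!("type_of       -> {}", discriminant_as_u64(QueryName::type_of));
        println!("optimized_mir -> {}", discriminant_as_u64(QueryName::optimized_mir));
    }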


@@ -15,7 +15,6 @@ use rustc::hir::def_id::LOCAL_CRATE;
use rustc::middle::exported_symbols::SymbolExportLevel;
use rustc::session::config::{self, Lto};
use rustc::util::common::time_ext;
use rustc::util::profiling::ProfileCategory;
use rustc_data_structures::fx::FxHashMap;
use rustc_codegen_ssa::{ModuleCodegen, ModuleKind};

@@ -67,8 +66,7 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
.iter()
.filter_map(symbol_filter)
.collect::<Vec<CString>>();
let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
"generate_symbol_white_list_for_thinlto");
let _timer = cgcx.profile_activity("generate_symbol_white_list_for_thinlto");
info!("{} symbols to preserve in this crate", symbol_white_list.len());
// If we're performing LTO for the entire crate graph, then for each of our

@@ -97,8 +95,7 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
}
for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
format!("load: {}", path.display()));
let _timer = cgcx.profile_activity(format!("load: {}", path.display()));
let exported_symbols = cgcx.exported_symbols
.as_ref().expect("needs exported symbols for LTO");
symbol_white_list.extend(

@@ -727,8 +724,7 @@ pub unsafe fn optimize_thin_module(
// Like with "fat" LTO, get some better optimizations if landing pads
// are disabled by removing all landing pads.
if cgcx.no_landing_pads {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
"LLVM_remove_landing_pads");
let _timer = cgcx.profile_activity("LLVM_remove_landing_pads");
llvm::LLVMRustMarkAllFunctionsNounwind(llmod);
save_temp_bitcode(&cgcx, &module, "thin-lto-after-nounwind");
}


@@ -18,7 +18,6 @@ use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_codegen_ssa::{ModuleCodegen, CompiledModule};
use rustc::util::common::time_ext;
use rustc::util::profiling::ProfileCategory;
use rustc_fs_util::{path_to_c_string, link_or_copy};
use rustc_data_structures::small_c_str::SmallCStr;
use errors::{Handler, FatalError};

@@ -414,7 +413,7 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
// Finally, run the actual optimization passes
{
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_function_passes");
let _timer = cgcx.profile_activity("LLVM_function_passes");
time_ext(config.time_passes,
None,
&format!("llvm function passes [{}]", module_name.unwrap()),

@@ -423,7 +422,7 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
});
}
{
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_module_passes");
let _timer = cgcx.profile_activity("LLVM_module_passes");
time_ext(config.time_passes,
None,
&format!("llvm module passes [{}]", module_name.unwrap()),

@@ -445,7 +444,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
config: &ModuleConfig)
-> Result<CompiledModule, FatalError>
{
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "codegen");
let _timer = cgcx.profile_activity("codegen");
{
let llmod = module.module_llvm.llmod();
let llcx = &*module.module_llvm.llcx;

@@ -496,12 +495,12 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
if write_bc || config.emit_bc_compressed || config.embed_bitcode {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_make_bitcode");
let _timer = cgcx.profile_activity("LLVM_make_bitcode");
let thin = ThinBuffer::new(llmod);
let data = thin.data();
if write_bc {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_bitcode");
let _timer = cgcx.profile_activity("LLVM_emit_bitcode");
if let Err(e) = fs::write(&bc_out, data) {
let msg = format!("failed to write bytecode to {}: {}", bc_out.display(), e);
diag_handler.err(&msg);

@@ -509,13 +508,12 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
}
if config.embed_bitcode {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_embed_bitcode");
let _timer = cgcx.profile_activity("LLVM_embed_bitcode");
embed_bitcode(cgcx, llcx, llmod, Some(data));
}
if config.emit_bc_compressed {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen,
"LLVM_compress_bitcode");
let _timer = cgcx.profile_activity("LLVM_compress_bitcode");
let dst = bc_out.with_extension(RLIB_BYTECODE_EXTENSION);
let data = bytecode::encode(&module.name, data);
if let Err(e) = fs::write(&dst, data) {

@@ -530,7 +528,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
time_ext(config.time_passes, None, &format!("codegen passes [{}]", module_name.unwrap()),
|| -> Result<(), FatalError> {
if config.emit_ir {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_ir");
let _timer = cgcx.profile_activity("LLVM_emit_ir");
let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
let out_c = path_to_c_string(&out);

@@ -577,7 +575,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
}
if config.emit_asm || asm_to_obj {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_asm");
let _timer = cgcx.profile_activity("LLVM_emit_asm");
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
// We can't use the same module for asm and binary output, because that triggers

@@ -595,13 +593,13 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<LlvmCodegenBackend>,
}
if write_obj {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_emit_obj");
let _timer = cgcx.profile_activity("LLVM_emit_obj");
with_codegen(tm, llmod, config.no_builtins, |cpm| {
write_output_file(diag_handler, tm, cpm, llmod, &obj_out,
llvm::FileType::ObjectFile)
})?;
} else if asm_to_obj {
let _timer = cgcx.profile_activity(ProfileCategory::Codegen, "LLVM_asm_to_obj");
let _timer = cgcx.profile_activity("LLVM_asm_to_obj");
let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
run_assembler(cgcx, diag_handler, &assembly, &obj_out);

@@ -65,7 +65,6 @@ use rustc::middle::cstore::{EncodedMetadata, MetadataLoader};
use rustc::session::Session;
use rustc::session::config::{OutputFilenames, OutputType, PrintRequest, OptLevel};
use rustc::ty::{self, TyCtxt};
use rustc::util::profiling::ProfileCategory;
use rustc::util::common::ErrorReported;
use rustc_mir::monomorphize;
use rustc_codegen_ssa::ModuleCodegen;

@@ -330,12 +329,12 @@ impl CodegenBackend for LlvmCodegenBackend {
// Run the linker on any artifacts that resulted from the LLVM run.
// This should produce either a finished executable or library.
sess.profiler(|p| p.start_activity(ProfileCategory::Linking, "link_crate"));
sess.profiler(|p| p.start_activity("link_crate"));
time(sess, "linking", || {
back::link::link_binary(sess, &codegen_results,
outputs, &codegen_results.crate_name.as_str());
});
sess.profiler(|p| p.end_activity(ProfileCategory::Linking, "link_crate"));
sess.profiler(|p| p.end_activity("link_crate"));
// Now that we won't touch anything in the incremental compilation directory
// any more, we can finalize it (which involves renaming it)


@@ -18,7 +18,7 @@ use rustc::util::nodemap::FxHashMap;
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc::ty::TyCtxt;
use rustc::util::common::{time_depth, set_time_depth, print_time_passes_entry};
use rustc::util::profiling::{ProfileCategory, SelfProfiler};
use rustc::util::profiling::SelfProfiler;
use rustc_fs_util::link_or_copy;
use rustc_data_structures::svh::Svh;
use rustc_errors::{Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};

@@ -29,7 +29,6 @@ use syntax::ext::hygiene::Mark;
use syntax_pos::MultiSpan;
use syntax_pos::symbol::Symbol;
use jobserver::{Client, Acquired};
use parking_lot::Mutex as PlMutex;
use std::any::Any;
use std::borrow::Cow;

@@ -198,25 +197,21 @@ impl<B: WriteBackendMethods> Clone for TargetMachineFactory<B> {
}

pub struct ProfileGenericActivityTimer {
profiler: Option<Arc<PlMutex<SelfProfiler>>>,
profiler: Option<Arc<SelfProfiler>>,
category: ProfileCategory,
label: Cow<'static, str>,
}

impl ProfileGenericActivityTimer {
pub fn start(
profiler: Option<Arc<PlMutex<SelfProfiler>>>,
profiler: Option<Arc<SelfProfiler>>,
category: ProfileCategory,
label: Cow<'static, str>,
) -> ProfileGenericActivityTimer {
if let Some(profiler) = &profiler {
let mut p = profiler.lock();
p.start_activity(category, label.clone());
profiler.start_activity(label.clone());
}
ProfileGenericActivityTimer {
profiler,
category,
label,
}
}

@@ -225,8 +220,7 @@ impl ProfileGenericActivityTimer {
impl Drop for ProfileGenericActivityTimer {
fn drop(&mut self) {
if let Some(profiler) = &self.profiler {
let mut p = profiler.lock();
p.end_activity(self.category, self.label.clone());
profiler.end_activity(self.label.clone());
}
}
}

@@ -237,7 +231,7 @@ pub struct CodegenContext<B: WriteBackendMethods> {
// Resources needed when running LTO
pub backend: B,
pub time_passes: bool,
pub profiler: Option<Arc<PlMutex<SelfProfiler>>>,
pub profiler: Option<Arc<SelfProfiler>>,
pub lto: Lto,
pub no_landing_pads: bool,
pub save_temps: bool,

@@ -291,19 +285,17 @@ impl<B: WriteBackendMethods> CodegenContext<B> {
#[inline(never)]
#[cold]
fn profiler_active<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
fn profiler_active<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
match &self.profiler {
None => bug!("profiler_active() called but there was no profiler active"),
Some(profiler) => {
let mut p = profiler.lock();
f(&mut p);
f(&*profiler);
}
}
}

#[inline(always)]
pub fn profile<F: FnOnce(&mut SelfProfiler) -> ()>(&self, f: F) {
pub fn profile<F: FnOnce(&SelfProfiler) -> ()>(&self, f: F) {
if unlikely!(self.profiler.is_some()) {
self.profiler_active(f)
}

@@ -311,10 +303,9 @@ impl<B: WriteBackendMethods> CodegenContext<B> {
pub fn profile_activity(
&self,
category: ProfileCategory,
label: impl Into<Cow<'static, str>>,
) -> ProfileGenericActivityTimer {
ProfileGenericActivityTimer::start(self.profiler.clone(), category, label.into())
ProfileGenericActivityTimer::start(self.profiler.clone(), label.into())
}
}

@@ -324,7 +315,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
needs_thin_lto: Vec<(String, B::ThinBuffer)>,
import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>
) -> Vec<(WorkItem<B>, u64)> {
cgcx.profile(|p| p.start_activity(ProfileCategory::Linking, "codegen_run_lto"));
cgcx.profile(|p| p.start_activity("codegen_run_lto"));
let (lto_modules, copy_jobs) = if !needs_fat_lto.is_empty() {
assert!(needs_thin_lto.is_empty());

@@ -351,7 +342,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
}), 0)
})).collect();
cgcx.profile(|p| p.end_activity(ProfileCategory::Linking, "codegen_run_lto"));
cgcx.profile(|p| p.end_activity("codegen_run_lto"));
result
}

@@ -1655,9 +1646,9 @@ fn spawn_work<B: ExtraBackendMethods>(
// surface that there was an error in this worker.
bomb.result = {
let label = work.name();
cgcx.profile(|p| p.start_activity(ProfileCategory::Codegen, label.clone()));
cgcx.profile(|p| p.start_activity(label.clone()));
let result = execute_work_item(&cgcx, work).ok();
cgcx.profile(|p| p.end_activity(ProfileCategory::Codegen, label));
cgcx.profile(|p| p.end_activity(label));
result
};
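ProfileGenericActivityTimer above is a small RAII guard: profile_activity records the start event immediately, and the matching end event fires from Drop when the _timer binding goes out of scope. A self-contained sketch of that shape (Profiler and ActivityTimer here are illustrative stand-ins, not the rustc API):

    use std::borrow::Cow;
    use std::sync::Arc;

    struct Profiler;

    impl Profiler {
        fn start_activity(&self, label: &str) { println!("start {}", label); }
        fn end_activity(&self, label: &str) { println!("end   {}", label); }
    }

    struct ActivityTimer {
        profiler: Option<Arc<Profiler>>,
        label: Cow<'static, str>,
    }

    impl ActivityTimer {
        fn start(profiler: Option<Arc<Profiler>>, label: Cow<'static, str>) -> ActivityTimer {
            // Emit the start event right away, but only if profiling is enabled.
            if let Some(p) = &profiler {
                p.start_activity(&label);
            }
            ActivityTimer { profiler, label }
        }
    }

    impl Drop for ActivityTimer {
        fn drop(&mut self) {
            // The end event is emitted automatically when the guard is dropped.
            if let Some(p) = &self.profiler {
                p.end_activity(&self.label);
            }
        }
    }

    fn main() {
        let profiler = Some(Arc::new(Profiler));
        {
            let _timer = ActivityTimer::start(profiler.clone(), "codegen example_cgu".into());
            // ... work being timed ...
        } // end event fires here
    }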


@@ -25,7 +25,6 @@ use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt};
use rustc::ty::query::Providers;
use rustc::middle::cstore::{self, LinkagePreference};
use rustc::util::common::{time, print_time_passes_entry};
use rustc::util::profiling::ProfileCategory;
use rustc::session::config::{self, EntryFnType, Lto};
use rustc::session::Session;
use rustc_mir::monomorphize::item::DefPathBasedNames;

@@ -539,7 +538,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
let cgu_name_builder = &mut CodegenUnitNameBuilder::new(tcx);
// Codegen the metadata.
tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen, "codegen crate metadata"));
tcx.sess.profiler(|p| p.start_activity("codegen crate metadata"));
let metadata_cgu_name = cgu_name_builder.build_cgu_name(LOCAL_CRATE,
&["crate"],

@@ -549,7 +548,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
let metadata = time(tcx.sess, "write metadata", || {
backend.write_metadata(tcx, &mut metadata_llvm_module)
});
tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen, "codegen crate metadata"));
tcx.sess.profiler(|p| p.end_activity("codegen crate metadata"));
let metadata_module = ModuleCodegen {
name: metadata_cgu_name,

@@ -662,14 +661,12 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
match cgu_reuse {
CguReuse::No => {
tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen,
format!("codegen {}", cgu.name())));
tcx.sess.profiler(|p| p.start_activity(format!("codegen {}", cgu.name())));
let start_time = Instant::now();
let stats = backend.compile_codegen_unit(tcx, *cgu.name());
all_stats.extend(stats);
total_codegen_time += start_time.elapsed();
tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen,
format!("codegen {}", cgu.name())));
tcx.sess.profiler(|p| p.end_activity(format!("codegen {}", cgu.name())));
false
}
CguReuse::PreLto => {


@@ -123,10 +123,6 @@ where
profile::dump(&compiler.sess, "profile_queries".to_string())
}

if compiler.sess.opts.debugging_opts.self_profile {
compiler.sess.profiler(|p| p.dump_raw_events(&compiler.sess.opts));
}

r
}


@@ -69,7 +69,7 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
.set_continue_after_error(sess.opts.debugging_opts.continue_parse_after_error);
hygiene::set_default_edition(sess.edition());
sess.profiler(|p| p.start_activity(ProfileCategory::Parsing, "parsing"));
sess.profiler(|p| p.start_activity("parsing"));
let krate = time(sess, "parsing", || match *input {
Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
Input::Str {

@@ -77,7 +77,7 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
ref name,
} => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
})?;
sess.profiler(|p| p.end_activity(ProfileCategory::Parsing, "parsing"));
sess.profiler(|p| p.end_activity("parsing"));
sess.diagnostic().set_continue_after_error(true);

@@ -375,7 +375,7 @@ fn configure_and_expand_inner<'a>(
syntax_ext::register_builtins(&mut resolver, plugin_info.syntax_exts);
// Expand all macros
sess.profiler(|p| p.start_activity(ProfileCategory::Expansion, "macro expansion"));
sess.profiler(|p| p.start_activity("macro expansion"));
krate = time(sess, "expansion", || {
// Windows dlls do not have rpaths, so they don't know how to find their
// dependencies. It's up to us to tell the system where to find all the

@@ -450,7 +450,7 @@ fn configure_and_expand_inner<'a>(
}
krate
});
sess.profiler(|p| p.end_activity(ProfileCategory::Expansion, "macro expansion"));
sess.profiler(|p| p.end_activity("macro expansion"));
time(sess, "maybe building test harness", || {
syntax::test::modify_for_testing(

@@ -869,8 +869,6 @@ pub fn create_global_ctxt(
yield BoxedGlobalCtxt::initial_yield(());
box_region_allow_access!(for('gcx), (&'gcx GlobalCtxt<'gcx>), (gcx));
gcx.queries.record_computed_queries(sess);
if sess.opts.debugging_opts.query_stats {
gcx.queries.print_stats();
}

@@ -1022,9 +1020,9 @@ pub fn start_codegen<'tcx>(
::rustc::middle::dependency_format::calculate(tcx)
});
tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen, "codegen crate"));
tcx.sess.profiler(|p| p.start_activity("codegen crate"));
let codegen = time(tcx.sess, "codegen", move || codegen_backend.codegen_crate(tcx, rx));
tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen, "codegen crate"));
tcx.sess.profiler(|p| p.end_activity("codegen crate"));
if log_enabled!(::log::Level::Info) {
println!("Post-codegen");


@@ -109,7 +109,6 @@ use rustc::ty::subst::SubstsRef;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::query::Providers;
use rustc::util;
use rustc::util::profiling::ProfileCategory;
use syntax_pos::Span;
use util::common::time;

@@ -319,7 +318,7 @@ pub fn provide(providers: &mut Providers<'_>) {
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> Result<(), ErrorReported>
{
tcx.sess.profiler(|p| p.start_activity(ProfileCategory::TypeChecking, "type-check crate"));
tcx.sess.profiler(|p| p.start_activity("type-check crate"));
// this ensures that later parts of type checking can assume that items
// have valid types and not error

@@ -370,7 +369,7 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
check_unused::check_crate(tcx);
check_for_entry_fn(tcx);
tcx.sess.profiler(|p| p.end_activity(ProfileCategory::TypeChecking, "type-check crate"));
tcx.sess.profiler(|p| p.end_activity("type-check crate"));
if tcx.sess.err_count() == 0 {
Ok(())


@@ -96,6 +96,7 @@ const WHITELIST: &[Crate<'_>] = &[
Crate("lock_api"),
Crate("log"),
Crate("log_settings"),
Crate("measureme"),
Crate("memchr"),
Crate("memmap"),
Crate("memoffset"),