Auto merge of #69440 - Dylan-DPC:rollup-hj4bo9l, r=Dylan-DPC
Rollup of 6 pull requests

Successful merges:

 - #69220 (Add documentation for the `-Zself-profile` flag)
 - #69391 (Add rustdoc aliases to `ptr::copy` and `ptr::copy_nonoverlapping`)
 - #69427 (Cleanup e0368 e0369)
 - #69433 (don't explicitly compare against true or false)
 - #69435 (Replace uses of Cell::get + Cell::set with Cell::replace.)
 - #69437 (no more codegen for miri_start_panic)

Failed merges:

r? @ghost
commit e3a277943e
18 changed files with 150 additions and 32 deletions
74
src/doc/unstable-book/src/compiler-flags/self-profile-events.md
Normal file

@@ -0,0 +1,74 @@
# `self-profile-events`

---------------------

The `-Zself-profile-events` compiler flag controls what events are recorded by the self-profiler when it is enabled via the `-Zself-profile` flag.

This flag takes a comma-delimited list of event types to record.

For example:

```console
$ rustc -Zself-profile -Zself-profile-events=default,args
```

## Event types

- `query-provider`
  - Traces each query used internally by the compiler.

- `generic-activity`
  - Traces other parts of the compiler not covered by the query system.

- `query-cache-hit`
  - Records when the in-memory query cache is "hit", so a cached query does not need to be re-executed.
  - Disabled by default because this significantly increases the trace file size.

- `query-blocked`
  - Tracks the time a query spends blocked, waiting for another thread executing the same query to finish.
  - Query blocking only occurs when the compiler is built with parallel mode support.

- `incr-cache-load`
  - Tracks the time spent loading and deserializing query results from the incremental compilation on-disk cache.

- `query-keys`
  - Adds a serialized representation of each query's query key to the tracing data.
  - Disabled by default because this significantly increases the trace file size.

- `function-args`
  - Adds additional tracing data to some `generic-activity` events.
  - Disabled by default for parity with `query-keys`.

- `llvm`
  - Adds tracing information about LLVM passes and code generation.
  - Disabled by default because this only works when `-Znew-llvm-pass-manager` is enabled.

## Event synonyms

- `none`
  - Disables all events. Equivalent to the self-profiler being disabled.

- `default`
  - The default set of events, which strikes a balance between providing detailed tracing data and adding additional overhead to the compilation.

- `args`
  - Equivalent to `query-keys` and `function-args`.

- `all`
  - Enables all events.

## Examples

Enable the profiler and capture the default set of events (both invocations are equivalent):

```console
$ rustc -Zself-profile
$ rustc -Zself-profile -Zself-profile-events=default
```

Enable the profiler and capture the default events and their arguments:

```console
$ rustc -Zself-profile -Zself-profile-events=default,args
```
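Any of the event types listed above can be combined in the same comma-delimited list; for instance, to record query cache hits on top of the default set. This invocation is illustrative and not part of the documentation added in this commit:

```console
$ rustc -Zself-profile -Zself-profile-events=default,query-cache-hit
```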
47
src/doc/unstable-book/src/compiler-flags/self-profile.md
Normal file

@@ -0,0 +1,47 @@
# `self-profile`

--------------------

The `-Zself-profile` compiler flag enables rustc's internal profiler.
When enabled, the compiler will output three binary files in the specified directory (or the current working directory if no directory is specified).
These files can be analyzed by using the tools in the [`measureme`] repository.

To control the data recorded in the trace files, use the `-Zself-profile-events` flag.

For example:

First, run a compilation session and provide the `-Zself-profile` flag:

```console
$ rustc --crate-name foo -Zself-profile
```
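To write the trace files somewhere other than the current working directory, the description above mentions a "specified directory"; presumably this is passed as an argument to the flag itself. The `=<directory>` form and the directory name below are assumptions, not shown in this commit:

```console
$ rustc --crate-name foo -Zself-profile=./profile-data
```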
This will generate three files in the working directory such as:

- `foo-1234.events`
- `foo-1234.string_data`
- `foo-1234.string_index`

Where `foo` is the name of the crate and `1234` is the process id of the rustc process.

To get a summary of where the compiler is spending its time:

```console
$ ../measureme/target/release/summarize summarize foo-1234
```

To generate a flamegraph of the same data:

```console
$ ../measureme/target/release/inferno foo-1234
```

To dump the event data in a Chromium-profiler compatible format:

```console
$ ../measureme/target/release/crox foo-1234
```

For more information, consult the [`measureme`] documentation.

[`measureme`]: https://github.com/rust-lang/measureme.git
@@ -1515,6 +1515,7 @@ fn overlaps<T>(src: *const T, dst: *const T, count: usize) -> bool {
 /// ```
 ///
 /// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append
+#[doc(alias = "memcpy")]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[inline]
 pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
@@ -1579,6 +1580,7 @@ pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
 /// dst
 /// }
 /// ```
+#[doc(alias = "memmove")]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[inline]
 pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
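The aliases added above let rustdoc's search find `ptr::copy_nonoverlapping` and `ptr::copy` under the familiar C names. A minimal sketch of attaching such an alias to one's own item follows; the function, its body, and the `memset` alias are illustrative, not part of this commit:

```rust
/// Fills `count` bytes starting at `dst` with `value`.
///
/// Searching rustdoc for "memset" also surfaces this function,
/// thanks to the alias attribute below.
#[doc(alias = "memset")]
pub unsafe fn fill_bytes(dst: *mut u8, value: u8, count: usize) {
    // Delegate to the standard library primitive.
    unsafe { core::ptr::write_bytes(dst, value, count) }
}
```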
@@ -598,7 +598,7 @@ impl AllocationDefinedness {
     pub fn all_bytes_undef(&self) -> bool {
         // The `ranges` are run-length encoded and of alternating definedness.
         // So if `ranges.len() > 1` then the second block is a range of defined.
-        self.initial == false && self.ranges.len() == 1
+        !self.initial && self.ranges.len() == 1
     }
 }
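A simplified model of the reasoning in the comment above, assuming (as the comment states) that `ranges` stores run lengths of alternating definedness and `initial` is the definedness of the first run; the struct here is a stand-in, not the compiler's actual type:

```rust
// Stand-in for the run-length-encoded definedness mask described above.
struct Definedness {
    initial: bool,    // definedness of the first run
    ranges: Vec<u64>, // run lengths; definedness alternates run by run
}

impl Definedness {
    fn all_bytes_undef(&self) -> bool {
        // A second run would necessarily be defined, so every byte is
        // undefined only when the single run is itself undefined.
        !self.initial && self.ranges.len() == 1
    }
}

fn main() {
    let all_undef = Definedness { initial: false, ranges: vec![16] };
    let half_defined = Definedness { initial: false, ranges: vec![8, 8] };
    assert!(all_undef.all_bytes_undef());
    assert!(!half_defined.all_bytes_undef());
}
```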
@@ -64,8 +64,7 @@ thread_local! {
 /// calling the same query.
 pub fn with_no_queries<F: FnOnce() -> R, R>(f: F) -> R {
     NO_QUERIES.with(|no_queries| {
-        let old = no_queries.get();
-        no_queries.set(true);
+        let old = no_queries.replace(true);
         let result = f();
         no_queries.set(old);
         result
@@ -78,8 +77,7 @@ pub fn with_no_queries<F: FnOnce() -> R, R>(f: F) -> R {
 /// so this variable disables that check.
 pub fn with_forced_impl_filename_line<F: FnOnce() -> R, R>(f: F) -> R {
     FORCE_IMPL_FILENAME_LINE.with(|force| {
-        let old = force.get();
-        force.set(true);
+        let old = force.replace(true);
         let result = f();
         force.set(old);
         result
@@ -89,8 +87,7 @@ pub fn with_forced_impl_filename_line<F: FnOnce() -> R, R>(f: F) -> R {
 /// Adds the `crate::` prefix to paths where appropriate.
 pub fn with_crate_prefix<F: FnOnce() -> R, R>(f: F) -> R {
     SHOULD_PREFIX_WITH_CRATE.with(|flag| {
-        let old = flag.get();
-        flag.set(true);
+        let old = flag.replace(true);
         let result = f();
         flag.set(old);
         result
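The three rewrites above are instances of one scoped-flag pattern: save a thread-local flag, force it for the duration of a closure, then restore it, with `Cell::replace` doing the save-and-set in a single call. A self-contained sketch of that pattern; the `FLAG` thread-local and `with_flag` helper are illustrative names, not the compiler's:

```rust
use std::cell::Cell;

thread_local! {
    // Illustrative stand-in for thread-locals such as NO_QUERIES above.
    static FLAG: Cell<bool> = Cell::new(false);
}

/// Runs `f` with the flag forced to `true`, restoring the old value afterwards.
fn with_flag<R>(f: impl FnOnce() -> R) -> R {
    FLAG.with(|flag| {
        // `replace` stores the new value and returns the old one in one call,
        // folding the former `get()` + `set(true)` pair into a single line.
        let old = flag.replace(true);
        let result = f();
        flag.set(old);
        result
    })
}

fn main() {
    assert!(!FLAG.with(|f| f.get()));
    with_flag(|| assert!(FLAG.with(|f| f.get())));
    assert!(!FLAG.with(|f| f.get()));
}
```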
@@ -515,12 +515,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             return;
         }

-        // For normal codegen, this Miri-specific intrinsic is just a NOP.
+        // For normal codegen, this Miri-specific intrinsic should never occur.
         if intrinsic == Some("miri_start_panic") {
-            let target = destination.as_ref().unwrap().1;
-            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
-            helper.funclet_br(self, &mut bx, target);
-            return;
+            bug!("`miri_start_panic` should never end up in compiled code");
         }

         // Emit a panic or a no-op for `panic_if_uninhabited`.
@@ -1,5 +1,7 @@
-This error indicates that a binary assignment operator like `+=` or `^=` was
-applied to a type that doesn't support it. For example:
+A binary assignment operator like `+=` or `^=` was applied to a type that
+doesn't support it.

+Erroneous code example:
+
 ```compile_fail,E0368
 let mut x = 12f32; // error: binary operation `<<` cannot be applied to
@@ -1,4 +1,5 @@
 A binary operation was attempted on a type which doesn't support it.
+
 Erroneous code example:

 ```compile_fail,E0369
@@ -730,8 +730,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
     where
         F: FnOnce(&Self) -> R,
     {
-        let flag = self.in_snapshot.get();
-        self.in_snapshot.set(false);
+        let flag = self.in_snapshot.replace(false);
         let result = func(self);
         self.in_snapshot.set(flag);
         result
@@ -740,8 +739,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
     fn start_snapshot(&self) -> CombinedSnapshot<'a, 'tcx> {
         debug!("start_snapshot()");

-        let in_snapshot = self.in_snapshot.get();
-        self.in_snapshot.set(true);
+        let in_snapshot = self.in_snapshot.replace(true);

         let mut inner = self.inner.borrow_mut();
         CombinedSnapshot {
@@ -104,7 +104,7 @@ where
     ) -> Self {
         let bits_per_block = analysis.bits_per_block(body);

-        let bottom_value_set = if A::BOTTOM_VALUE == true {
+        let bottom_value_set = if A::BOTTOM_VALUE {
             BitSet::new_filled(bits_per_block)
         } else {
             BitSet::new_empty(bits_per_block)
@@ -821,7 +821,7 @@ where
         let bits_per_block = denotation.bits_per_block();
         let num_blocks = body.basic_blocks().len();

-        let on_entry = if D::BOTTOM_VALUE == true {
+        let on_entry = if D::BOTTOM_VALUE {
             vec![BitSet::new_filled(bits_per_block); num_blocks]
         } else {
             vec![BitSet::new_empty(bits_per_block); num_blocks]
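The change above, like several of the surrounding hunks, drops an explicit comparison against a boolean literal. A minimal before-and-after sketch (variable names illustrative; this is also the rewrite clippy's `bool_comparison` lint suggests):

```rust
fn main() {
    let enabled = false;

    // Before: redundant comparison against a literal.
    if enabled == false {
        println!("disabled (verbose form)");
    }

    // After: plain negation says the same thing.
    if !enabled {
        println!("disabled");
    }
}
```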
@@ -1171,13 +1171,13 @@ impl<'a> Parser<'a> {
         let comma_after_doc_seen = self.eat(&token::Comma);
         // `seen_comma` is always false, because we are inside doc block
         // condition is here to make code more readable
-        if seen_comma == false && comma_after_doc_seen == true {
+        if !seen_comma && comma_after_doc_seen {
             seen_comma = true;
         }
         if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) {
             err.emit();
         } else {
-            if seen_comma == false {
+            if !seen_comma {
                 let sp = self.sess.source_map().next_point(previous_span);
                 err.span_suggestion(
                     sp,
@@ -444,7 +444,7 @@ impl<'a> LateResolutionVisitor<'a, '_, '_> {
             PathSource::Expr(Some(parent)) => {
                 suggested = path_sep(err, &parent);
             }
-            PathSource::Expr(None) if followed_by_brace == true => {
+            PathSource::Expr(None) if followed_by_brace => {
                 if let Some((sp, snippet)) = closing_brace {
                     err.span_suggestion(
                         sp,
@@ -495,7 +495,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             Some(hir_id) => hir_id,
             None => return false,
         };
-        if self.tcx.has_typeck_tables(def_id) == false {
+        if !self.tcx.has_typeck_tables(def_id) {
             return false;
         }
         let fn_sig = {
@@ -512,7 +512,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             Some(hir_id) => hir_id,
             None => return false,
         };
-        if self.tcx.has_typeck_tables(def_id) == false {
+        if !self.tcx.has_typeck_tables(def_id) {
             return false;
         }
         match self.tcx.typeck_tables_of(def_id).liberated_fn_sigs().get(hir_id) {
@@ -465,7 +465,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for SummaryLine<'a, I> {
             }
             _ => true,
         };
-        return if is_allowed_tag == false {
+        return if !is_allowed_tag {
             if is_start {
                 Some(Event::Start(Tag::Paragraph))
             } else {
|
||||||
"" => {}
|
"" => {}
|
||||||
"should_panic" => {
|
"should_panic" => {
|
||||||
data.should_panic = true;
|
data.should_panic = true;
|
||||||
seen_rust_tags = seen_other_tags == false;
|
seen_rust_tags = !seen_other_tags;
|
||||||
}
|
}
|
||||||
"no_run" => {
|
"no_run" => {
|
||||||
data.no_run = true;
|
data.no_run = true;
|
||||||
|
|
|
@@ -4049,7 +4049,7 @@ fn get_next_url(used_links: &mut FxHashSet<String>, url: String) -> String {
         return url;
     }
     let mut add = 1;
-    while used_links.insert(format!("{}-{}", url, add)) == false {
+    while !used_links.insert(format!("{}-{}", url, add)) {
         add += 1;
     }
     format!("{}-{}", url, add)
@@ -340,12 +340,12 @@ pub fn look_for_tests<'tcx>(

     find_testable_code(&dox, &mut tests, ErrorCodes::No, false);

-    if check_missing_code == true && tests.found_tests == 0 {
+    if check_missing_code && tests.found_tests == 0 {
         let sp = span_of_attrs(&item.attrs).unwrap_or(item.source.span());
         cx.tcx.struct_span_lint_hir(lint::builtin::MISSING_DOC_CODE_EXAMPLES, hir_id, sp, |lint| {
             lint.build("missing code example in this documentation").emit()
         });
-    } else if check_missing_code == false
+    } else if !check_missing_code
         && tests.found_tests > 0
         && !cx.renderinfo.borrow().access_levels.is_public(item.def_id)
     {
@@ -253,9 +253,9 @@ pub fn get_differences(against: &CssPath, other: &CssPath, v: &mut Vec<String>)
                 break;
             }
         }
-        if found == false {
+        if !found {
             v.push(format!(" Missing \"{}\" rule", child.name));
-        } else if found_working == false {
+        } else if !found_working {
             v.extend(tmp.iter().cloned());
         }
     }