Auto merge of #82552 - GuillaumeGomez:rollup-8dn1ztn, r=GuillaumeGomez
Rollup of 8 pull requests

Successful merges:

 - #81940 (Stabilize str_split_once)
 - #82165 (Reword labels on E0308 involving async fn return type)
 - #82456 (Replaced some unwrap_or and map_or with lazy variants)
 - #82491 (Consider inexpensive inlining criteria first)
 - #82506 (Properly account for non-shorthand pattern field in unused variable lint)
 - #82535 (Set codegen thread names)
 - #82545 (rustdoc: add optional woff2 versions of FiraSans.)
 - #82549 (Revert "Update normalize.css to 8.0.1")

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit 9c09c1f7cf
53 changed files with 391 additions and 256 deletions
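Note: several hunks in this diff come from #82456 and mechanically replace eager defaults (`unwrap_or`, `map_or`) with their lazy counterparts (`unwrap_or_else`, `map_or_else`), so the fallback value is only built when it is actually needed. A minimal illustration of the difference (not taken from this diff):

```rust
fn describe(name: Option<&str>) -> String {
    // `map_or` always evaluates its default argument, even when `name` is `Some`.
    let eager = name.map_or(String::new(), |n| format!("hello {}", n));

    // `map_or_else` only calls the default closure on the `None` path,
    // so no throwaway `String` is built when a name is present.
    let lazy = name.map_or_else(String::new, |n| format!("hello {}", n));

    assert_eq!(eager, lazy);
    lazy
}

fn main() {
    println!("{}", describe(Some("world")));
}
```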
@@ -5,8 +5,7 @@
associated_type_bounds,
never_type,
try_blocks,
- hash_drain_filter,
- str_split_once
+ hash_drain_filter
)]
#![warn(rust_2018_idioms)]
#![warn(unused_lifetimes)]

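The hunk above drops the `str_split_once` feature gate, which is no longer needed once #81940 stabilizes `str::split_once` and `str::rsplit_once`. A small usage sketch of the stabilized API (illustrative, not from this diff):

```rust
fn main() {
    // `split_once` splits at the first occurrence of the delimiter.
    assert_eq!("KEY=VALUE=rest".split_once('='), Some(("KEY", "VALUE=rest")));
    // `rsplit_once` splits at the last occurrence.
    assert_eq!("archive.tar.gz".rsplit_once('.'), Some(("archive.tar", "gz")));
    // When the delimiter is absent, both return `None`.
    assert_eq!("no-delimiter".split_once('='), None);
}
```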
@@ -2372,7 +2372,7 @@ fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> &'ll DIAr
fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> {
let mut names = generics
.parent
- .map_or(vec![], |def_id| get_parameter_names(cx, cx.tcx.generics_of(def_id)));
+ .map_or_else(Vec::new, |def_id| get_parameter_names(cx, cx.tcx.generics_of(def_id)));
names.extend(generics.params.iter().map(|param| param.name));
names
}

@@ -481,9 +481,9 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
}

fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> {
- let mut names = generics
- .parent
- .map_or(vec![], |def_id| get_parameter_names(cx, cx.tcx.generics_of(def_id)));
+ let mut names = generics.parent.map_or_else(Vec::new, |def_id| {
+ get_parameter_names(cx, cx.tcx.generics_of(def_id))
+ });
names.extend(generics.params.iter().map(|param| param.name));
names
}

@@ -65,8 +65,8 @@ fn search_meta_section<'a>(
while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
let mut name_buf = None;
let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
- let name = name_buf.map_or(
- String::new(), // We got a NULL ptr, ignore `name_len`.
+ let name = name_buf.map_or_else(
+ String::new, // We got a NULL ptr, ignore `name_len`.
|buf| {
String::from_utf8(
slice::from_raw_parts(buf.as_ptr() as *const u8, name_len as usize)

@@ -2082,7 +2082,7 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
let filestem = cratepath.file_stem().unwrap().to_str().unwrap();
cmd.link_rust_dylib(
Symbol::intern(&unlib(&sess.target, filestem)),
- parent.unwrap_or(Path::new("")),
+ parent.unwrap_or_else(|| Path::new("")),
);
}
}

@@ -712,6 +712,33 @@ impl<B: WriteBackendMethods> WorkItem<B> {
}
}
}
+
+ /// Generate a short description of this work item suitable for use as a thread name.
+ fn short_description(&self) -> String {
+ // `pthread_setname()` on *nix is limited to 15 characters and longer names are ignored.
+ // Use very short descriptions in this case to maximize the space available for the module name.
+ // Windows does not have that limitation so use slightly more descriptive names there.
+ match self {
+ WorkItem::Optimize(m) => {
+ #[cfg(windows)]
+ return format!("optimize module {}", m.name);
+ #[cfg(not(windows))]
+ return format!("opt {}", m.name);
+ }
+ WorkItem::CopyPostLtoArtifacts(m) => {
+ #[cfg(windows)]
+ return format!("copy LTO artifacts for {}", m.name);
+ #[cfg(not(windows))]
+ return format!("copy {}", m.name);
+ }
+ WorkItem::LTO(m) => {
+ #[cfg(windows)]
+ return format!("LTO module {}", m.name());
+ #[cfg(not(windows))]
+ return format!("LTO {}", m.name());
+ }
+ }
+ }
}

enum WorkItemResult<B: WriteBackendMethods> {

@@ -1609,56 +1636,59 @@ fn start_executing_work<B: ExtraBackendMethods>(
pub struct WorkerFatalError;

fn spawn_work<B: ExtraBackendMethods>(cgcx: CodegenContext<B>, work: WorkItem<B>) {
- thread::spawn(move || {
- // Set up a destructor which will fire off a message that we're done as
- // we exit.
- struct Bomb<B: ExtraBackendMethods> {
- coordinator_send: Sender<Box<dyn Any + Send>>,
- result: Option<Result<WorkItemResult<B>, FatalError>>,
- worker_id: usize,
- }
- impl<B: ExtraBackendMethods> Drop for Bomb<B> {
- fn drop(&mut self) {
- let worker_id = self.worker_id;
- let msg = match self.result.take() {
- Some(Ok(WorkItemResult::Compiled(m))) => {
- Message::Done::<B> { result: Ok(m), worker_id }
- }
- Some(Ok(WorkItemResult::NeedsLink(m))) => {
- Message::NeedsLink::<B> { module: m, worker_id }
- }
- Some(Ok(WorkItemResult::NeedsFatLTO(m))) => {
- Message::NeedsFatLTO::<B> { result: m, worker_id }
- }
- Some(Ok(WorkItemResult::NeedsThinLTO(name, thin_buffer))) => {
- Message::NeedsThinLTO::<B> { name, thin_buffer, worker_id }
- }
- Some(Err(FatalError)) => {
- Message::Done::<B> { result: Err(Some(WorkerFatalError)), worker_id }
- }
- None => Message::Done::<B> { result: Err(None), worker_id },
- };
- drop(self.coordinator_send.send(Box::new(msg)));
+ let builder = thread::Builder::new().name(work.short_description());
+ builder
+ .spawn(move || {
+ // Set up a destructor which will fire off a message that we're done as
+ // we exit.
+ struct Bomb<B: ExtraBackendMethods> {
+ coordinator_send: Sender<Box<dyn Any + Send>>,
+ result: Option<Result<WorkItemResult<B>, FatalError>>,
+ worker_id: usize,
+ }
+ impl<B: ExtraBackendMethods> Drop for Bomb<B> {
+ fn drop(&mut self) {
+ let worker_id = self.worker_id;
+ let msg = match self.result.take() {
+ Some(Ok(WorkItemResult::Compiled(m))) => {
+ Message::Done::<B> { result: Ok(m), worker_id }
+ }
+ Some(Ok(WorkItemResult::NeedsLink(m))) => {
+ Message::NeedsLink::<B> { module: m, worker_id }
+ }
+ Some(Ok(WorkItemResult::NeedsFatLTO(m))) => {
+ Message::NeedsFatLTO::<B> { result: m, worker_id }
+ }
+ Some(Ok(WorkItemResult::NeedsThinLTO(name, thin_buffer))) => {
+ Message::NeedsThinLTO::<B> { name, thin_buffer, worker_id }
+ }
+ Some(Err(FatalError)) => {
+ Message::Done::<B> { result: Err(Some(WorkerFatalError)), worker_id }
+ }
+ None => Message::Done::<B> { result: Err(None), worker_id },
+ };
+ drop(self.coordinator_send.send(Box::new(msg)));
}
}
}

- let mut bomb = Bomb::<B> {
- coordinator_send: cgcx.coordinator_send.clone(),
- result: None,
- worker_id: cgcx.worker,
- };
+ let mut bomb = Bomb::<B> {
+ coordinator_send: cgcx.coordinator_send.clone(),
+ result: None,
+ worker_id: cgcx.worker,
+ };

- // Execute the work itself, and if it finishes successfully then flag
- // ourselves as a success as well.
- //
- // Note that we ignore any `FatalError` coming out of `execute_work_item`,
- // as a diagnostic was already sent off to the main thread - just
- // surface that there was an error in this worker.
- bomb.result = {
- let _prof_timer = work.start_profiling(&cgcx);
- Some(execute_work_item(&cgcx, work))
- };
- });
+ // Execute the work itself, and if it finishes successfully then flag
+ // ourselves as a success as well.
+ //
+ // Note that we ignore any `FatalError` coming out of `execute_work_item`,
+ // as a diagnostic was already sent off to the main thread - just
+ // surface that there was an error in this worker.
+ bomb.result = {
+ let _prof_timer = work.start_profiling(&cgcx);
+ Some(execute_work_item(&cgcx, work))
+ };
+ })
+ .expect("failed to spawn thread");
}

enum SharedEmitterMessage {

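For context, #82535 names each codegen worker thread after the work item it runs (via `short_description` above), which makes the threads easier to tell apart in debuggers and profilers. A minimal sketch of the same `std::thread::Builder` pattern; the thread name here is an assumed example:

```rust
use std::thread;

fn main() {
    let handle = thread::Builder::new()
        // Keep the name short: many *nix systems cap thread names at 15 bytes.
        .name("opt mycrate".to_string())
        .spawn(|| {
            assert_eq!(thread::current().name(), Some("opt mycrate"));
            // ... run the work item here ...
        })
        .expect("failed to spawn thread");
    handle.join().unwrap();
}
```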
@@ -1484,13 +1484,16 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
for (key, values) in types.iter() {
let count = values.len();
let kind = key.descr();
+ let mut returned_async_output_error = false;
for sp in values {
err.span_label(
*sp,
format!(
"{}{}{} {}{}",
- if sp.is_desugaring(DesugaringKind::Async) {
- "the `Output` of this `async fn`'s "
+ if sp.is_desugaring(DesugaringKind::Async)
+ && !returned_async_output_error
+ {
+ "checked the `Output` of this `async fn`, "
} else if count == 1 {
"the "
} else {

@@ -1502,6 +1505,12 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pluralize!(count),
),
);
+ if sp.is_desugaring(DesugaringKind::Async)
+ && returned_async_output_error == false
+ {
+ err.note("while checking the return type of the `async fn`");
+ returned_async_output_error = true;
+ }
}
}
}

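The two hunks above (#82165) reword the E0308 labels that point at an `async fn`'s desugared `Output` type and emit the explanatory note only once instead of per span. Code of roughly this shape exercises that diagnostic (an assumed, minimal example, not taken from the test suite):

```rust
async fn answer() -> u8 {
    42
}

fn main() {
    // error[E0308]: mismatched types. `answer()` is an `impl Future<Output = u8>`,
    // not a `u8`; the reworded label explains that the compiler checked the
    // `Output` of the `async fn` here.
    let _: u8 = answer();
}
```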
@@ -201,7 +201,7 @@ fn check_panic_str<'tcx>(
Some(v) if v.len() == 1 => "panic message contains a brace",
_ => "panic message contains braces",
};
- cx.struct_span_lint(NON_FMT_PANIC, brace_spans.unwrap_or(vec![span]), |lint| {
+ cx.struct_span_lint(NON_FMT_PANIC, brace_spans.unwrap_or_else(|| vec![span]), |lint| {
let mut l = lint.build(msg);
l.note("this message is not used as a format string, but will be in Rust 2021");
if span.contains(arg.span) {

@@ -378,14 +378,14 @@ fn add_query_description_impl(
let t = &(t.0).0;
quote! { #t }
})
- .unwrap_or(quote! { _ });
+ .unwrap_or_else(|| quote! { _ });
let value = args
.as_ref()
.map(|t| {
let t = &(t.1).0;
quote! { #t }
})
- .unwrap_or(quote! { _ });
+ .unwrap_or_else(|| quote! { _ });
// expr is a `Block`, meaning that `{ #expr }` gets expanded
// to `{ { stmts... } }`, which triggers the `unused_braces` lint.
quote! {

@@ -409,7 +409,7 @@ fn add_query_description_impl(
};

let (tcx, desc) = modifiers.desc;
- let tcx = tcx.as_ref().map_or(quote! { _ }, |t| quote! { #t });
+ let tcx = tcx.as_ref().map_or_else(|| quote! { _ }, |t| quote! { #t });

let desc = quote! {
#[allow(unused_variables)]

@@ -473,9 +473,9 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
.map(
|applicability_idx| quote!(#binding.#applicability_idx),
)
- .unwrap_or(quote!(
- rustc_errors::Applicability::Unspecified
- ));
+ .unwrap_or_else(|| {
+ quote!(rustc_errors::Applicability::Unspecified)
+ });
return Ok((span, applicability));
}
throw_span_err!(

@@ -50,7 +50,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(

let name =
with_no_trimmed_paths(|| ty::tls::with(|tcx| tcx.def_path_str(cid.instance.def_id())));
- let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
+ let prom = cid.promoted.map_or_else(String::new, |p| format!("::promoted[{:?}]", p));
trace!("eval_body_using_ecx: pushing stack frame for global: {}{}", name, prom);

ecx.push_stack_frame(

@@ -28,7 +28,6 @@ Rust MIR: a lowered representation of Rust.
#![feature(or_patterns)]
#![feature(once_cell)]
#![feature(control_flow_enum)]
- #![feature(str_split_once)]
#![recursion_limit = "256"]

#[macro_use]

@@ -1,6 +1,6 @@
//! Inlining pass for MIR functions

- use rustc_attr as attr;
+ use rustc_attr::InlineAttr;
use rustc_hir as hir;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::Idx;

@@ -106,72 +106,90 @@ struct Inliner<'tcx> {
impl Inliner<'tcx> {
fn process_blocks(&mut self, caller_body: &mut Body<'tcx>, blocks: Range<BasicBlock>) {
for bb in blocks {
- let callsite = match self.get_valid_function_call(bb, &caller_body[bb], caller_body) {
+ let bb_data = &caller_body[bb];
+ if bb_data.is_cleanup {
+ continue;
+ }
+
+ let callsite = match self.resolve_callsite(caller_body, bb, bb_data) {
None => continue,
Some(it) => it,
};

let span = trace_span!("process_blocks", %callsite.callee, ?bb);
let _guard = span.enter();

- trace!(
- "checking for self recursion ({:?} vs body_source: {:?})",
- callsite.callee.def_id(),
- caller_body.source.def_id()
- );
- if callsite.callee.def_id() == caller_body.source.def_id() {
- debug!("Not inlining a function into itself");
- continue;
+ match self.try_inlining(caller_body, &callsite) {
+ Err(reason) => {
+ debug!("not-inlined {} [{}]", callsite.callee, reason);
+ continue;
+ }
+ Ok(new_blocks) => {
+ debug!("inlined {}", callsite.callee);
+ self.changed = true;
+ self.history.push(callsite.callee);
+ self.process_blocks(caller_body, new_blocks);
+ self.history.pop();
+ }
}
}

- if !self.is_mir_available(callsite.callee, caller_body) {
- debug!("MIR unavailable {}", callsite.callee);
- continue;
- }
-
- let span = trace_span!("instance_mir", %callsite.callee);
- let instance_mir_guard = span.enter();
- let callee_body = self.tcx.instance_mir(callsite.callee.def);
- drop(instance_mir_guard);
- if !self.should_inline(callsite, callee_body) {
- continue;
- }
-
- if !self.tcx.consider_optimizing(|| {
- format!("Inline {:?} into {}", callee_body.span, callsite.callee)
- }) {
- return;
- }
-
- let callee_body = callsite.callee.subst_mir_and_normalize_erasing_regions(
- self.tcx,
- self.param_env,
- callee_body.clone(),
- );
-
- let old_blocks = caller_body.basic_blocks().next_index();
- self.inline_call(callsite, caller_body, callee_body);
- let new_blocks = old_blocks..caller_body.basic_blocks().next_index();
- self.changed = true;
-
- self.history.push(callsite.callee);
- self.process_blocks(caller_body, new_blocks);
- self.history.pop();
}
}

- #[instrument(level = "debug", skip(self, caller_body))]
- fn is_mir_available(&self, callee: Instance<'tcx>, caller_body: &Body<'tcx>) -> bool {
+ /// Attempts to inline a callsite into the caller body. When successful returns basic blocks
+ /// containing the inlined body. Otherwise returns an error describing why inlining didn't take
+ /// place.
+ fn try_inlining(
+ &self,
+ caller_body: &mut Body<'tcx>,
+ callsite: &CallSite<'tcx>,
+ ) -> Result<std::ops::Range<BasicBlock>, &'static str> {
+ let callee_attrs = self.tcx.codegen_fn_attrs(callsite.callee.def_id());
+ self.check_codegen_attributes(callsite, callee_attrs)?;
+ self.check_mir_is_available(caller_body, &callsite.callee)?;
+ let callee_body = self.tcx.instance_mir(callsite.callee.def);
+ self.check_mir_body(callsite, callee_body, callee_attrs)?;
+
+ if !self.tcx.consider_optimizing(|| {
+ format!("Inline {:?} into {}", callee_body.span, callsite.callee)
+ }) {
+ return Err("optimization fuel exhausted");
+ }
+
+ let callee_body = callsite.callee.subst_mir_and_normalize_erasing_regions(
+ self.tcx,
+ self.param_env,
+ callee_body.clone(),
+ );
+
+ let old_blocks = caller_body.basic_blocks().next_index();
+ self.inline_call(caller_body, &callsite, callee_body);
+ let new_blocks = old_blocks..caller_body.basic_blocks().next_index();
+
+ Ok(new_blocks)
+ }
+
+ fn check_mir_is_available(
+ &self,
+ caller_body: &Body<'tcx>,
+ callee: &Instance<'tcx>,
+ ) -> Result<(), &'static str> {
+ if callee.def_id() == caller_body.source.def_id() {
+ return Err("self-recursion");
+ }
+
match callee.def {
InstanceDef::Item(_) => {
// If there is no MIR available (either because it was not in metadata or
// because it has no MIR because it's an extern function), then the inliner
// won't cause cycles on this.
if !self.tcx.is_mir_available(callee.def_id()) {
- return false;
+ return Err("item MIR unavailable");
}
}
// These have no own callable MIR.
- InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => return false,
+ InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => {
+ return Err("instance without MIR (intrinsic / virtual)");
+ }
// This cannot result in an immediate cycle since the callee MIR is a shim, which does
// not get any optimizations run on it. Any subsequent inlining may cause cycles, but we
// do not need to catch this here, we can wait until the inliner decides to continue

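The refactor above (#82491) expresses each inlining check as a `Result<(), &'static str>`, so inexpensive rejections (codegen attributes, MIR availability) run before the costly MIR body analysis, and the rejection reason is available for the caller to log. A much simplified sketch of that control flow; the names and checks below are assumed placeholders, not the compiler's actual criteria:

```rust
struct Callee {
    inline_never: bool,
    is_cold: bool,
    body_cost: u32,
}

fn check_codegen_attributes(callee: &Callee) -> Result<(), &'static str> {
    // Inexpensive checks first: no MIR body needs to be examined for these.
    if callee.inline_never {
        return Err("never inline hint");
    }
    if callee.is_cold {
        return Err("cold");
    }
    Ok(())
}

fn check_body(callee: &Callee, threshold: u32) -> Result<(), &'static str> {
    if callee.body_cost > threshold {
        return Err("cost above threshold");
    }
    Ok(())
}

fn try_inlining(callee: &Callee) -> Result<(), &'static str> {
    check_codegen_attributes(callee)?; // cheap criteria considered first
    check_body(callee, 50)?;           // only then examine the body
    Ok(())
}

fn main() {
    let callee = Callee { inline_never: false, is_cold: true, body_cost: 10 };
    match try_inlining(&callee) {
        Ok(()) => println!("inlined"),
        Err(reason) => println!("not-inlined [{}]", reason),
    }
}
```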
@@ -181,13 +199,13 @@ impl Inliner<'tcx> {
| InstanceDef::FnPtrShim(..)
| InstanceDef::ClosureOnceShim { .. }
| InstanceDef::DropGlue(..)
- | InstanceDef::CloneShim(..) => return true,
+ | InstanceDef::CloneShim(..) => return Ok(()),
}

if self.tcx.is_constructor(callee.def_id()) {
trace!("constructors always have MIR");
// Constructor functions cannot cause a query cycle.
- return true;
+ return Ok(());
}

if let Some(callee_def_id) = callee.def_id().as_local() {

@@ -196,39 +214,44 @@ impl Inliner<'tcx> {
// since their `optimized_mir` is used for layout computation, which can
// create a cycle, even when no attempt is made to inline the function
// in the other direction.
- caller_body.generator_kind.is_none()
- && (
- // Avoid a cycle here by only using `instance_mir` only if we have
- // a lower `HirId` than the callee. This ensures that the callee will
- // not inline us. This trick only works without incremental compilation.
- // So don't do it if that is enabled.
- !self.tcx.dep_graph.is_fully_enabled()
- && self.hir_id < callee_hir_id
- // If we know for sure that the function we're calling will itself try to
- // call us, then we avoid inlining that function.
- || !self.tcx.mir_callgraph_reachable((callee, caller_body.source.def_id().expect_local()))
- )
+ if caller_body.generator_kind.is_some() {
+ return Err("local generator (query cycle avoidance)");
+ }
+
+ // Avoid a cycle here by only using `instance_mir` only if we have
+ // a lower `HirId` than the callee. This ensures that the callee will
+ // not inline us. This trick only works without incremental compilation.
+ // So don't do it if that is enabled.
+ if !self.tcx.dep_graph.is_fully_enabled() && self.hir_id < callee_hir_id {
+ return Ok(());
+ }
+
+ // If we know for sure that the function we're calling will itself try to
+ // call us, then we avoid inlining that function.
+ if self
+ .tcx
+ .mir_callgraph_reachable((*callee, caller_body.source.def_id().expect_local()))
+ {
+ return Err("caller might be reachable from callee (query cycle avoidance)");
+ }
+
+ Ok(())
} else {
// This cannot result in an immediate cycle since the callee MIR is from another crate
// and is already optimized. Any subsequent inlining may cause cycles, but we do
// not need to catch this here, we can wait until the inliner decides to continue
// inlining a second time.
trace!("functions from other crates always have MIR");
- true
+ Ok(())
}
}

- fn get_valid_function_call(
+ fn resolve_callsite(
&self,
+ caller_body: &Body<'tcx>,
bb: BasicBlock,
bb_data: &BasicBlockData<'tcx>,
- caller_body: &Body<'tcx>,
) -> Option<CallSite<'tcx>> {
- // Don't inline calls that are in cleanup blocks.
- if bb_data.is_cleanup {
- return None;
- }
-
// Only consider direct calls to functions
let terminator = bb_data.terminator();
if let TerminatorKind::Call { ref func, ref destination, .. } = terminator.kind {

@@ -258,73 +281,73 @@ impl Inliner<'tcx> {
None
}

- #[instrument(level = "debug", skip(self, callee_body))]
- fn should_inline(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool {
- let tcx = self.tcx;
-
- if callsite.fn_sig.c_variadic() {
- debug!("callee is variadic - not inlining");
- return false;
+ /// Returns an error if inlining is not possible based on codegen attributes alone. A success
+ /// indicates that inlining decision should be based on other criteria.
+ fn check_codegen_attributes(
+ &self,
+ callsite: &CallSite<'tcx>,
+ callee_attrs: &CodegenFnAttrs,
+ ) -> Result<(), &'static str> {
+ if let InlineAttr::Never = callee_attrs.inline {
+ return Err("never inline hint");
}

- let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee.def_id());
-
- let self_features = &self.codegen_fn_attrs.target_features;
- let callee_features = &codegen_fn_attrs.target_features;
- if callee_features.iter().any(|feature| !self_features.contains(feature)) {
- debug!("`callee has extra target features - not inlining");
- return false;
- }
-
- if self.codegen_fn_attrs.no_sanitize != codegen_fn_attrs.no_sanitize {
- debug!("`callee has incompatible no_sanitize attribute - not inlining");
- return false;
- }
-
- if self.codegen_fn_attrs.instruction_set != codegen_fn_attrs.instruction_set {
- debug!("`callee has incompatible instruction set - not inlining");
- return false;
- }
-
- let hinted = match codegen_fn_attrs.inline {
- // Just treat inline(always) as a hint for now,
- // there are cases that prevent inlining that we
- // need to check for first.
- attr::InlineAttr::Always => true,
- attr::InlineAttr::Never => {
- debug!("`#[inline(never)]` present - not inlining");
- return false;
- }
- attr::InlineAttr::Hint => true,
- attr::InlineAttr::None => false,
- };
-
// Only inline local functions if they would be eligible for cross-crate
// inlining. This is to ensure that the final crate doesn't have MIR that
// reference unexported symbols
if callsite.callee.def_id().is_local() {
- if callsite.callee.substs.non_erasable_generics().count() == 0 && !hinted {
- debug!(" callee is an exported function - not inlining");
- return false;
+ let is_generic = callsite.callee.substs.non_erasable_generics().next().is_some();
+ if !is_generic && !callee_attrs.requests_inline() {
+ return Err("not exported");
}
}

- let mut threshold = if hinted {
+ if callsite.fn_sig.c_variadic() {
+ return Err("C variadic");
+ }
+
+ if callee_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
+ return Err("naked");
+ }
+
+ if callee_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
+ return Err("cold");
+ }
+
+ if callee_attrs.no_sanitize != self.codegen_fn_attrs.no_sanitize {
+ return Err("incompatible sanitizer set");
+ }
+
+ if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set {
+ return Err("incompatible instruction set");
+ }
+
+ for feature in &callee_attrs.target_features {
+ if !self.codegen_fn_attrs.target_features.contains(feature) {
+ return Err("incompatible target feature");
+ }
+ }
+
+ Ok(())
+ }
+
+ /// Returns inlining decision that is based on the examination of callee MIR body.
+ /// Assumes that codegen attributes have been checked for compatibility already.
+ #[instrument(level = "debug", skip(self, callee_body))]
+ fn check_mir_body(
+ &self,
+ callsite: &CallSite<'tcx>,
+ callee_body: &Body<'tcx>,
+ callee_attrs: &CodegenFnAttrs,
+ ) -> Result<(), &'static str> {
+ let tcx = self.tcx;
+
+ let mut threshold = if callee_attrs.requests_inline() {
self.tcx.sess.opts.debugging_opts.inline_mir_hint_threshold
} else {
self.tcx.sess.opts.debugging_opts.inline_mir_threshold
};

- if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
- debug!("#[naked] present - not inlining");
- return false;
- }
-
- if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
- debug!("#[cold] present - not inlining");
- return false;
- }
-
// Give a bonus functions with a small number of blocks,
// We normally have two or three blocks for even
// very small functions.

@@ -393,11 +416,10 @@ impl Inliner<'tcx> {
if let Ok(Some(instance)) =
Instance::resolve(self.tcx, self.param_env, def_id, substs)
{
- if callsite.callee.def_id() == instance.def_id()
- || self.history.contains(&instance)
- {
- debug!("`callee is recursive - not inlining");
- return false;
+ if callsite.callee.def_id() == instance.def_id() {
+ return Err("self-recursion");
+ } else if self.history.contains(&instance) {
+ return Err("already inlined");
}
}
// Don't give intrinsics the extra penalty for calls

@@ -450,24 +472,24 @@ impl Inliner<'tcx> {
}
}

- if let attr::InlineAttr::Always = codegen_fn_attrs.inline {
+ if let InlineAttr::Always = callee_attrs.inline {
debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
- true
+ Ok(())
} else {
if cost <= threshold {
debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
- true
+ Ok(())
} else {
debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
- false
+ Err("cost above threshold")
}
}
}

fn inline_call(
&self,
- callsite: CallSite<'tcx>,
caller_body: &mut Body<'tcx>,
+ callsite: &CallSite<'tcx>,
mut callee_body: Body<'tcx>,
) {
let terminator = caller_body[callsite.block].terminator.take().unwrap();

@@ -223,7 +223,7 @@ impl<'a> Parser<'a> {
fn tokens_to_string(tokens: &[TokenType]) -> String {
let mut i = tokens.iter();
// This might be a sign we need a connect method on `Iterator`.
- let b = i.next().map_or(String::new(), |t| t.to_string());
+ let b = i.next().map_or_else(String::new, |t| t.to_string());
i.enumerate().fold(b, |mut b, (i, a)| {
if tokens.len() > 2 && i == tokens.len() - 2 {
b.push_str(", or ");

@@ -367,12 +367,17 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
}

fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) {
- let is_shorthand = matches!(param.pat.kind, rustc_hir::PatKind::Struct(..));
param.pat.each_binding(|_bm, hir_id, _x, ident| {
- let var = if is_shorthand {
- Local(LocalInfo { id: hir_id, name: ident.name, is_shorthand: true })
- } else {
- Param(hir_id, ident.name)
+ let var = match param.pat.kind {
+ rustc_hir::PatKind::Struct(_, fields, _) => Local(LocalInfo {
+ id: hir_id,
+ name: ident.name,
+ is_shorthand: fields
+ .iter()
+ .find(|f| f.ident == ident)
+ .map_or(false, |f| f.is_shorthand),
+ }),
+ _ => Param(hir_id, ident.name),
};
self.add_variable(var);
});

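The hunk above (#82506) records, per field, whether a binding in a struct pattern used the shorthand form, instead of marking every binding in a struct-pattern parameter as shorthand. A minimal program exercising the distinction (an assumed example):

```rust
struct Point {
    x: u32,
    y: u32,
}

// `x` is a shorthand field binding; `height` is a non-shorthand binding for `y`.
// Compiling this gives an `unused_variables` warning for `height`; with the change
// above, the lint no longer treats `height` as if it were a shorthand binding.
fn first_coord(Point { x, y: height }: Point) -> u32 {
    x
}

fn main() {
    println!("{}", first_coord(Point { x: 3, y: 7 }));
}
```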
@@ -1971,7 +1971,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
// Therefore, we would compute `object_lifetime_defaults` to a
// vector like `['x, 'static]`. Note that the vector only
// includes type parameters.
- let object_lifetime_defaults = type_def_id.map_or(vec![], |def_id| {
+ let object_lifetime_defaults = type_def_id.map_or_else(Vec::new, |def_id| {
let in_body = {
let mut scope = self.scope;
loop {

@@ -169,7 +169,7 @@ pub fn get_or_default_sysroot() -> PathBuf {

// Check if sysroot is found using env::args().next(), and if is not found,
// use env::current_exe() to imply sysroot.
- from_env_args_next().unwrap_or(from_current_exe())
+ from_env_args_next().unwrap_or_else(from_current_exe)
}

// The name of the directory rustc expects libraries to be located.

@@ -1,7 +1,6 @@
#![feature(crate_visibility_modifier)]
#![feature(once_cell)]
#![feature(or_patterns)]
- #![feature(str_split_once)]

#[macro_use]
extern crate bitflags;

@@ -22,7 +22,6 @@
#![feature(nll)]
#![feature(min_specialization)]
#![feature(option_expect_none)]
- #![feature(str_split_once)]

#[macro_use]
extern crate rustc_macros;

@@ -15,7 +15,6 @@
#![feature(never_type)]
#![feature(associated_type_bounds)]
#![feature(exhaustive_patterns)]
- #![feature(str_split_once)]

#[macro_use]
extern crate rustc_macros;

@@ -349,7 +349,7 @@ fn report_negative_positive_conflict(
E0751,
"found both positive and negative implementation of trait `{}`{}:",
overlap.trait_desc,
- overlap.self_desc.clone().map_or(String::new(), |ty| format!(" for type `{}`", ty))
+ overlap.self_desc.clone().map_or_else(String::new, |ty| format!(" for type `{}`", ty))
);

match tcx.span_of_impl(negative_impl_def_id) {

@@ -397,7 +397,10 @@ fn report_conflicting_impls(
let msg = format!(
"conflicting implementations of trait `{}`{}:{}",
overlap.trait_desc,
- overlap.self_desc.clone().map_or(String::new(), |ty| { format!(" for type `{}`", ty) }),
+ overlap
+ .self_desc
+ .clone()
+ .map_or_else(String::new, |ty| { format!(" for type `{}`", ty) }),
match used_to_be_allowed {
Some(FutureCompatOverlapErrorKind::Issue33140) => " (E0119)",
_ => "",

@@ -415,7 +418,7 @@ fn report_conflicting_impls(
impl_span,
format!(
"conflicting implementation{}",
- overlap.self_desc.map_or(String::new(), |ty| format!(" for `{}`", ty))
+ overlap.self_desc.map_or_else(String::new, |ty| format!(" for `{}`", ty))
),
);
}

@@ -1716,7 +1716,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
} else {
self.fcx
.associated_item(def_id, name, Namespace::ValueNS)
- .map_or(Vec::new(), |x| vec![x])
+ .map_or_else(Vec::new, |x| vec![x])
}
} else {
self.tcx.associated_items(def_id).in_definition_order().copied().collect()

@@ -1062,7 +1062,10 @@ fn report_unexpected_variant_res(tcx: TyCtxt<'_>, res: Res, span: Span) {
E0533,
"expected unit struct, unit variant or constant, found {}{}",
res.descr(),
- tcx.sess.source_map().span_to_snippet(span).map_or(String::new(), |s| format!(" `{}`", s)),
+ tcx.sess
+ .source_map()
+ .span_to_snippet(span)
+ .map_or_else(|_| String::new(), |s| format!(" `{}`", s)),
)
.emit();
}

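Note that `span_to_snippet` returns a `Result`, so the lazy fallback above has to be a closure that takes the error (`|_| String::new()`) rather than the plain `String::new` that suffices for `Option`. A tiny sketch of that distinction (illustrative only):

```rust
fn render_option(snippet: Option<String>) -> String {
    // On `Option`, the fallback closure takes no argument.
    snippet.map_or_else(String::new, |s| format!(" `{}`", s))
}

fn render_result(snippet: Result<String, ()>) -> String {
    // On `Result`, the fallback closure receives the error value.
    snippet.map_or_else(|_| String::new(), |s| format!(" `{}`", s))
}

fn main() {
    assert_eq!(render_option(Some("x".into())), " `x`");
    assert_eq!(render_result(Err(())), "");
}
```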
@@ -879,7 +879,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let sm = tcx.sess.source_map();
let path_str = sm
.span_to_snippet(sm.span_until_char(pat.span, '('))
- .map_or(String::new(), |s| format!(" `{}`", s.trim_end()));
+ .map_or_else(|_| String::new(), |s| format!(" `{}`", s.trim_end()));
let msg = format!(
"expected tuple struct or tuple variant, found {}{}",
res.descr(),

@@ -348,9 +348,9 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
let min_list_wb = min_list
.iter()
.map(|captured_place| {
- let locatable = captured_place.info.path_expr_id.unwrap_or(
- self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local()),
- );
+ let locatable = captured_place.info.path_expr_id.unwrap_or_else(|| {
+ self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local())
+ });

self.resolve(captured_place.clone(), &locatable)
})

@@ -2387,7 +2387,7 @@ fn compute_sig_of_foreign_fn_decl<'tcx>(
.sess
.source_map()
.span_to_snippet(ast_ty.span)
- .map_or(String::new(), |s| format!(" `{}`", s));
+ .map_or_else(|_| String::new(), |s| format!(" `{}`", s));
tcx.sess
.struct_span_err(
ast_ty.span,