// compiler/rustc_mir/src/transform/mod.rs — orchestration of the MIR transformation passes.

use crate::{shim, util};
2020-04-22 12:30:11 -03:00
use required_consts::RequiredConstsVisitor;
use rustc_data_structures::fx::FxHashSet;
2020-11-14 01:29:30 +01:00
use rustc_data_structures::steal::Steal;
use rustc_hir as hir;
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
2019-12-22 17:42:04 -05:00
use rustc_index::vec::IndexVec;
use rustc_middle::mir::visit::Visitor as _;
use rustc_middle::mir::{traversal, Body, ConstQualifs, MirPhase, Promoted};
2020-03-29 17:19:48 +02:00
use rustc_middle::ty::query::Providers;
2020-10-04 11:01:13 -07:00
use rustc_middle::ty::{self, TyCtxt, TypeFoldable};
2020-04-19 13:00:18 +02:00
use rustc_span::{Span, Symbol};
use std::borrow::Cow;
2019-12-22 17:42:04 -05:00
pub mod add_call_guards;
pub mod add_moves_for_packed_drops;
2019-12-22 17:42:04 -05:00
pub mod add_retag;
pub mod check_const_item_mutation;
2019-09-17 16:25:40 -07:00
pub mod check_consts;
pub mod check_packed_ref;
pub mod check_unsafety;
2019-12-22 17:42:04 -05:00
pub mod cleanup_post_borrowck;
pub mod const_debuginfo;
2019-12-22 17:42:04 -05:00
pub mod const_prop;
pub mod coverage;
2019-12-22 17:42:04 -05:00
pub mod deaggregator;
pub mod dest_prop;
2019-12-22 17:42:04 -05:00
pub mod dump_mir;
pub mod early_otherwise_branch;
2019-12-22 17:42:04 -05:00
pub mod elaborate_drops;
pub mod function_item_references;
2019-12-22 17:42:04 -05:00
pub mod generator;
pub mod inline;
pub mod instcombine;
pub mod lower_intrinsics;
pub mod match_branches;
pub mod multiple_return_terminators;
pub mod no_landing_pads;
2020-05-14 10:11:15 -07:00
pub mod nrvo;
2016-05-07 19:14:28 +03:00
pub mod promote_consts;
pub mod remove_noop_landing_pads;
pub mod remove_unneeded_drops;
pub mod required_consts;
2019-12-22 17:42:04 -05:00
pub mod rustc_peek;
pub mod simplify;
pub mod simplify_branches;
pub mod simplify_comparison_integral;
2019-12-22 17:42:04 -05:00
pub mod simplify_try;
pub mod uninhabited_enum_branching;
pub mod unreachable_prop;
2020-05-24 00:55:44 +02:00
pub mod validate;
2020-10-04 11:01:13 -07:00
pub use rustc_middle::mir::MirSource;
pub(crate) fn provide(providers: &mut Providers) {
self::check_unsafety::provide(providers);
*providers = Providers {
2017-05-02 06:32:03 -04:00
mir_keys,
mir_const,
mir_const_qualif: |tcx, def_id| {
let def_id = def_id.expect_local();
2020-07-21 22:54:18 +02:00
if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
tcx.mir_const_qualif_const_arg(def)
} else {
mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
}
2020-07-06 23:49:53 +02:00
},
2020-07-08 01:03:19 +02:00
mir_const_qualif_const_arg: |tcx, (did, param_did)| {
2020-07-15 10:50:54 +02:00
mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
2020-07-06 23:49:53 +02:00
},
mir_promoted,
mir_drops_elaborated_and_const_checked,
mir_for_ctfe,
mir_for_ctfe_of_const_arg,
2017-05-02 06:32:03 -04:00
optimized_mir,
is_mir_available,
is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
2020-07-06 23:49:53 +02:00
promoted_mir: |tcx, def_id| {
let def_id = def_id.expect_local();
2020-07-21 22:54:18 +02:00
if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
tcx.promoted_mir_of_const_arg(def)
} else {
promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
}
2020-07-06 23:49:53 +02:00
},
2020-07-08 10:35:58 +02:00
promoted_mir_of_const_arg: |tcx, (did, param_did)| {
2020-07-15 10:50:54 +02:00
promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
2020-07-06 23:49:53 +02:00
},
..*providers
};
coverage::query::provide(providers);
}
2019-06-21 18:12:39 +02:00
fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
tcx.mir_keys(def_id.krate).contains(&def_id.expect_local())
2017-05-02 06:32:03 -04:00
}
2019-02-08 14:53:55 +01:00
/// Finds the full set of `DefId`s within the current crate that have
2017-05-02 06:32:03 -04:00
/// MIR associated with them.
2020-03-27 18:46:25 +01:00
fn mir_keys(tcx: TyCtxt<'_>, krate: CrateNum) -> FxHashSet<LocalDefId> {
2017-05-02 06:32:03 -04:00
assert_eq!(krate, LOCAL_CRATE);
let mut set = FxHashSet::default();
2017-05-02 06:32:03 -04:00
// All body-owners have MIR associated with them.
set.extend(tcx.body_owners());
2017-05-02 06:32:03 -04:00
// Additionally, tuple struct/variant constructors have MIR, but
// they don't have a BodyId, so we need to build them separately.
struct GatherCtors<'a, 'tcx> {
2019-06-14 00:48:52 +03:00
tcx: TyCtxt<'tcx>,
set: &'a mut FxHashSet<LocalDefId>,
2017-05-02 06:32:03 -04:00
}
impl<'a, 'tcx> Visitor<'tcx> for GatherCtors<'a, 'tcx> {
2019-12-22 17:42:04 -05:00
fn visit_variant_data(
&mut self,
v: &'tcx hir::VariantData<'tcx>,
2020-04-19 13:00:18 +02:00
_: Symbol,
2019-12-01 16:08:58 +01:00
_: &'tcx hir::Generics<'tcx>,
2019-12-22 17:42:04 -05:00
_: hir::HirId,
_: Span,
) {
2019-03-01 09:52:20 +01:00
if let hir::VariantData::Tuple(_, hir_id) = *v {
self.set.insert(self.tcx.hir().local_def_id(hir_id));
2017-05-02 06:32:03 -04:00
}
intravisit::walk_struct_def(self, v)
}
2020-03-11 12:05:32 +01:00
type Map = intravisit::ErasedMap<'tcx>;
2020-02-09 15:32:00 +01:00
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
2017-05-02 06:32:03 -04:00
NestedVisitorMap::None
}
}
2019-12-22 17:42:04 -05:00
tcx.hir()
.krate()
.visit_all_item_likes(&mut GatherCtors { tcx, set: &mut set }.as_deep_visitor());
2017-05-02 06:32:03 -04:00
2020-03-27 18:46:25 +01:00
set
2017-05-02 06:32:03 -04:00
}
/// Generates a default name for the pass based on the name of the
/// type `T`: everything in `type_name::<T>()` after the last `:`,
/// or the full name if it contains no colon.
pub fn default_name<T: ?Sized>() -> Cow<'static, str> {
    let name = std::any::type_name::<T>();
    match name.rfind(':') {
        Some(pos) => Cow::from(&name[pos + 1..]),
        None => Cow::from(name),
    }
}
/// A streamlined trait that you can implement to create a pass; the
/// pass will be named after the type, and it will consist of a main
/// loop that goes over each available MIR and applies `run_pass`.
2019-08-04 16:20:00 -04:00
pub trait MirPass<'tcx> {
2019-06-21 18:12:39 +02:00
fn name(&self) -> Cow<'_, str> {
default_name::<Self>()
}
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>);
}
pub fn run_passes(
2019-06-14 00:48:52 +03:00
tcx: TyCtxt<'tcx>,
2020-04-12 10:31:00 -07:00
body: &mut Body<'tcx>,
mir_phase: MirPhase,
passes: &[&[&dyn MirPass<'tcx>]],
2018-10-25 08:35:53 -04:00
) {
let phase_index = mir_phase.phase_index();
2020-05-24 00:55:44 +02:00
let validate = tcx.sess.opts.debugging_opts.validate_mir;
2019-08-15 06:39:31 -04:00
if body.phase >= mir_phase {
return;
}
2020-05-24 00:55:44 +02:00
if validate {
validate::Validator { when: format!("input to phase {:?}", mir_phase), mir_phase }
.run_pass(tcx, body);
2020-05-24 00:55:44 +02:00
}
2019-08-15 06:39:31 -04:00
let mut index = 0;
let mut run_pass = |pass: &dyn MirPass<'tcx>| {
let run_hooks = |body: &_, index, is_after| {
2019-12-22 17:42:04 -05:00
dump_mir::on_mir_pass(
tcx,
&format_args!("{:03}-{:03}", phase_index, index),
&pass.name(),
body,
is_after,
);
};
2019-08-15 06:39:31 -04:00
run_hooks(body, index, false);
pass.run_pass(tcx, body);
2019-08-15 06:39:31 -04:00
run_hooks(body, index, true);
2020-05-24 00:55:44 +02:00
if validate {
validate::Validator {
when: format!("after {} in phase {:?}", pass.name(), mir_phase),
mir_phase,
}
.run_pass(tcx, body);
2020-05-24 00:55:44 +02:00
}
2019-08-15 06:39:31 -04:00
index += 1;
};
for pass_group in passes {
for pass in *pass_group {
run_pass(*pass);
}
2019-08-15 06:39:31 -04:00
}
body.phase = mir_phase;
2020-05-25 22:04:48 +02:00
2020-08-18 13:44:57 +02:00
if mir_phase == MirPhase::Optimization {
validate::Validator { when: format!("end of phase {:?}", mir_phase), mir_phase }
.run_pass(tcx, body);
2020-05-25 22:04:48 +02:00
}
}
2020-07-15 10:50:54 +02:00
fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
2020-07-06 23:49:53 +02:00
let const_kind = tcx.hir().body_const_context(def.did);
// No need to const-check a non-const `fn`.
if const_kind.is_none() {
return Default::default();
}
// N.B., this `borrow()` is guaranteed to be valid (i.e., the value
// cannot yet be stolen), because `mir_promoted()`, which steals
// from `mir_const(), forces this query to execute before
// performing the steal.
2020-07-06 23:49:53 +02:00
let body = &tcx.mir_const(def).borrow();
if body.return_ty().references_error() {
tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
return Default::default();
}
let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };
let mut validator = check_consts::validation::Validator::new(&ccx);
validator.check_body();
// We return the qualifs in the return place for every MIR body, even though it is only used
// when deciding to promote a reference to a `const` for now.
validator.qualifs_in_return_place()
}
/// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
2020-07-06 23:49:53 +02:00
fn mir_const<'tcx>(
tcx: TyCtxt<'tcx>,
2020-07-15 10:50:54 +02:00
def: ty::WithOptConstParam<LocalDefId>,
2020-07-06 23:49:53 +02:00
) -> &'tcx Steal<Body<'tcx>> {
2020-07-17 19:12:30 +02:00
if let Some(def) = def.try_upgrade(tcx) {
return tcx.mir_const(def);
2020-07-06 23:49:53 +02:00
}
2020-04-17 15:53:37 +01:00
// Unsafety check uses the raw mir, so make sure it is run.
2020-07-15 10:50:54 +02:00
if let Some(param_did) = def.const_param_did {
2020-07-15 11:26:26 +02:00
tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
2020-07-08 01:03:19 +02:00
} else {
tcx.ensure().unsafety_check_result(def.did);
}
2020-07-06 23:49:53 +02:00
let mut body = tcx.mir_built(def).steal();
util::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));
2019-12-22 17:42:04 -05:00
run_passes(
tcx,
&mut body,
MirPhase::Const,
&[&[
// MIR-level lints.
&check_packed_ref::CheckPackedRef,
&check_const_item_mutation::CheckConstItemMutation,
&function_item_references::FunctionItemReferences,
2019-12-22 17:42:04 -05:00
// What we need to do constant evaluation.
&simplify::SimplifyCfg::new("initial"),
&rustc_peek::SanityCheck,
]],
2019-12-22 17:42:04 -05:00
);
tcx.alloc_steal_mir(body)
}
fn mir_promoted(
2019-08-14 08:08:17 -04:00
tcx: TyCtxt<'tcx>,
2020-07-15 10:50:54 +02:00
def: ty::WithOptConstParam<LocalDefId>,
2020-07-03 22:15:27 +02:00
) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
2020-07-17 19:12:30 +02:00
if let Some(def) = def.try_upgrade(tcx) {
return tcx.mir_promoted(def);
2020-07-06 23:49:53 +02:00
}
2019-10-28 21:25:51 -07:00
// Ensure that we compute the `mir_const_qualif` for constants at
// this point, before we steal the mir-const result.
2020-08-16 10:44:53 +02:00
// Also this means promotion can rely on all const checks having been done.
2020-07-08 01:03:19 +02:00
let _ = tcx.mir_const_qualif_opt_const_arg(def);
let _ = tcx.mir_abstract_const_opt_const_arg(def.to_global());
2020-07-06 23:49:53 +02:00
let mut body = tcx.mir_const(def).steal();
let mut required_consts = Vec::new();
2020-04-22 12:30:11 -03:00
let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
for (bb, bb_data) in traversal::reverse_postorder(&body) {
required_consts_visitor.visit_basic_block_data(bb, bb_data);
}
body.required_consts = required_consts;
2019-11-06 14:23:35 -08:00
let promote_pass = promote_consts::PromoteTemps::default();
let promote: &[&dyn MirPass<'tcx>] = &[
// What we need to run borrowck etc.
&promote_pass,
2020-08-16 10:44:53 +02:00
&simplify::SimplifyCfg::new("promote-consts"),
];
let opt_coverage: &[&dyn MirPass<'tcx>] = if tcx.sess.opts.debugging_opts.instrument_coverage {
&[&coverage::InstrumentCoverage]
} else {
&[]
};
run_passes(tcx, &mut body, MirPhase::ConstPromotion, &[promote, opt_coverage]);
2019-11-06 14:23:35 -08:00
let promoted = promote_pass.promoted_fragments.into_inner();
(tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
}
/// Returns the MIR used for compile-time function evaluation of `def_id`,
/// dispatching const arguments to `mir_for_ctfe_of_const_arg`.
fn mir_for_ctfe<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
    let did = def_id.expect_local();
    match ty::WithOptConstParam::try_lookup(did, tcx) {
        Some(def) => tcx.mir_for_ctfe_of_const_arg(def),
        None => tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did))),
    }
}
/// `mir_for_ctfe` for a const argument: `did` is the anonymous constant's
/// `LocalDefId` and `param_did` the const generic parameter it instantiates.
fn mir_for_ctfe_of_const_arg<'tcx>(
    tcx: TyCtxt<'tcx>,
    (did, param_did): (LocalDefId, DefId),
) -> &'tcx Body<'tcx> {
    let def = ty::WithOptConstParam { did, const_param_did: Some(param_did) };
    tcx.arena.alloc(inner_mir_for_ctfe(tcx, def))
}
/// Builds the CTFE MIR for `def`: a clone of the drops-elaborated,
/// const-checked MIR with only `ConstProp` run on top.
fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
    // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
    if tcx.is_constructor(def.did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, def.did.to_def_id());
    }

    assert_ne!(
        tcx.hir().body_const_context(def.did),
        None,
        "mir_for_ctfe should not be used for runtime functions"
    );

    let mut body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();

    // CTFE runs only const-prop; the rest of the optimization pipeline is
    // reserved for runtime MIR.
    let ctfe_passes: &[&dyn MirPass<'_>] = &[&const_prop::ConstProp];
    run_passes(tcx, &mut body, MirPhase::Optimization, &[ctfe_passes]);

    debug_assert!(!body.has_free_regions(), "Free regions in MIR for CTFE");

    body
}
fn mir_drops_elaborated_and_const_checked<'tcx>(
tcx: TyCtxt<'tcx>,
2020-07-15 10:50:54 +02:00
def: ty::WithOptConstParam<LocalDefId>,
2020-07-03 22:15:27 +02:00
) -> &'tcx Steal<Body<'tcx>> {
2020-07-17 19:12:30 +02:00
if let Some(def) = def.try_upgrade(tcx) {
return tcx.mir_drops_elaborated_and_const_checked(def);
2020-07-03 22:15:27 +02:00
}
// (Mir-)Borrowck uses `mir_promoted`, so we have to force it to
// execute before we can steal.
2020-07-15 10:50:54 +02:00
if let Some(param_did) = def.const_param_did {
2020-07-08 01:03:19 +02:00
tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
} else {
tcx.ensure().mir_borrowck(def.did);
}
let (body, _) = tcx.mir_promoted(def);
let mut body = body.steal();
run_post_borrowck_cleanup_passes(tcx, &mut body);
check_consts::post_drop_elaboration::check_live_drops(tcx, &body);
tcx.alloc_steal_mir(body)
}
/// After this series of passes, no lifetime analysis based on borrowing can be done.
fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    debug!("post_borrowck_cleanup({:?})", body.source.def_id());

    let cleanup: &[&dyn MirPass<'tcx>] = &[
        // Remove all things only needed by analysis
        &no_landing_pads::NoLandingPads::new(tcx),
        &simplify_branches::SimplifyBranches::new("initial"),
        &remove_noop_landing_pads::RemoveNoopLandingPads,
        &cleanup_post_borrowck::CleanupNonCodegenStatements,
        &simplify::SimplifyCfg::new("early-opt"),
        // These next passes must be executed together
        &add_call_guards::CriticalCallEdges,
        &elaborate_drops::ElaborateDrops,
        &no_landing_pads::NoLandingPads::new(tcx),
        // AddMovesForPackedDrops needs to run after drop elaboration.
        &add_moves_for_packed_drops::AddMovesForPackedDrops,
        // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
        // but before optimizations begin.
        &add_retag::AddRetag,
        &lower_intrinsics::LowerIntrinsics,
        &simplify::SimplifyCfg::new("elaborate-drops"),
        // `Deaggregator` is conceptually part of MIR building, some backends rely on it happening
        // and it can help optimizations.
        &deaggregator::Deaggregator,
    ];

    run_passes(tcx, body, MirPhase::DropLowering, &[cleanup]);
}
fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let mir_opt_level = tcx.sess.opts.debugging_opts.mir_opt_level;
// Lowering generator control-flow and variables has to happen before we do anything else
// to them. We run some optimizations before that, because they may be harder to do on the state
// machine than on MIR with async primitives.
let optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[
&unreachable_prop::UnreachablePropagation,
&uninhabited_enum_branching::UninhabitedEnumBranching,
&simplify::SimplifyCfg::new("after-uninhabited-enum-branching"),
&inline::Inline,
&generator::StateTransform,
];
// Even if we don't do optimizations, we still have to lower generators for codegen.
let no_optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[&generator::StateTransform];
// The main optimizations that we do on MIR.
let optimizations: &[&dyn MirPass<'tcx>] = &[
&remove_unneeded_drops::RemoveUnneededDrops,
&match_branches::MatchBranchSimplification,
// inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
&multiple_return_terminators::MultipleReturnTerminators,
&instcombine::InstCombine,
&const_prop::ConstProp,
&simplify_branches::SimplifyBranches::new("after-const-prop"),
&early_otherwise_branch::EarlyOtherwiseBranch,
&simplify_comparison_integral::SimplifyComparisonIntegral,
&simplify_try::SimplifyArmIdentity,
&simplify_try::SimplifyBranchSame,
&dest_prop::DestinationPropagation,
2020-10-17 02:25:31 +02:00
&simplify_branches::SimplifyBranches::new("final"),
&remove_noop_landing_pads::RemoveNoopLandingPads,
&simplify::SimplifyCfg::new("final"),
2020-05-14 10:11:15 -07:00
&nrvo::RenameReturnPlace,
&const_debuginfo::ConstDebugInfo,
&simplify::SimplifyLocals,
&multiple_return_terminators::MultipleReturnTerminators,
];
// Optimizations to run even if mir optimizations have been disabled.
let no_optimizations: &[&dyn MirPass<'tcx>] = &[
// FIXME(#70073): This pass is responsible for both optimization as well as some lints.
&const_prop::ConstProp,
];
// Some cleanup necessary at least for LLVM and potentially other codegen backends.
let pre_codegen_cleanup: &[&dyn MirPass<'tcx>] = &[
&add_call_guards::CriticalCallEdges,
// Dump the end result for testing and debugging purposes.
&dump_mir::Marker("PreCodegen"),
];
// End of pass declarations, now actually run the passes.
// Generator Lowering
#[rustfmt::skip]
run_passes(
tcx,
body,
MirPhase::GeneratorLowering,
&[
if mir_opt_level > 0 {
optimizations_with_generators
} else {
no_optimizations_with_generators
}
],
);
// Main optimization passes
#[rustfmt::skip]
2019-12-22 17:42:04 -05:00
run_passes(
tcx,
body,
2020-08-18 13:44:57 +02:00
MirPhase::Optimization,
2019-12-22 17:42:04 -05:00
&[
if mir_opt_level > 0 { optimizations } else { no_optimizations },
pre_codegen_cleanup,
2019-12-22 17:42:04 -05:00
],
);
2019-08-15 06:39:31 -04:00
}
2020-07-03 20:38:31 +02:00
fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
let did = did.expect_local();
2020-07-21 22:54:18 +02:00
if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
tcx.mir_for_ctfe_of_const_arg(def)
2020-07-03 20:38:31 +02:00
} else {
tcx.arena.alloc(inner_optimized_mir(tcx, ty::WithOptConstParam::unknown(did)))
2020-07-03 20:38:31 +02:00
}
}
2020-07-15 10:50:54 +02:00
fn inner_optimized_mir(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
2020-07-03 20:38:31 +02:00
if tcx.is_constructor(def.did.to_def_id()) {
2019-08-15 06:39:31 -04:00
// There's no reason to run all of the MIR passes on constructors when
// we can just output the MIR we want directly. This also saves const
// qualification and borrow checking the trouble of special casing
// constructors.
2020-07-03 20:38:31 +02:00
return shim::build_adt_ctor(tcx, def.did.to_def_id());
2019-08-15 06:39:31 -04:00
}
match tcx.hir().body_const_context(def.did) {
Some(hir::ConstContext::ConstFn) => {
if let Some((did, param_did)) = def.to_global().as_const_arg() {
tcx.ensure().mir_for_ctfe_of_const_arg((did, param_did))
} else {
tcx.ensure().mir_for_ctfe(def.did)
}
}
None => {}
Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
}
2020-07-03 22:15:27 +02:00
let mut body = tcx.mir_drops_elaborated_and_const_checked(def).steal();
run_optimization_passes(tcx, &mut body);
2020-03-19 11:40:38 +00:00
debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");
2020-03-27 20:26:20 +01:00
body
}
2020-07-06 23:49:53 +02:00
fn promoted_mir<'tcx>(
tcx: TyCtxt<'tcx>,
2020-07-15 10:50:54 +02:00
def: ty::WithOptConstParam<LocalDefId>,
2020-07-06 23:49:53 +02:00
) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
if tcx.is_constructor(def.did.to_def_id()) {
return tcx.arena.alloc(IndexVec::new());
}
2020-07-15 10:50:54 +02:00
if let Some(param_did) = def.const_param_did {
2020-07-08 01:03:19 +02:00
tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
} else {
tcx.ensure().mir_borrowck(def.did);
}
let (_, promoted) = tcx.mir_promoted(def);
2019-08-04 16:20:21 -04:00
let mut promoted = promoted.steal();
for body in &mut promoted {
run_post_borrowck_cleanup_passes(tcx, body);
2019-08-04 16:20:21 -04:00
}
2020-03-19 11:40:38 +00:00
debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");
2020-07-06 23:49:53 +02:00
tcx.arena.alloc(promoted)
}