#![allow(rustc::potential_query_instability)]
#![feature(box_patterns)]
#![feature(let_chains)]
#![feature(let_else)]
#![feature(map_try_insert)]
#![feature(min_specialization)]
#![feature(never_type)]
#![feature(once_cell)]
#![feature(option_get_or_insert_default)]
#![feature(trusted_step)]
#![feature(try_blocks)]
#![feature(yeet_expr)]
#![recursion_limit = "256"]

#[macro_use]
extern crate tracing;
#[macro_use]
extern crate rustc_middle;

use required_consts::RequiredConstsVisitor;
use rustc_const_eval::util;
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::steal::Steal;
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{self, Visitor};
use rustc_index::vec::IndexVec;
use rustc_middle::mir::visit::Visitor as _;
use rustc_middle::mir::{traversal, Body, ConstQualifs, MirPass, MirPhase, Promoted};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeFoldable};
use rustc_span::{Span, Symbol};

#[macro_use]
mod pass_manager;

use pass_manager::{self as pm, Lint, MirLint, WithMinOptLevel};

mod abort_unwinding_calls;
mod add_call_guards;
mod add_moves_for_packed_drops;
mod add_retag;
mod check_const_item_mutation;
mod check_packed_ref;
pub mod check_unsafety;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod cleanup_post_borrowck;
mod const_debuginfo;
mod const_goto;
mod const_prop;
mod const_prop_lint;
mod coverage;
mod dead_store_elimination;
mod deaggregator;
mod deduplicate_blocks;
mod deref_separator;
mod dest_prop;
pub mod dump_mir;
mod early_otherwise_branch;
mod elaborate_drops;
mod function_item_references;
mod generator;
mod inline;
mod instcombine;
mod lower_intrinsics;
mod lower_slice_len;
mod marker;
mod match_branches;
mod multiple_return_terminators;
mod normalize_array_len;
mod nrvo;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod remove_false_edges;
mod remove_noop_landing_pads;
mod remove_storage_markers;
mod remove_uninit_drops;
mod remove_unneeded_drops;
mod remove_zsts;
mod required_consts;
mod reveal_all;
mod separate_const_switch;
mod shim;
// This pass is public to allow external drivers to perform MIR cleanup
pub mod simplify;
mod simplify_branches;
mod simplify_comparison_integral;
mod simplify_try;
mod uninhabited_enum_branching;
mod unreachable_prop;

use rustc_const_eval::transform::check_consts::{self, ConstCx};
use rustc_const_eval::transform::promote_consts;
use rustc_const_eval::transform::validate;
use rustc_mir_dataflow::rustc_peek;
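
/// Registers the MIR queries implemented in this crate (`mir_keys`, `mir_const`, `mir_promoted`,
/// `mir_drops_elaborated_and_const_checked`, `optimized_mir`, `promoted_mir`, and others) with
/// the query system.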
pub fn provide(providers: &mut Providers) {
    check_unsafety::provide(providers);
    check_packed_ref::provide(providers);
    coverage::query::provide(providers);
    shim::provide(providers);
    *providers = Providers {
        mir_keys,
        mir_const,
        mir_const_qualif: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.mir_const_qualif_const_arg(def)
            } else {
                mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        mir_const_qualif_const_arg: |tcx, (did, param_did)| {
            mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        mir_promoted,
        mir_drops_elaborated_and_const_checked,
        mir_for_ctfe,
        mir_for_ctfe_of_const_arg,
        optimized_mir,
        is_mir_available,
        is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
        mir_callgraph_reachable: inline::cycle::mir_callgraph_reachable,
        mir_inliner_callees: inline::cycle::mir_inliner_callees,
        promoted_mir: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.promoted_mir_of_const_arg(def)
            } else {
                promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        promoted_mir_of_const_arg: |tcx, (did, param_did)| {
            promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        ..*providers
    };
}

fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
    let def_id = def_id.expect_local();
    tcx.mir_keys(()).contains(&def_id)
}

/// Finds the full set of `DefId`s within the current crate that have
/// MIR associated with them.
fn mir_keys(tcx: TyCtxt<'_>, (): ()) -> FxIndexSet<LocalDefId> {
    let mut set = FxIndexSet::default();

    // All body-owners have MIR associated with them.
    set.extend(tcx.hir().body_owners());

    // Additionally, tuple struct/variant constructors have MIR, but
    // they don't have a BodyId, so we need to build them separately.
    struct GatherCtors<'a, 'tcx> {
        tcx: TyCtxt<'tcx>,
        set: &'a mut FxIndexSet<LocalDefId>,
    }
    impl<'tcx> Visitor<'tcx> for GatherCtors<'_, 'tcx> {
        fn visit_variant_data(
            &mut self,
            v: &'tcx hir::VariantData<'tcx>,
            _: Symbol,
            _: &'tcx hir::Generics<'tcx>,
            _: hir::HirId,
            _: Span,
        ) {
            if let hir::VariantData::Tuple(_, hir_id) = *v {
                self.set.insert(self.tcx.hir().local_def_id(hir_id));
            }
            intravisit::walk_struct_def(self, v)
        }
    }
    tcx.hir().deep_visit_all_item_likes(&mut GatherCtors { tcx, set: &mut set });

    set
}
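
/// Const-checks the body and returns the `ConstQualifs` of its return place, which are used when
/// deciding whether a reference to this value can be promoted to a `const`.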
fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
    let const_kind = tcx.hir().body_const_context(def.did);

    // No need to const-check a non-const `fn`.
    if const_kind.is_none() {
        return Default::default();
    }

    // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
    // cannot yet be stolen), because `mir_promoted()`, which steals
    // from `mir_const()`, forces this query to execute before
    // performing the steal.
    let body = &tcx.mir_const(def).borrow();

    if body.return_ty().references_error() {
        tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
        return Default::default();
    }

    let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };

    let mut validator = check_consts::check::Checker::new(&ccx);
    validator.check_body();

    // We return the qualifs in the return place for every MIR body, even though it is only used
    // when deciding to promote a reference to a `const` for now.
    validator.qualifs_in_return_place()
}

/// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
fn mir_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_const(def);
    }

    // Unsafety check uses the raw mir, so make sure it is run.
    if !tcx.sess.opts.debugging_opts.thir_unsafeck {
        if let Some(param_did) = def.const_param_did {
            tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
        } else {
            tcx.ensure().unsafety_check_result(def.did);
        }
    }

    let mut body = tcx.mir_built(def).steal();

    rustc_middle::mir::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));

    pm::run_passes(
        tcx,
        &mut body,
        &[
            // MIR-level lints.
            &Lint(check_packed_ref::CheckPackedRef),
            &Lint(check_const_item_mutation::CheckConstItemMutation),
            &Lint(function_item_references::FunctionItemReferences),
            // What we need to do constant evaluation.
            &simplify::SimplifyCfg::new("initial"),
            &rustc_peek::SanityCheck, // Just a lint
            &marker::PhaseChange(MirPhase::Const),
        ],
    );
    tcx.alloc_steal_mir(body)
}

/// Compute the main MIR body and the list of MIR bodies of the promoteds.
fn mir_promoted<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_promoted(def);
    }

    // Ensure that we compute the `mir_const_qualif` for constants at
    // this point, before we steal the mir-const result.
    // Also this means promotion can rely on all const checks having been done.
    let const_qualifs = tcx.mir_const_qualif_opt_const_arg(def);
    let mut body = tcx.mir_const(def).steal();
    if let Some(error_reported) = const_qualifs.tainted_by_errors {
        body.tainted_by_errors = Some(error_reported);
    }
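
    // Gather the constants that are required to evaluate successfully for this body to be valid.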
    let mut required_consts = Vec::new();
    let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
    for (bb, bb_data) in traversal::reverse_postorder(&body) {
        required_consts_visitor.visit_basic_block_data(bb, bb_data);
    }
    body.required_consts = required_consts;

    // What we need to run borrowck etc.
    let promote_pass = promote_consts::PromoteTemps::default();
    pm::run_passes(
        tcx,
        &mut body,
        &[
            &promote_pass,
            &simplify::SimplifyCfg::new("promote-consts"),
            &coverage::InstrumentCoverage,
        ],
    );

    let promoted = promote_pass.promoted_fragments.into_inner();
    (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
}

/// Compute the MIR that is used during CTFE (and thus has no optimizations run on it)
fn mir_for_ctfe<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
    let did = def_id.expect_local();
    if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
        tcx.mir_for_ctfe_of_const_arg(def)
    } else {
        tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did)))
    }
}

/// Same as `mir_for_ctfe`, but used to get the MIR of a const generic parameter.
/// The docs on `WithOptConstParam` explain this a bit more, but the TLDR is that
/// we'd get cycle errors with `mir_for_ctfe`, because typeck would need to typeck
/// the const parameter while type checking the main body, which in turn would try
/// to type check the main body again.
fn mir_for_ctfe_of_const_arg<'tcx>(
    tcx: TyCtxt<'tcx>,
    (did, param_did): (LocalDefId, DefId),
) -> &'tcx Body<'tcx> {
    tcx.arena.alloc(inner_mir_for_ctfe(
        tcx,
        ty::WithOptConstParam { did, const_param_did: Some(param_did) },
    ))
}
fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
    // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
    if tcx.is_constructor(def.did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, def.did.to_def_id());
    }

    let context = tcx
        .hir()
        .body_const_context(def.did)
        .expect("mir_for_ctfe should not be used for runtime functions");

    let mut body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();

    match context {
        // Do not const prop functions: either they get executed at runtime or exported to
        // metadata, in which case we run const prop on them as part of the optimization
        // pipeline, or they don't, in which case we const evaluate some of their control
        // flow paths and any errors in those paths get emitted as const eval errors.
        hir::ConstContext::ConstFn => {}
        // Static items always get evaluated, so we can just let const eval see if any erroneous
        // control flow paths get executed.
        hir::ConstContext::Static(_) => {}
        // Associated constants get const prop run so we detect common failure situations in the
        // crate that defined the constant.
        // Technically we want to not run on regular const items, but oli-obk doesn't know how to
        // conveniently detect that at this point without looking at the HIR.
        hir::ConstContext::Const => {
            pm::run_passes(
                tcx,
                &mut body,
                &[&const_prop::ConstProp, &marker::PhaseChange(MirPhase::Optimized)],
            );
        }
    }

    debug_assert!(!body.has_free_regions(), "Free regions in MIR for CTFE");

    body
}

/// Obtain just the main MIR (no promoteds) and run some cleanups on it. This also runs
/// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't
/// end up missing the source MIR due to stealing happening.
fn mir_drops_elaborated_and_const_checked<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_drops_elaborated_and_const_checked(def);
    }
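
    // Run borrowck eagerly here, before `mir_promoted` is stolen below.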
    let mir_borrowck = tcx.mir_borrowck_opt_const_arg(def);

    let is_fn_like = tcx.def_kind(def.did).is_fn_like();
    if is_fn_like {
        let did = def.did.to_def_id();
        let def = ty::WithOptConstParam::unknown(did);

        // Do not compute the mir call graph without said call graph actually being used.
        if inline::Inline.is_enabled(&tcx.sess) {
            let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def));
        }
    }

    let (body, _) = tcx.mir_promoted(def);
    let mut body = body.steal();
    if let Some(error_reported) = mir_borrowck.tainted_by_errors {
        body.tainted_by_errors = Some(error_reported);
    }

    // IMPORTANT: this must run before the passes below; the false edges only exist for
    // borrowck, which has already run by this point.
    pm::run_passes(tcx, &mut body, &[&remove_false_edges::RemoveFalseEdges]);

    // Do a little drop elaboration before const-checking if `const_precise_live_drops` is enabled.
    if check_consts::post_drop_elaboration::checking_enabled(&ConstCx::new(tcx, &body)) {
        pm::run_passes(
            tcx,
            &mut body,
            &[
                &simplify::SimplifyCfg::new("remove-false-edges"),
                &remove_uninit_drops::RemoveUninitDrops,
            ],
        );
        check_consts::post_drop_elaboration::check_live_drops(tcx, &body); // FIXME: make this a MIR lint
    }

    run_post_borrowck_cleanup_passes(tcx, &mut body);
    assert!(body.phase == MirPhase::Deaggregated);
    tcx.alloc_steal_mir(body)
}

/// After this series of passes, no lifetime analysis based on borrowing can be done.
fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    debug!("post_borrowck_cleanup({:?})", body.source.def_id());

    let post_borrowck_cleanup: &[&dyn MirPass<'tcx>] = &[
        // Remove all things only needed by analysis
        &simplify_branches::SimplifyConstCondition::new("initial"),
        &remove_noop_landing_pads::RemoveNoopLandingPads,
        &cleanup_post_borrowck::CleanupNonCodegenStatements,
        &simplify::SimplifyCfg::new("early-opt"),
        // These next passes must be executed together
        &add_call_guards::CriticalCallEdges,
        &elaborate_drops::ElaborateDrops,
        // This will remove extraneous landing pads which are no longer
        // necessary as well as forcing any call in a non-unwinding
        // function calling a possibly-unwinding function to abort the process.
        &abort_unwinding_calls::AbortUnwindingCalls,
        // AddMovesForPackedDrops needs to run after drop
        // elaboration.
        &add_moves_for_packed_drops::AddMovesForPackedDrops,
        // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
        // but before optimizations begin.
        &deref_separator::Derefer,
        &add_retag::AddRetag,
        &lower_intrinsics::LowerIntrinsics,
        &simplify::SimplifyCfg::new("elaborate-drops"),
        // `Deaggregator` is conceptually part of MIR building, some backends rely on it happening
        // and it can help optimizations.
        &deaggregator::Deaggregator,
        &Lint(const_prop_lint::ConstProp),
    ];

    pm::run_passes(tcx, body, post_borrowck_cleanup);
}

fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
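    // Helper: only run the wrapped pass when the MIR opt level is at least 1.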
    fn o1<T>(x: T) -> WithMinOptLevel<T> {
        WithMinOptLevel(1, x)
    }

    // Lowering generator control-flow and variables has to happen before we do anything else
    // to them. We run some optimizations before that, because they may be harder to do on the
    // state machine than on MIR with async primitives.
    pm::run_passes(
        tcx,
        body,
        &[
            &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode.
            &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first
            &normalize_array_len::NormalizeArrayLen, // has to run after `slice::len` lowering
            &unreachable_prop::UnreachablePropagation,
            &uninhabited_enum_branching::UninhabitedEnumBranching,
            &o1(simplify::SimplifyCfg::new("after-uninhabited-enum-branching")),
            &inline::Inline,
            &generator::StateTransform,
        ],
    );

    assert!(body.phase == MirPhase::GeneratorsLowered);

    // The main optimizations that we do on MIR.
    pm::run_passes(
        tcx,
        body,
        &[
            &remove_storage_markers::RemoveStorageMarkers,
            &remove_zsts::RemoveZsts,
            &const_goto::ConstGoto,
            &remove_unneeded_drops::RemoveUnneededDrops,
            &match_branches::MatchBranchSimplification,
            // inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
            &multiple_return_terminators::MultipleReturnTerminators,
            &instcombine::InstCombine,
            &separate_const_switch::SeparateConstSwitch,
            //
            // FIXME(#70073): This pass is responsible for both optimization as well as some lints.
            &const_prop::ConstProp,
            //
            // Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0.
            &const_debuginfo::ConstDebugInfo,
            &o1(simplify_branches::SimplifyConstCondition::new("after-const-prop")),
            &early_otherwise_branch::EarlyOtherwiseBranch,
            &simplify_comparison_integral::SimplifyComparisonIntegral,
            &simplify_try::SimplifyArmIdentity,
            &simplify_try::SimplifyBranchSame,
            &dead_store_elimination::DeadStoreElimination,
            &dest_prop::DestinationPropagation,
            &o1(simplify_branches::SimplifyConstCondition::new("final")),
            &o1(remove_noop_landing_pads::RemoveNoopLandingPads),
            &o1(simplify::SimplifyCfg::new("final")),
            &nrvo::RenameReturnPlace,
            &simplify::SimplifyLocals,
            &multiple_return_terminators::MultipleReturnTerminators,
            &deduplicate_blocks::DeduplicateBlocks,
            // Some cleanup necessary at least for LLVM and potentially other codegen backends.
            &add_call_guards::CriticalCallEdges,
            &marker::PhaseChange(MirPhase::Optimized),
            // Dump the end result for testing and debugging purposes.
            &dump_mir::Marker("PreCodegen"),
        ],
    );
}

/// Optimize the MIR and prepare it for codegen.
fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
    let did = did.expect_local();
    assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None);
    tcx.arena.alloc(inner_optimized_mir(tcx, did))
}
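
/// Does the actual work for the `optimized_mir` query.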
fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
    if tcx.is_constructor(did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, did.to_def_id());
    }

    match tcx.hir().body_const_context(did) {
        // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked`
        // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it
        // computes and caches its result.
        Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did),
        None => {}
        Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
    }
    let mut body =
        tcx.mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)).steal();
    run_optimization_passes(tcx, &mut body);

    debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");

    body
}

/// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for
/// constant evaluation once all substitutions become known.
fn promoted_mir<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
    if tcx.is_constructor(def.did.to_def_id()) {
        return tcx.arena.alloc(IndexVec::new());
    }

    let tainted_by_errors = tcx.mir_borrowck_opt_const_arg(def).tainted_by_errors;
    let mut promoted = tcx.mir_promoted(def).1.steal();

    for body in &mut promoted {
        if let Some(error_reported) = tainted_by_errors {
            body.tainted_by_errors = Some(error_reported);
        }
        run_post_borrowck_cleanup_passes(tcx, body);
    }

    debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");

    tcx.arena.alloc(promoted)
}