Consolidate ad-hoc MIR lints into real pass-manager-based MIR lints
This commit is contained in:
parent
8e59cf95d5
commit
b08f3d5bdb
11 changed files with 120 additions and 110 deletions
266
compiler/rustc_mir_transform/src/check_call_recursion.rs
Normal file
266
compiler/rustc_mir_transform/src/check_call_recursion.rs
Normal file
|
@ -0,0 +1,266 @@
|
|||
use std::ops::ControlFlow;
|
||||
|
||||
use rustc_data_structures::graph::iterate::{
|
||||
NodeStatus, TriColorDepthFirstSearch, TriColorVisitor,
|
||||
};
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_middle::mir::{self, BasicBlock, BasicBlocks, Body, Terminator, TerminatorKind};
|
||||
use rustc_middle::ty::{self, GenericArg, GenericArgs, Instance, Ty, TyCtxt};
|
||||
use rustc_session::lint::builtin::UNCONDITIONAL_RECURSION;
|
||||
use rustc_span::Span;
|
||||
|
||||
use crate::errors::UnconditionalRecursion;
|
||||
use crate::pass_manager::MirLint;
|
||||
|
||||
/// MIR lint that fires the `unconditional_recursion` lint for `fn`s and
/// associated `fn`s in which every control-flow path leads back into the
/// function itself.
pub(super) struct CheckCallRecursion;

impl<'tcx> MirLint<'tcx> for CheckCallRecursion {
    fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
        let def_id = body.source.def_id().expect_local();

        // Only plain functions and associated functions can self-recurse
        // through calls; other body owners (consts, closures' parents, etc.)
        // are skipped.
        if let DefKind::Fn | DefKind::AssocFn = tcx.def_kind(def_id) {
            // If this is trait/impl method, extract the trait's args.
            // These are later compared against the callee's args to rule out
            // calls that go to a *different* instantiation of the same trait
            // method (see `CallRecursion::is_recursive_terminator`).
            let trait_args = match tcx.trait_of_item(def_id.to_def_id()) {
                Some(trait_def_id) => {
                    let trait_args_count = tcx.generics_of(trait_def_id).count();
                    &GenericArgs::identity_for_item(tcx, def_id)[..trait_args_count]
                }
                _ => &[],
            };

            check_recursion(tcx, body, CallRecursion { trait_args })
        }
    }
}
|
||||
|
||||
/// MIR lint that fires `unconditional_recursion` for `Drop::drop`
/// implementations that unconditionally re-drop a value of the very type
/// they are implemented for.
///
/// Requires drop elaboration to have been performed.
pub(super) struct CheckDropRecursion;

impl<'tcx> MirLint<'tcx> for CheckDropRecursion {
    fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
        let def_id = body.source.def_id().expect_local();

        // First check if `body` is an `fn drop()` of `Drop`
        if let DefKind::AssocFn = tcx.def_kind(def_id)
            && let Some(trait_ref) =
                tcx.impl_of_method(def_id.to_def_id()).and_then(|def_id| tcx.impl_trait_ref(def_id))
            && let Some(drop_trait) = tcx.lang_items().drop_trait()
            && drop_trait == trait_ref.instantiate_identity().def_id
            // avoid erroneous `Drop` impls from causing ICEs below
            && let sig = tcx.fn_sig(def_id).instantiate_identity()
            && sig.inputs().skip_binder().len() == 1
        {
            // It was. Now figure out for what type `Drop` is implemented and then
            // check for recursion.
            // `Drop::drop` takes `&mut self`, so the self type is behind a
            // reference; peel it off to get the dropped type.
            if let ty::Ref(_, dropped_ty, _) =
                tcx.liberate_late_bound_regions(def_id.to_def_id(), sig.input(0)).kind()
            {
                check_recursion(tcx, body, RecursiveDrop { drop_for: *dropped_ty });
            }
        }
    }
}
|
||||
|
||||
/// Shared driver for both recursion lints.
///
/// Runs a tri-color depth-first search over `body`'s CFG starting at the
/// entry block. The search breaks with `NonRecursive` as soon as it finds a
/// path that escapes (a return, a loop back-edge, ...); otherwise every path
/// hits a terminator that `classifier` deems recursive, and the
/// `unconditional_recursion` lint is emitted pointing at each such call site.
fn check_recursion<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    classifier: impl TerminatorClassifier<'tcx>,
) {
    let def_id = body.source.def_id().expect_local();

    if let DefKind::Fn | DefKind::AssocFn = tcx.def_kind(def_id) {
        let mut vis = Search { tcx, body, classifier, reachable_recursive_calls: vec![] };
        // A `Break(NonRecursive)` means at least one path returns control to
        // the caller, so the function is not *unconditionally* recursive.
        if let Some(NonRecursive) =
            TriColorDepthFirstSearch::new(&body.basic_blocks).run_from_start(&mut vis)
        {
            return;
        }
        // No recursive terminators were ever reached (e.g. the body diverges
        // some other way) — nothing to point the lint at.
        if vis.reachable_recursive_calls.is_empty() {
            return;
        }

        // Sort for deterministic diagnostic ordering.
        vis.reachable_recursive_calls.sort();

        let sp = tcx.def_span(def_id);
        let hir_id = tcx.local_def_id_to_hir_id(def_id);
        tcx.emit_node_span_lint(UNCONDITIONAL_RECURSION, hir_id, sp, UnconditionalRecursion {
            span: sp,
            call_sites: vis.reachable_recursive_calls,
        });
    }
}
|
||||
|
||||
/// Strategy object for the CFG search: decides whether a given terminator
/// re-enters the function being checked (a self-call for the call lint, a
/// self-drop for the drop lint).
trait TerminatorClassifier<'tcx> {
    fn is_recursive_terminator(
        &self,
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        terminator: &Terminator<'tcx>,
    ) -> bool;
}
|
||||
|
||||
/// Break value for the tri-color DFS: produced as soon as the search proves
/// the function is not unconditionally recursive.
struct NonRecursive;

/// CFG visitor state for the unconditional-recursion search.
struct Search<'mir, 'tcx, C: TerminatorClassifier<'tcx>> {
    tcx: TyCtxt<'tcx>,
    body: &'mir Body<'tcx>,
    // Decides which terminators count as recursive (calls vs. drops).
    classifier: C,

    // Spans of every recursive terminator reached by the search; these become
    // the lint's "recursive call site" labels.
    reachable_recursive_calls: Vec<Span>,
}
|
||||
|
||||
/// Classifier for ordinary self-calls.
struct CallRecursion<'tcx> {
    // If the checked function is a trait/impl method, the identity args of
    // its trait; used to distinguish a true self-call from a call to another
    // instantiation of the same trait method.
    trait_args: &'tcx [GenericArg<'tcx>],
}

/// Classifier for `Drop::drop` implementations that drop their own type.
struct RecursiveDrop<'tcx> {
    /// The type that `Drop` is implemented for.
    drop_for: Ty<'tcx>,
}
|
||||
|
||||
impl<'tcx> TerminatorClassifier<'tcx> for CallRecursion<'tcx> {
    /// Returns `true` if `func` refers to the function we are searching in.
    fn is_recursive_terminator(
        &self,
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        terminator: &Terminator<'tcx>,
    ) -> bool {
        let TerminatorKind::Call { func, args, .. } = &terminator.kind else {
            return false;
        };

        // Resolving function type to a specific instance that is being called is expensive. To
        // avoid the cost we check the number of arguments first, which is sufficient to reject
        // most of calls as non-recursive.
        if args.len() != body.arg_count {
            return false;
        }
        let caller = body.source.def_id();
        let typing_env = body.typing_env(tcx);

        let func_ty = func.ty(body, tcx);
        if let ty::FnDef(callee, args) = *func_ty.kind() {
            // Normalization can fail inside bodies with unsatisfiable bounds;
            // treat that conservatively as non-recursive.
            let Ok(normalized_args) = tcx.try_normalize_erasing_regions(typing_env, args) else {
                return false;
            };
            // Try to resolve to the concrete instance actually called (e.g.
            // devirtualize a trait-method call); fall back to the unresolved
            // callee if resolution is ambiguous.
            let (callee, call_args) = if let Ok(Some(instance)) =
                Instance::try_resolve(tcx, typing_env, callee, normalized_args)
            {
                (instance.def_id(), instance.args)
            } else {
                (callee, normalized_args)
            };

            // FIXME(#57965): Make this work across function boundaries

            // If this is a trait fn, the args on the trait have to match, or we might be
            // calling into an entirely different method (for example, a call from the default
            // method in the trait to `<A as Trait<B>>::method`, where `A` and/or `B` are
            // specific types).
            return callee == caller && &call_args[..self.trait_args.len()] == self.trait_args;
        }

        false
    }
}
|
||||
|
||||
impl<'tcx> TerminatorClassifier<'tcx> for RecursiveDrop<'tcx> {
|
||||
fn is_recursive_terminator(
|
||||
&self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
terminator: &Terminator<'tcx>,
|
||||
) -> bool {
|
||||
let TerminatorKind::Drop { place, .. } = &terminator.kind else { return false };
|
||||
|
||||
let dropped_ty = place.ty(body, tcx).ty;
|
||||
dropped_ty == self.drop_for
|
||||
}
|
||||
}
|
||||
|
||||
impl<'mir, 'tcx, C: TerminatorClassifier<'tcx>> TriColorVisitor<BasicBlocks<'tcx>>
    for Search<'mir, 'tcx, C>
{
    type BreakVal = NonRecursive;

    /// Examines a block's terminator and decides whether the search can stop:
    /// any terminator that returns control to the caller (or a CFG back-edge,
    /// i.e. a loop) proves the function is not *unconditionally* recursive.
    fn node_examined(
        &mut self,
        bb: BasicBlock,
        prior_status: Option<NodeStatus>,
    ) -> ControlFlow<Self::BreakVal> {
        // Back-edge in the CFG (loop).
        if let Some(NodeStatus::Visited) = prior_status {
            return ControlFlow::Break(NonRecursive);
        }

        match self.body[bb].terminator().kind {
            // These terminators return control flow to the caller.
            TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::UnwindResume
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::Yield { .. } => ControlFlow::Break(NonRecursive),

            // A InlineAsm without targets (diverging and contains no labels)
            // is treated as non-recursing.
            TerminatorKind::InlineAsm { ref targets, .. } => {
                if !targets.is_empty() {
                    ControlFlow::Continue(())
                } else {
                    ControlFlow::Break(NonRecursive)
                }
            }

            // These do not.
            TerminatorKind::Assert { .. }
            | TerminatorKind::Call { .. }
            | TerminatorKind::Drop { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::SwitchInt { .. } => ControlFlow::Continue(()),

            // Note that tail call terminator technically returns to the caller,
            // but for purposes of this lint it makes sense to count it as possibly recursive,
            // since it's still a call.
            //
            // If this'll be repurposed for something else, this might need to be changed.
            TerminatorKind::TailCall { .. } => ControlFlow::Continue(()),
        }
    }

    /// Called once per block after all its successors are done; records the
    /// span of any recursive terminator so the lint can label it.
    fn node_settled(&mut self, bb: BasicBlock) -> ControlFlow<Self::BreakVal> {
        // When we examine a node for the last time, remember it if it is a recursive call.
        let terminator = self.body[bb].terminator();

        // FIXME(explicit_tail_calls): highlight tail calls as "recursive call site"
        //
        // We don't want to lint functions that recurse only through tail calls
        // (such as `fn g() { become () }`), so just adding `| TailCall { ... }`
        // here won't work.
        //
        // But at the same time we would like to highlight both calls in a function like
        // `fn f() { if false { become f() } else { f() } }`, so we need to figure something out.
        if self.classifier.is_recursive_terminator(self.tcx, self.body, terminator) {
            self.reachable_recursive_calls.push(terminator.source_info.span);
        }

        ControlFlow::Continue(())
    }

    /// Prunes CFG edges the search should not follow: the unwind/cleanup edge
    /// of a terminator that also has a normal successor, everything past a
    /// recursive terminator (the call never returns for lint purposes), and
    /// `FalseEdge` imaginary targets.
    fn ignore_edge(&mut self, bb: BasicBlock, target: BasicBlock) -> bool {
        let terminator = self.body[bb].terminator();
        // Skip the cleanup edge, but only when a non-unwind successor also
        // exists — otherwise we would prune the block's only way forward.
        let ignore_unwind = terminator.unwind() == Some(&mir::UnwindAction::Cleanup(target))
            && terminator.successors().count() > 1;
        if ignore_unwind || self.classifier.is_recursive_terminator(self.tcx, self.body, terminator)
        {
            return true;
        }
        match &terminator.kind {
            TerminatorKind::FalseEdge { imaginary_target, .. } => imaginary_target == &target,
            _ => false,
        }
    }
}
|
91
compiler/rustc_mir_transform/src/check_inline.rs
Normal file
91
compiler/rustc_mir_transform/src/check_inline.rs
Normal file
|
@ -0,0 +1,91 @@
|
|||
//! Check that a body annotated with `#[rustc_force_inline]` will not fail to inline based on its
|
||||
//! definition alone (irrespective of any specific caller).
|
||||
|
||||
use rustc_attr_parsing::InlineAttr;
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
||||
use rustc_middle::mir::{Body, TerminatorKind};
|
||||
use rustc_middle::ty;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_span::sym;
|
||||
|
||||
use crate::pass_manager::MirLint;
|
||||
|
||||
/// MIR lint that checks, from the definition alone, that a body annotated
/// with `#[rustc_force_inline]` will not later fail to inline; emits a hard
/// error otherwise.
pub(super) struct CheckForceInline;

impl<'tcx> MirLint<'tcx> for CheckForceInline {
    fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
        let def_id = body.source.def_id();
        // Only local fn-like bodies can carry the attribute meaningfully.
        if !tcx.hir().body_owner_kind(def_id).is_fn_or_closure() || !def_id.is_local() {
            return;
        }
        // Nothing to check unless inlining is *forced* for this item.
        let InlineAttr::Force { attr_span, .. } = tcx.codegen_fn_attrs(def_id).inline else {
            return;
        };

        // Run both the signature-level and the body-level validity checks;
        // either failing makes the forced-inline request an error.
        if let Err(reason) =
            is_inline_valid_on_fn(tcx, def_id).and_then(|_| is_inline_valid_on_body(tcx, body))
        {
            tcx.dcx().emit_err(crate::errors::InvalidForceInline {
                attr_span,
                callee_span: tcx.def_span(def_id),
                callee: tcx.def_path_str(def_id),
                reason,
            });
        }
    }
}
|
||||
|
||||
/// Checks whether a function is eligible for MIR inlining based on its
/// signature and attributes alone (no body inspection).
///
/// Returns `Err` with a short human-readable reason when inlining must be
/// refused; also used by the inliner itself to reject callees.
pub(super) fn is_inline_valid_on_fn<'tcx>(
    tcx: TyCtxt<'tcx>,
    def_id: DefId,
) -> Result<(), &'static str> {
    let codegen_attrs = tcx.codegen_fn_attrs(def_id);
    // Explicit opt-out attribute always wins.
    if tcx.has_attr(def_id, sym::rustc_no_mir_inline) {
        return Err("#[rustc_no_mir_inline]");
    }

    // FIXME(#127234): Coverage instrumentation currently doesn't handle inlined
    // MIR correctly when Modified Condition/Decision Coverage is enabled.
    if tcx.sess.instrument_coverage_mcdc() {
        return Err("incompatible with MC/DC coverage");
    }

    // C-variadic functions can't be inlined: their argument-forwarding has no
    // MIR representation at a call site.
    let ty = tcx.type_of(def_id);
    if match ty.instantiate_identity().kind() {
        ty::FnDef(..) => tcx.fn_sig(def_id).instantiate_identity().c_variadic(),
        ty::Closure(_, args) => args.as_closure().sig().c_variadic(),
        _ => false,
    } {
        return Err("C variadic");
    }

    // `#[cold]` marks the function as unlikely to be called; inlining it
    // would pull cold code into hot callers.
    if codegen_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
        return Err("cold");
    }

    // Intrinsic fallback bodies are automatically made cross-crate inlineable,
    // but at this stage we don't know whether codegen knows the intrinsic,
    // so just conservatively don't inline it. This also ensures that we do not
    // accidentally inline the body of an intrinsic that *must* be overridden.
    if tcx.has_attr(def_id, sym::rustc_intrinsic) {
        return Err("callee is an intrinsic");
    }

    Ok(())
}
|
||||
|
||||
pub(super) fn is_inline_valid_on_body<'tcx>(
|
||||
_: TyCtxt<'tcx>,
|
||||
body: &Body<'tcx>,
|
||||
) -> Result<(), &'static str> {
|
||||
if body
|
||||
.basic_blocks
|
||||
.iter()
|
||||
.any(|bb| matches!(bb.terminator().kind, TerminatorKind::TailCall { .. }))
|
||||
{
|
||||
return Err("can't inline functions with tail calls");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -9,6 +9,28 @@ use rustc_span::{Span, Symbol};
|
|||
|
||||
use crate::fluent_generated as fluent;
|
||||
|
||||
// Diagnostic for the `unconditional_recursion` MIR lint: labels the function
// itself plus every reachable recursive call site.
#[derive(LintDiagnostic)]
#[diag(mir_transform_unconditional_recursion)]
#[help]
pub(crate) struct UnconditionalRecursion {
    // Span of the offending function's definition.
    #[label]
    pub(crate) span: Span,
    // One label per recursive call/drop site found by the CFG search.
    #[label(mir_transform_unconditional_recursion_call_site_label)]
    pub(crate) call_sites: Vec<Span>,
}
|
||||
|
||||
// Hard error emitted when a `#[rustc_force_inline]` function can never be
// inlined (see `CheckForceInline`); `reason` is a short static explanation.
#[derive(Diagnostic)]
#[diag(mir_transform_force_inline_attr)]
#[note]
pub(crate) struct InvalidForceInline {
    // Span of the `#[rustc_force_inline]` attribute itself.
    #[primary_span]
    pub attr_span: Span,
    // Span of the annotated function's definition.
    #[label(mir_transform_callee)]
    pub callee_span: Span,
    // Path of the annotated function, for the message.
    pub callee: String,
    // Why inlining is impossible (e.g. "cold", "C variadic").
    pub reason: &'static str,
}
|
||||
|
||||
#[derive(LintDiagnostic)]
|
||||
pub(crate) enum ConstMutate {
|
||||
#[diag(mir_transform_const_modify)]
|
||||
|
|
|
@ -21,8 +21,8 @@ use tracing::{debug, instrument, trace, trace_span};
|
|||
use crate::cost_checker::CostChecker;
|
||||
use crate::deref_separator::deref_finder;
|
||||
use crate::simplify::simplify_cfg;
|
||||
use crate::util;
|
||||
use crate::validate::validate_types;
|
||||
use crate::{check_inline, util};
|
||||
|
||||
pub(crate) mod cycle;
|
||||
|
||||
|
@ -575,7 +575,7 @@ fn try_inlining<'tcx, I: Inliner<'tcx>>(
|
|||
check_mir_is_available(inliner, caller_body, callsite.callee)?;
|
||||
|
||||
let callee_attrs = tcx.codegen_fn_attrs(callsite.callee.def_id());
|
||||
rustc_mir_build::check_inline::is_inline_valid_on_fn(tcx, callsite.callee.def_id())?;
|
||||
check_inline::is_inline_valid_on_fn(tcx, callsite.callee.def_id())?;
|
||||
check_codegen_attributes(inliner, callsite, callee_attrs)?;
|
||||
|
||||
let terminator = caller_body[callsite.block].terminator.as_ref().unwrap();
|
||||
|
@ -590,7 +590,7 @@ fn try_inlining<'tcx, I: Inliner<'tcx>>(
|
|||
}
|
||||
|
||||
let callee_body = try_instance_mir(tcx, callsite.callee.def)?;
|
||||
rustc_mir_build::check_inline::is_inline_valid_on_body(tcx, callee_body)?;
|
||||
check_inline::is_inline_valid_on_body(tcx, callee_body)?;
|
||||
inliner.check_callee_mir_body(callsite, callee_body, callee_attrs)?;
|
||||
|
||||
let Ok(callee_body) = callsite.callee.try_instantiate_mir_and_normalize_erasing_regions(
|
||||
|
|
|
@ -114,6 +114,8 @@ declare_passes! {
|
|||
mod add_moves_for_packed_drops : AddMovesForPackedDrops;
|
||||
mod add_retag : AddRetag;
|
||||
mod add_subtyping_projections : Subtyper;
|
||||
mod check_inline : CheckForceInline;
|
||||
mod check_call_recursion : CheckCallRecursion, CheckDropRecursion;
|
||||
mod check_alignment : CheckAlignment;
|
||||
mod check_const_item_mutation : CheckConstItemMutation;
|
||||
mod check_packed_ref : CheckPackedRef;
|
||||
|
@ -375,6 +377,8 @@ fn mir_built(tcx: TyCtxt<'_>, def: LocalDefId) -> &Steal<Body<'_>> {
|
|||
&mut body,
|
||||
&[
|
||||
// MIR-level lints.
|
||||
&Lint(check_inline::CheckForceInline),
|
||||
&Lint(check_call_recursion::CheckCallRecursion),
|
||||
&Lint(check_packed_ref::CheckPackedRef),
|
||||
&Lint(check_const_item_mutation::CheckConstItemMutation),
|
||||
&Lint(function_item_references::FunctionItemReferences),
|
||||
|
@ -505,10 +509,6 @@ fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> &
|
|||
|
||||
run_analysis_to_runtime_passes(tcx, &mut body);
|
||||
|
||||
// Now that drop elaboration has been performed, we can check for
|
||||
// unconditional drop recursion.
|
||||
rustc_mir_build::lints::check_drop_recursion(tcx, &body);
|
||||
|
||||
tcx.alloc_steal_mir(body)
|
||||
}
|
||||
|
||||
|
@ -570,6 +570,8 @@ fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
|
|||
// Calling this after `PostAnalysisNormalize` ensures that we don't deal with opaque types.
|
||||
&add_subtyping_projections::Subtyper,
|
||||
&elaborate_drops::ElaborateDrops,
|
||||
// Needs to happen after drop elaboration.
|
||||
&Lint(check_call_recursion::CheckDropRecursion),
|
||||
// This will remove extraneous landing pads which are no longer
|
||||
// necessary as well as forcing any call in a non-unwinding
|
||||
// function calling a possibly-unwinding function to abort the process.
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue