2019-09-25 12:30:25 -07:00
|
|
|
//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
|
|
|
|
|
2024-08-11 12:10:36 -04:00
|
|
|
use std::assert_matches::assert_matches;
|
2024-08-24 17:14:31 +02:00
|
|
|
use std::borrow::Cow;
|
2020-09-29 17:52:12 -07:00
|
|
|
use std::mem;
|
2024-02-24 17:22:28 -05:00
|
|
|
use std::ops::Deref;
|
2024-07-29 08:13:50 +10:00
|
|
|
|
2024-02-23 10:20:45 +11:00
|
|
|
use rustc_errors::{Diag, ErrorGuaranteed};
|
2020-09-30 09:48:18 -07:00
|
|
|
use rustc_hir::def_id::DefId;
|
2024-06-14 14:46:32 -04:00
|
|
|
use rustc_hir::{self as hir, LangItem};
|
2020-12-27 17:33:56 +00:00
|
|
|
use rustc_index::bit_set::BitSet;
|
2020-03-29 17:19:48 +02:00
|
|
|
use rustc_infer::infer::TyCtxtInferExt;
|
2023-12-19 04:28:56 +00:00
|
|
|
use rustc_infer::traits::ObligationCause;
|
2024-08-18 01:24:50 +02:00
|
|
|
use rustc_middle::mir::visit::Visitor;
|
2020-03-29 16:41:09 +02:00
|
|
|
use rustc_middle::mir::*;
|
2024-05-08 19:03:14 +10:00
|
|
|
use rustc_middle::span_bug;
|
2023-12-19 04:28:56 +00:00
|
|
|
use rustc_middle::ty::adjustment::PointerCoercion;
|
2024-08-18 01:24:50 +02:00
|
|
|
use rustc_middle::ty::{self, Instance, InstanceKind, Ty, TypeVisitableExt};
|
2023-11-10 10:11:24 +08:00
|
|
|
use rustc_mir_dataflow::Analysis;
|
2024-08-24 17:14:31 +02:00
|
|
|
use rustc_mir_dataflow::impls::MaybeStorageLive;
|
|
|
|
use rustc_mir_dataflow::storage::always_storage_live_locals;
|
2024-05-10 11:04:53 -04:00
|
|
|
use rustc_span::{DUMMY_SP, Span, Symbol, sym};
|
2024-07-21 15:20:41 -04:00
|
|
|
use rustc_trait_selection::error_reporting::InferCtxtErrorExt;
|
2023-12-19 04:28:56 +00:00
|
|
|
use rustc_trait_selection::traits::{self, ObligationCauseCode, ObligationCtxt};
|
2024-05-22 14:20:23 +10:00
|
|
|
use tracing::{debug, instrument, trace};
|
|
|
|
|
2020-09-29 13:17:38 -07:00
|
|
|
use super::ops::{self, NonConstOp, Status};
|
2024-01-27 13:47:29 +01:00
|
|
|
use super::qualifs::{self, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
|
2019-10-23 12:10:08 -07:00
|
|
|
use super::resolver::FlowSensitiveAnalysis;
|
2021-10-25 17:07:16 +01:00
|
|
|
use super::{ConstCx, Qualif};
|
2020-09-17 11:14:11 -07:00
|
|
|
use crate::const_eval::is_unstable_const_fn;
|
2022-06-28 21:26:05 -07:00
|
|
|
use crate::errors::UnstableInStable;
|
2019-09-17 16:25:40 -07:00
|
|
|
|
2020-04-19 10:23:57 -07:00
|
|
|
/// A results cursor over the flow-sensitive analysis for a single qualif `Q`,
/// used to query qualif state at arbitrary program points on demand.
type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
|
2019-10-23 12:10:08 -07:00
|
|
|
|
2020-04-19 11:02:35 -07:00
|
|
|
/// Lazily-initialized dataflow cursors for each qualif we track.
///
/// Each cursor is `None` until the corresponding query method is first called; running the
/// fixpoint analysis is expensive, so it is only done when a type-based shortcut cannot
/// answer the question.
#[derive(Default)]
pub(crate) struct Qualifs<'mir, 'tcx> {
    // Cursor for the `HasMutInterior` qualif (interior mutability).
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    // Cursor for the `NeedsDrop` qualif.
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    // Cursor for the `NeedsNonConstDrop` qualif.
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
}
|
|
|
|
|
2021-12-13 22:34:51 -05:00
|
|
|
impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary
    fn needs_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function
        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        // Lazily run the dataflow analysis the first time a flow-sensitive answer is needed.
        let needs_drop = self.needs_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
                .into_engine(tcx, body)
                .iterate_to_fixpoint()
                .into_results_cursor(body)
        });

        needs_drop.seek_before_primary_effect(location);
        needs_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary
    pub(crate) fn needs_non_const_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function
        if !ty.has_opaque_types() && !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        // Lazily run the dataflow analysis the first time a flow-sensitive answer is needed.
        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
                .into_engine(tcx, body)
                .iterate_to_fixpoint()
                .into_results_cursor(body)
        });

        needs_non_const_drop.seek_before_primary_effect(location);
        needs_non_const_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    fn has_mut_interior(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function
        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        // Lazily run the dataflow analysis the first time a flow-sensitive answer is needed.
        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
                .into_engine(tcx, body)
                .iterate_to_fixpoint()
                .into_results_cursor(body)
        });

        has_mut_interior.seek_before_primary_effect(location);
        has_mut_interior.get().contains(local)
    }

    /// Computes the qualifs of the value in the return place at the `Return` terminator,
    /// falling back to type-based conservative qualifs when the body never returns.
    fn in_return_place(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        tainted_by_errors: Option<ErrorGuaranteed>,
    ) -> ConstQualifs {
        // FIXME(explicit_tail_calls): uhhhh I think we can return without return now, does it change anything

        // Find the `Return` terminator if one exists.
        //
        // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
        // qualifs for the return type.
        let return_block = ccx
            .body
            .basic_blocks
            .iter_enumerated()
            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
            .map(|(bb, _)| bb);

        let Some(return_block) = return_block else {
            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
        };

        let return_loc = ccx.body.terminator_loc(return_block);

        ConstQualifs {
            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
            tainted_by_errors,
        }
    }
}
|
|
|
|
|
2021-07-24 13:27:17 +02:00
|
|
|
/// The visitor that walks a `mir::Body` and reports operations that are not allowed in the
/// current const context.
pub struct Checker<'mir, 'tcx> {
    // The const context (body, tcx, param_env, ...) being checked; also reachable via `Deref`.
    ccx: &'mir ConstCx<'mir, 'tcx>,
    // Lazily-computed per-local qualif cursors for this body.
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.
    span: Span,

    /// A set that stores for each local whether it is "transient", i.e. guaranteed to be dead
    /// when this MIR body returns.
    transient_locals: Option<BitSet<Local>>,

    // Set once a "primary" error has been emitted; also used to taint the returned qualifs.
    error_emitted: Option<ErrorGuaranteed>,
    // Diagnostics deferred until the end of checking; emitted only if no primary error occurred.
    secondary_errors: Vec<Diag<'tcx>>,
}
|
|
|
|
|
2021-12-13 22:34:51 -05:00
|
|
|
impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
|
2020-03-23 14:02:58 +01:00
|
|
|
type Target = ConstCx<'mir, 'tcx>;
|
2019-09-17 16:25:40 -07:00
|
|
|
|
|
|
|
fn deref(&self) -> &Self::Target {
|
2023-11-21 20:07:32 +01:00
|
|
|
self.ccx
|
2019-09-17 16:25:40 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-12-13 22:34:51 -05:00
|
|
|
impl<'mir, 'tcx> Checker<'mir, 'tcx> {
    /// Creates a checker for the given const context. The qualif cursors and the transient-local
    /// set are computed lazily.
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Checker {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            transient_locals: None,
            error_emitted: None,
            secondary_errors: Vec::new(),
        }
    }

    /// Walks the whole body and emits const-check diagnostics.
    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
        // no need to emit duplicate errors here.
        if self.ccx.is_async() || body.coroutine.is_some() {
            tcx.dcx().span_delayed_bug(body.span, "`async` functions cannot be `const fn`");
            return;
        }

        if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) {
            self.visit_body(body);
        }

        // If we got through const-checking without emitting any "primary" errors, emit any
        // "secondary" errors if they occurred. Otherwise, cancel the "secondary" errors.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.error_emitted = Some(error.emit());
            }
        } else {
            // A primary error was emitted, so the secondary ones are redundant; cancel them.
            assert!(self.tcx.dcx().has_errors().is_some());
            for error in secondary_errors {
                error.cancel();
            }
        }
    }

    /// Returns `true` if `local` is guaranteed to be dead ("transient") at every `Return`
    /// terminator of this body. The answer is computed once and cached for the whole body.
    fn local_is_transient(&mut self, local: Local) -> bool {
        let ccx = self.ccx;
        self.transient_locals
            .get_or_insert_with(|| {
                // A local is "transient" if it is guaranteed dead at all `Return`.
                // So first compute the set of "maybe live" locals at each program point.
                let always_live_locals = &always_storage_live_locals(&ccx.body);
                let mut maybe_storage_live =
                    MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
                        .into_engine(ccx.tcx, &ccx.body)
                        .iterate_to_fixpoint()
                        .into_results_cursor(&ccx.body);

                // And then check all `Return` in the MIR, and if a local is "maybe live" at a
                // `Return` then it is definitely not transient.
                let mut transient = BitSet::new_filled(ccx.body.local_decls.len());
                // Make sure to only visit reachable blocks, the dataflow engine can ICE otherwise.
                for (bb, data) in traversal::reachable(&ccx.body) {
                    if matches!(data.terminator().kind, TerminatorKind::Return) {
                        let location = ccx.body.terminator_loc(bb);
                        maybe_storage_live.seek_after_primary_effect(location);
                        // If a local may be live here, it is definitely not transient.
                        transient.subtract(maybe_storage_live.get());
                    }
                }

                transient
            })
            .contains(local)
    }

    /// Returns the qualifs of the value in the return place, tainted by any primary error
    /// emitted so far.
    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    }

    /// Emits an error if an expression cannot be evaluated in the current context.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        self.check_op_spanned(op, self.span);
    }

    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
    /// context.
    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            Status::Allowed => return,

            // The operation is unstable but its feature gate is enabled: allowed, except that
            // using it from a const-stable function without `rustc_allow_const_fn_unstable`
            // is reported as "unstable in stable".
            Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
                let unstable_in_stable = self.ccx.is_const_stable_const_fn()
                    && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate);
                if unstable_in_stable {
                    emit_unstable_in_stable_error(self.ccx, span, gate);
                }

                return;
            }

            Status::Unstable(gate) => Some(gate),
            Status::Forbidden => None,
        };

        // `-Zunleash-the-miri-inside-of-you` downgrades all gated/forbidden ops to warnings.
        if self.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);
            return;
        }

        let err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            ops::DiagImportance::Primary => {
                let reported = err.emit();
                self.error_emitted = Some(reported);
            }

            // Secondary diagnostics are buffered; `check_body` decides whether to emit or
            // cancel them depending on whether a primary error occurred.
            ops::DiagImportance::Secondary => self.secondary_errors.push(err),
        }
    }

    /// Checks an access to the given `static` item; well-formedness failures taint this body.
    fn check_static(&mut self, def_id: DefId, span: Span) {
        if self.tcx.is_thread_local_static(def_id) {
            self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
        }
        if let Some(def_id) = def_id.as_local()
            && let Err(guar) = self.tcx.at(span).check_well_formed(hir::OwnerId { def_id })
        {
            self.error_emitted = Some(guar);
        }
    }

    /// Returns whether this place can possibly escape the evaluation of the current const/static
    /// initializer. The check assumes that all already existing pointers and references point to
    /// non-escaping places.
    fn place_may_escape(&mut self, place: &Place<'_>) -> bool {
        let is_transient = match self.const_kind() {
            // In a const fn all borrows are transient or point to the places given via
            // references in the arguments (so we already checked them with
            // TransientMutBorrow/MutBorrow as appropriate).
            // The borrow checker guarantees that no new non-transient borrows are created.
            // NOTE: Once we have heap allocations during CTFE we need to figure out
            // how to prevent `const fn` to create long-lived allocations that point
            // to mutable memory.
            hir::ConstContext::ConstFn => true,
            _ => {
                // For indirect places, we are not creating a new permanent borrow, it's just as
                // transient as the already existing one. For reborrowing references this is handled
                // at the top of `visit_rvalue`, but for raw pointers we handle it here.
                // Pointers/references to `static mut` and cases where the `*` is not the first
                // projection also end up here.
                // Locals with StorageDead do not live beyond the evaluation and can
                // thus safely be borrowed without being able to be leaked to the final
                // value of the constant.
                // Note: This is only sound if every local that has a `StorageDead` has a
                // `StorageDead` in every control flow path leading to a `return` terminator.
                // If anything slips through, there's no safety net -- safe code can create
                // references to variants of `!Freeze` enums as long as that variant is `Freeze`, so
                // interning can't protect us here. (There *is* a safety net for mutable references
                // though, interning will ICE if we miss something here.)
                place.is_indirect() || self.local_is_transient(place.local)
            }
        };
        // Transient places cannot possibly escape because the place doesn't exist any more at the
        // end of evaluation.
        !is_transient
    }
}
|
|
|
|
|
2021-12-13 22:34:51 -05:00
|
|
|
impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
|
2019-10-29 17:19:58 -07:00
|
|
|
fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
|
|
|
|
trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
|
|
|
|
|
2020-10-04 10:39:12 -07:00
|
|
|
// We don't const-check basic blocks on the cleanup path since we never unwind during
|
|
|
|
// const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
|
|
|
|
// are unreachable during const-eval.
|
2019-10-29 17:19:58 -07:00
|
|
|
//
|
2020-10-04 10:39:12 -07:00
|
|
|
// We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
|
|
|
|
// locals that would never be dropped during normal execution are sometimes dropped during
|
|
|
|
// unwinding, which means backwards-incompatible live-drop errors.
|
2019-10-29 17:19:58 -07:00
|
|
|
if block.is_cleanup {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
self.super_basic_block_data(bb, block);
|
|
|
|
}
|
|
|
|
|
2019-09-17 16:25:40 -07:00
|
|
|
    /// Checks a single rvalue for operations that are forbidden in the current const context
    /// (escaping borrows, async coroutines, pointer-to-int casts, non-primitive arithmetic, ...).
    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);

        self.super_rvalue(rvalue, location);

        match rvalue {
            // Thread-local access is never allowed in a const context.
            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),

            // These rvalues are always fine in const contexts.
            Rvalue::Use(_)
            | Rvalue::CopyForDeref(..)
            | Rvalue::Repeat(..)
            | Rvalue::Discriminant(..)
            | Rvalue::Len(_) => {}

            Rvalue::Aggregate(kind, ..) => {
                // Reject creating `async` coroutines; other aggregates are fine.
                if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
                    && let Some(
                        coroutine_kind @ hir::CoroutineKind::Desugared(
                            hir::CoroutineDesugaring::Async,
                            _,
                        ),
                    ) = self.tcx.coroutine_kind(def_id)
                {
                    self.check_op(ops::Coroutine(coroutine_kind));
                }
            }

            Rvalue::Ref(_, BorrowKind::Mut { .. }, place)
            | Rvalue::RawPtr(Mutability::Mut, place) => {
                // Inside mutable statics, we allow arbitrary mutable references.
                // We've allowed `static mut FOO = &mut [elements];` for a long time (the exact
                // reasons why are lost to history), and there is no reason to restrict that to
                // arrays and slices.
                let is_allowed =
                    self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut);

                if !is_allowed && self.place_may_escape(place) {
                    self.check_op(ops::EscapingMutBorrow(if matches!(rvalue, Rvalue::Ref(..)) {
                        hir::BorrowKind::Ref
                    } else {
                        hir::BorrowKind::Raw
                    }));
                }
            }

            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake(_), place)
            | Rvalue::RawPtr(Mutability::Not, place) => {
                // Shared borrows are only a problem if the borrowed place has interior
                // mutability (a `Cell` etc.) and may escape into the final value.
                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                    self.ccx,
                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
                    place.as_ref(),
                );

                if borrowed_place_has_mut_interior && self.place_may_escape(place) {
                    self.check_op(ops::EscapingCellBorrow);
                }
            }

            Rvalue::Cast(
                CastKind::PointerCoercion(
                    PointerCoercion::MutToConstPointer
                    | PointerCoercion::ArrayToPointer
                    | PointerCoercion::UnsafeFnPointer
                    | PointerCoercion::ClosureFnPointer(_)
                    | PointerCoercion::ReifyFnPointer,
                    _,
                ),
                _,
                _,
            ) => {
                // These are all okay; they only change the type, not the data.
            }

            Rvalue::Cast(
                CastKind::PointerCoercion(PointerCoercion::Unsize | PointerCoercion::DynStar, _),
                _,
                _,
            ) => {
                // Unsizing and `dyn*` coercions are implemented for CTFE.
            }

            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => {
                self.check_op(ops::RawPtrToIntCast);
            }
            Rvalue::Cast(CastKind::PointerWithExposedProvenance, _, _) => {
                // Since no pointer can ever get exposed (rejected above), this is easy to support.
            }

            Rvalue::Cast(_, _, _) => {}

            Rvalue::NullaryOp(
                NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(_) | NullOp::UbChecks,
                _,
            ) => {}
            Rvalue::ShallowInitBox(_, _) => {}

            Rvalue::UnaryOp(_, operand) => {
                let ty = operand.ty(self.body, self.tcx);
                if is_int_bool_float_or_char(ty) {
                    // Int, bool, float, and char operations are fine.
                } else {
                    // Any other operand type here is a compiler bug, not a user error.
                    span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
                }
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let lhs_ty = lhs.ty(self.body, self.tcx);
                let rhs_ty = rhs.ty(self.body, self.tcx);

                if is_int_bool_float_or_char(lhs_ty) && is_int_bool_float_or_char(rhs_ty) {
                    // Int, bool, float, and char operations are fine.
                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
                    // Only comparisons and `Offset` are valid on pointers at MIR level.
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );

                    self.check_op(ops::RawPtrComparison);
                } else {
                    // Any other operand types here are a compiler bug, not a user error.
                    span_bug!(
                        self.span,
                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
                        lhs_ty,
                        rhs_ty
                    );
                }
            }
        }
    }
|
|
|
|
|
2019-11-11 12:15:38 +01:00
|
|
|
fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
|
|
|
|
self.super_operand(op, location);
|
|
|
|
if let Operand::Constant(c) = op {
|
|
|
|
if let Some(def_id) = c.check_static_ptr(self.tcx) {
|
2019-11-18 23:04:06 +00:00
|
|
|
self.check_static(def_id, self.span);
|
2019-09-17 16:25:40 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
    /// Tracks the span of the statement currently being visited, so that diagnostics emitted by
    /// `check_op` point at the right place.
    fn visit_source_info(&mut self, source_info: &SourceInfo) {
        trace!("visit_source_info: source_info={:?}", source_info);
        self.span = source_info.span;
    }
|
|
|
|
|
|
|
|
fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
|
|
|
|
trace!("visit_statement: statement={:?} location={:?}", statement, location);
|
|
|
|
|
2020-12-19 20:25:27 +01:00
|
|
|
self.super_statement(statement, location);
|
2020-04-19 16:05:59 -07:00
|
|
|
|
2020-12-19 20:25:27 +01:00
|
|
|
match statement.kind {
|
|
|
|
StatementKind::Assign(..)
|
|
|
|
| StatementKind::SetDiscriminant { .. }
|
2022-04-05 17:14:59 -04:00
|
|
|
| StatementKind::Deinit(..)
|
2020-12-19 20:25:27 +01:00
|
|
|
| StatementKind::FakeRead(..)
|
2019-09-17 16:25:40 -07:00
|
|
|
| StatementKind::StorageLive(_)
|
|
|
|
| StatementKind::StorageDead(_)
|
|
|
|
| StatementKind::Retag { .. }
|
2022-09-06 18:41:01 +02:00
|
|
|
| StatementKind::PlaceMention(..)
|
2019-09-17 16:25:40 -07:00
|
|
|
| StatementKind::AscribeUserType(..)
|
2020-08-15 04:42:13 -07:00
|
|
|
| StatementKind::Coverage(..)
|
2022-07-12 10:05:00 +00:00
|
|
|
| StatementKind::Intrinsic(..)
|
2022-12-20 00:51:17 +00:00
|
|
|
| StatementKind::ConstEvalCounter
|
2019-09-17 16:25:40 -07:00
|
|
|
| StatementKind::Nop => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-24 00:00:00 +00:00
|
|
|
// Terminators are where the two main sources of non-const operations are
// vetted: function calls (`Call`/`TailCall`) and drops (`Drop`). The call arm
// also enforces const-stability rules for the callee.
#[instrument(level = "debug", skip(self))]
fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
    self.super_terminator(terminator, location);

    match &terminator.kind {
        TerminatorKind::Call { func, args, fn_span, .. }
        | TerminatorKind::TailCall { func, args, fn_span, .. } => {
            // `TailCall` carries no `call_source`; treat it as a normal call
            // for diagnostic purposes.
            let call_source = match terminator.kind {
                TerminatorKind::Call { call_source, .. } => call_source,
                TerminatorKind::TailCall { .. } => CallSource::Normal,
                _ => unreachable!(),
            };

            let ConstCx { tcx, body, param_env, .. } = *self.ccx;
            let caller = self.def_id();

            let fn_ty = func.ty(body, tcx);

            // Only direct calls (`FnDef`) can be checked further; calls
            // through function pointers are rejected outright.
            let (mut callee, mut fn_args) = match *fn_ty.kind() {
                ty::FnDef(def_id, fn_args) => (def_id, fn_args),

                ty::FnPtr(..) => {
                    self.check_op(ops::FnCallIndirect);
                    return;
                }
                _ => {
                    span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                }
            };

            // Check that all trait bounds that are marked as `~const` can be satisfied.
            //
            // Typeck only does a "non-const" check since it operates on HIR and cannot distinguish
            // which path expressions are getting called on and which path expressions are only used
            // as function pointers. This is required for correctness.
            let infcx = tcx.infer_ctxt().build();
            let ocx = ObligationCtxt::new_with_diagnostics(&infcx);

            let predicates = tcx.predicates_of(callee).instantiate(tcx, fn_args);
            let cause = ObligationCause::new(
                terminator.source_info.span,
                self.body.source.def_id().expect_local(),
                ObligationCauseCode::WhereClause(callee, DUMMY_SP),
            );
            let normalized_predicates = ocx.normalize(&cause, param_env, predicates);
            ocx.register_obligations(traits::predicates_for_generics(
                |_, _| cause.clone(),
                self.param_env,
                normalized_predicates,
            ));

            // Any unsatisfied bound is reported immediately; checking then
            // continues so further problems are still surfaced.
            let errors = ocx.select_all_or_error();
            if !errors.is_empty() {
                infcx.err_ctxt().report_fulfillment_errors(errors);
            }

            let mut is_trait = false;
            // Attempting to call a trait method?
            if tcx.trait_of_item(callee).is_some() {
                trace!("attempting to call a trait method");
                // trait method calls are only permitted when `effects` is enabled.
                // we don't error, since that is handled by typeck. We try to resolve
                // the trait into the concrete method, and uses that for const stability
                // checks.
                // FIXME(effects) we might consider moving const stability checks to typeck as well.
                if tcx.features().effects() {
                    is_trait = true;

                    if let Ok(Some(instance)) =
                        Instance::try_resolve(tcx, param_env, callee, fn_args)
                        && let InstanceKind::Item(def) = instance.def
                    {
                        // Resolve a trait method call to its concrete implementation, which may be in a
                        // `const` trait impl. This is only used for the const stability check below, since
                        // we want to look at the concrete impl's stability.
                        fn_args = instance.args;
                        callee = def;
                    }
                } else {
                    // Without the `effects` feature, a trait-method call in
                    // const context is a non-const call; pick the feature to
                    // suggest based on what is already enabled.
                    self.check_op(ops::FnCallNonConst {
                        caller,
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                        feature: Some(if tcx.features().const_trait_impl() {
                            sym::effects
                        } else {
                            sym::const_trait_impl
                        }),
                    });
                    return;
                }
            }

            // At this point, we are calling a function, `callee`, whose `DefId` is known...

            // `begin_panic` and `#[rustc_const_panic_str]` functions accept generic
            // types other than str. Check to enforce that only str can be used in
            // const-eval.

            // const-eval of the `begin_panic` fn assumes the argument is `&str`
            if tcx.is_lang_item(callee, LangItem::BeginPanic) {
                match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                    ty::Ref(_, ty, _) if ty.is_str() => return,
                    _ => self.check_op(ops::PanicNonStr),
                }
            }

            // const-eval of `#[rustc_const_panic_str]` functions assumes the argument is `&&str`
            if tcx.has_attr(callee, sym::rustc_const_panic_str) {
                match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                    ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
                    {
                        return;
                    }
                    _ => self.check_op(ops::PanicNonStr),
                }
            }

            // This can be called on stable via the `vec!` macro.
            if tcx.is_lang_item(callee, LangItem::ExchangeMalloc) {
                self.check_op(ops::HeapAllocation);
                return;
            }

            // Non-`const fn` callees are rejected here, except for trait
            // methods under `effects` (handled by typeck, see above).
            if !tcx.is_const_fn_raw(callee) && !is_trait {
                self.check_op(ops::FnCallNonConst {
                    caller,
                    callee,
                    args: fn_args,
                    span: *fn_span,
                    call_source,
                    feature: None,
                });
                return;
            }

            // If the `const fn` we are trying to call is not const-stable, ensure that we have
            // the proper feature gate enabled.
            if let Some((gate, implied_by)) = is_unstable_const_fn(tcx, callee) {
                trace!(?gate, "calling unstable const fn");
                // Spans from macro expansions may be marked as allowing the
                // gate (or a gate that implies it).
                if self.span.allows_unstable(gate) {
                    return;
                }
                if let Some(implied_by_gate) = implied_by
                    && self.span.allows_unstable(implied_by_gate)
                {
                    return;
                }

                // Calling an unstable function *always* requires that the corresponding gate
                // (or implied gate) be enabled, even if the function has
                // `#[rustc_allow_const_fn_unstable(the_gate)]`.
                let gate_enabled = |gate| tcx.features().enabled(gate);
                let feature_gate_enabled = gate_enabled(gate);
                let implied_gate_enabled = implied_by.is_some_and(gate_enabled);
                if !feature_gate_enabled && !implied_gate_enabled {
                    self.check_op(ops::FnCallUnstable(callee, Some(gate)));
                    return;
                }

                // If this crate is not using stability attributes, or the caller is not claiming to be a
                // stable `const fn`, that is all that is required.
                if !self.ccx.is_const_stable_const_fn() {
                    trace!("crate not using stability attributes or caller not stably const");
                    return;
                }

                // Otherwise, we are something const-stable calling a const-unstable fn.
                if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
                    trace!("rustc_allow_const_fn_unstable gate enabled");
                    return;
                }

                self.check_op(ops::FnCallUnstable(callee, Some(gate)));
                return;
            }

            // FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
            // have no `rustc_const_stable` attributes to be const-unstable as well. This
            // should be fixed later.
            let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
                && tcx.lookup_stability(callee).is_some_and(|s| s.is_unstable());
            if callee_is_unstable_unmarked {
                trace!("callee_is_unstable_unmarked");
                // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
                // `extern` functions, and these have no way to get marked `const`. So instead we
                // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
                if self.ccx.is_const_stable_const_fn() || tcx.intrinsic(callee).is_some() {
                    self.check_op(ops::FnCallUnstable(callee, None));
                    return;
                }
            }
            trace!("permitting call");
        }

        // Forbid all `Drop` terminators unless the place being dropped is a local with no
        // projections that cannot be `NeedsNonConstDrop`.
        TerminatorKind::Drop { place: dropped_place, .. } => {
            // If we are checking live drops after drop-elaboration, don't emit duplicate
            // errors here.
            if super::post_drop_elaboration::checking_enabled(self.ccx) {
                return;
            }

            let mut err_span = self.span;
            let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;

            // Fast path: first check the type alone. If no value of this type
            // can need a non-const drop, the terminator is fine.
            let ty_needs_non_const_drop =
                qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place);

            debug!(?ty_of_dropped_place, ?ty_needs_non_const_drop);

            if !ty_needs_non_const_drop {
                return;
            }

            // Slow path: for a plain local (no projections) the dataflow
            // qualif may still prove this particular value needs no drop.
            let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
                // Use the span where the local was declared as the span of the drop error.
                err_span = self.body.local_decls[local].source_info.span;
                self.qualifs.needs_non_const_drop(self.ccx, local, location)
            } else {
                true
            };

            if needs_non_const_drop {
                self.check_op_spanned(
                    ops::LiveDrop {
                        dropped_at: Some(terminator.source_info.span),
                        dropped_ty: ty_of_dropped_place,
                    },
                    err_span,
                );
            }
        }

        TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

        TerminatorKind::Yield { .. } => self.check_op(ops::Coroutine(
            self.tcx
                .coroutine_kind(self.body.source.def_id())
                .expect("Only expected to have a yield in a coroutine"),
        )),

        TerminatorKind::CoroutineDrop => {
            span_bug!(
                self.body.source_info(location).span,
                "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"
            );
        }

        TerminatorKind::UnwindTerminate(_) => {
            // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
            span_bug!(self.span, "`Terminate` terminator outside of cleanup block")
        }

        // These terminators contain nothing that needs const checking beyond
        // what `super_terminator` already visited.
        TerminatorKind::Assert { .. }
        | TerminatorKind::FalseEdge { .. }
        | TerminatorKind::FalseUnwind { .. }
        | TerminatorKind::Goto { .. }
        | TerminatorKind::UnwindResume
        | TerminatorKind::Return
        | TerminatorKind::SwitchInt { .. }
        | TerminatorKind::Unreachable => {}
    }
}
|
|
|
|
}
|
2019-10-29 17:19:58 -07:00
|
|
|
|
2024-08-03 11:17:43 +02:00
|
|
|
fn is_int_bool_float_or_char(ty: Ty<'_>) -> bool {
|
|
|
|
ty.is_bool() || ty.is_integral() || ty.is_char() || ty.is_floating_point()
|
2020-09-23 11:54:11 -07:00
|
|
|
}
|
2020-09-29 19:13:25 -07:00
|
|
|
|
2020-09-29 13:17:38 -07:00
|
|
|
fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
|
2022-03-29 23:50:01 +02:00
|
|
|
let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();
|
2020-09-30 09:48:18 -07:00
|
|
|
|
2023-12-18 22:21:37 +11:00
|
|
|
ccx.dcx().emit_err(UnstableInStable { gate: gate.to_string(), span, attr_span });
|
2020-09-29 13:17:38 -07:00
|
|
|
}
|