rust/src/librustc_mir/transform/check_consts/validation.rs


//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
use rustc_errors::struct_span_err;
use rustc_hir::lang_items;
use rustc_hir::{def_id::DefId, HirId};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::cast::CastTy;
use rustc_middle::ty::{self, Instance, InstanceDef, TyCtxt};
use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
use rustc_trait_selection::traits::{self, TraitEngine};
use std::borrow::Cow;
use std::ops::Deref;
use super::ops::{self, NonConstOp};
use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop};
use super::resolver::FlowSensitiveAnalysis;
use super::{is_lang_panic_fn, ConstCx, ConstKind, Qualif};
use crate::const_eval::{is_const_fn, is_unstable_const_fn};
use crate::dataflow::MaybeMutBorrowedLocals;
use crate::dataflow::{self, Analysis};
// We are using `MaybeMutBorrowedLocals` as a proxy for whether a local may have been mutated
// through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals`
// kills locals upon `StorageDead` because a local will never be used after a `StorageDead`.
type IndirectlyMutableResults<'mir, 'tcx> =
dataflow::ResultsCursor<'mir, 'tcx, MaybeMutBorrowedLocals<'mir, 'tcx>>;
type QualifResults<'mir, 'tcx, Q> =
dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
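/// Lazily constructed dataflow cursors for the qualifs tracked by the const-checker.
///
/// Each analysis is only run the first time its result is needed (see the
/// `get_or_insert_with` calls in the methods below).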
#[derive(Default)]
pub struct Qualifs<'mir, 'tcx> {
has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
indirectly_mutable: Option<IndirectlyMutableResults<'mir, 'tcx>>,
}
impl Qualifs<'mir, 'tcx> {
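/// Returns `true` if `local` may have been mutated through a mutable borrow or raw pointer
/// at some point before `location`.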
fn indirectly_mutable(
&mut self,
ccx: &'mir ConstCx<'mir, 'tcx>,
local: Local,
location: Location,
) -> bool {
let indirectly_mutable = self.indirectly_mutable.get_or_insert_with(|| {
let ConstCx { tcx, body, def_id, param_env, .. } = *ccx;
// We can use `unsound_ignore_borrow_on_drop` here because custom drop impls are not
// allowed in a const.
//
// FIXME(ecstaticmorse): Someday we want to allow custom drop impls. How do we do this
// without breaking stable code?
MaybeMutBorrowedLocals::mut_borrows_only(tcx, &body, param_env)
.unsound_ignore_borrow_on_drop()
.into_engine(tcx, &body, def_id)
.iterate_to_fixpoint()
.into_results_cursor(&body)
});
indirectly_mutable.seek_before(location);
indirectly_mutable.get().contains(local)
}
/// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
///
/// Only updates the cursor if absolutely necessary.
fn needs_drop(
&mut self,
ccx: &'mir ConstCx<'mir, 'tcx>,
local: Local,
location: Location,
) -> bool {
let ty = ccx.body.local_decls[local].ty;
if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
return false;
}
let needs_drop = self.needs_drop.get_or_insert_with(|| {
let ConstCx { tcx, body, def_id, .. } = *ccx;
FlowSensitiveAnalysis::new(NeedsDrop, ccx)
.into_engine(tcx, &body, def_id)
.iterate_to_fixpoint()
.into_results_cursor(&body)
});
needs_drop.seek_before(location);
needs_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location)
}
/// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
///
/// Only updates the cursor if absolutely necessary.
fn has_mut_interior(
&mut self,
ccx: &'mir ConstCx<'mir, 'tcx>,
local: Local,
location: Location,
) -> bool {
let ty = ccx.body.local_decls[local].ty;
if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
return false;
}
let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
let ConstCx { tcx, body, def_id, .. } = *ccx;
FlowSensitiveAnalysis::new(HasMutInterior, ccx)
.into_engine(tcx, &body, def_id)
.iterate_to_fixpoint()
.into_results_cursor(&body)
});
has_mut_interior.seek_before(location);
has_mut_interior.get().contains(local) || self.indirectly_mutable(ccx, local, location)
}
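/// Returns the `ConstQualifs` of the value in the return place at the `Return` terminator,
/// falling back to the conservative, type-based qualifs if the body never returns.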
fn in_return_place(&mut self, ccx: &'mir ConstCx<'mir, 'tcx>) -> ConstQualifs {
// Find the `Return` terminator if one exists.
//
// If no `Return` terminator exists, this MIR is divergent. Just return the conservative
// qualifs for the return type.
let return_block = ccx
.body
.basic_blocks()
.iter_enumerated()
.find(|(_, block)| match block.terminator().kind {
TerminatorKind::Return => true,
_ => false,
})
.map(|(bb, _)| bb);
let return_block = match return_block {
None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty()),
Some(bb) => bb,
};
let return_loc = ccx.body.terminator_loc(return_block);
let custom_eq = match ccx.const_kind() {
// We don't care whether a `const fn` returns a value that is not structurally
// matchable. Function calls are opaque and always use type-based qualification, so
// this value should never be used.
ConstKind::ConstFn => true,
// If we know that all values of the return type are structurally matchable, there's no
// need to run dataflow.
ConstKind::Const | ConstKind::Static | ConstKind::StaticMut
if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) =>
{
false
}
ConstKind::Const | ConstKind::Static | ConstKind::StaticMut => {
let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
.into_engine(ccx.tcx, &ccx.body, ccx.def_id)
.iterate_to_fixpoint()
.into_results_cursor(&ccx.body);
cursor.seek_after(return_loc);
cursor.contains(RETURN_PLACE)
}
};
ConstQualifs {
needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
custom_eq,
}
}
}
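/// The const-checker itself: visits a `mir::Body` and emits an error for each operation
/// that is not allowed in the current const context.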
pub struct Validator<'mir, 'tcx> {
ccx: &'mir ConstCx<'mir, 'tcx>,
qualifs: Qualifs<'mir, 'tcx>,
/// The span of the current statement.
span: Span,
}
impl Deref for Validator<'mir, 'tcx> {
type Target = ConstCx<'mir, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.ccx
}
}
impl Validator<'mir, 'tcx> {
pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
Validator { span: ccx.body.span, ccx, qualifs: Default::default() }
}
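/// Walks the body and emits errors for all operations that are not allowed in its const
/// context.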
pub fn check_body(&mut self) {
let ConstCx { tcx, body, def_id, const_kind, .. } = *self.ccx;
let use_min_const_fn_checks = (const_kind == Some(ConstKind::ConstFn)
&& crate::const_eval::is_min_const_fn(tcx, def_id))
&& !tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you;
if use_min_const_fn_checks {
// Enforce `min_const_fn` for stable `const fn`s.
use crate::transform::qualify_min_const_fn::is_min_const_fn;
if let Err((span, err)) = is_min_const_fn(tcx, def_id, &body) {
error_min_const_fn_violation(tcx, span, err);
return;
}
}
check_short_circuiting_in_const_local(self.ccx);
if body.is_cfg_cyclic() {
// We can't provide a good span for the error here, but this should be caught by the
// HIR const-checker anyway.
self.check_op_spanned(ops::Loop, body.span);
}
self.visit_body(&body);
// Ensure that the end result is `Sync` in a non-thread local `static`.
let should_check_for_sync =
const_kind == Some(ConstKind::Static) && !tcx.is_thread_local_static(def_id);
if should_check_for_sync {
let hir_id = tcx.hir().as_local_hir_id(def_id.expect_local());
check_return_ty_is_sync(tcx, &body, hir_id);
}
}
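/// Returns the qualifs of the value stored in the return place of the body being checked.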
pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
self.qualifs.in_return_place(self.ccx)
}
/// Emits an error at the given `span` if an expression cannot be evaluated in the current
/// context.
pub fn check_op_spanned<O>(&mut self, op: O, span: Span)
where
O: NonConstOp,
{
debug!("check_op: op={:?}", op);
if op.is_allowed_in_item(self) {
return;
}
// If an operation is supported in miri (and is not already controlled by a feature gate), it
// can be turned on with `-Zunleash-the-miri-inside-of-you`.
let is_unleashable = O::IS_SUPPORTED_IN_MIRI && O::feature_gate().is_none();
if is_unleashable && self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
self.tcx.sess.span_warn(span, "skipping const checks");
return;
}
op.emit_error(self, span);
}
/// Emits an error if an expression cannot be evaluated in the current context.
pub fn check_op(&mut self, op: impl NonConstOp) {
let span = self.span;
self.check_op_spanned(op, span)
}
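/// Emits the appropriate error for an access to a `static`, using a dedicated error for
/// thread-local statics.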
fn check_static(&mut self, def_id: DefId, span: Span) {
if self.tcx.is_thread_local_static(def_id) {
self.check_op_spanned(ops::ThreadLocalAccess, span)
} else {
self.check_op_spanned(ops::StaticAccess, span)
}
}
}
impl Visitor<'tcx> for Validator<'mir, 'tcx> {
fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
// Just as the old checker did, we skip const-checking basic blocks on the unwind path.
// These blocks often drop locals that would otherwise be returned from the function.
//
// FIXME: This shouldn't be unsound since a panic at compile time will cause a compiler
// error anyway, but maybe we should do more here?
if block.is_cleanup {
return;
}
self.super_basic_block_data(bb, block);
}
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
// Special-case reborrows to be more like a copy of a reference.
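// (A reborrow is a borrow of a dereference of an existing reference, e.g. `&(*_1)` where
// `_1: &T`. Treating it like a copy of `_1` avoids flagging it as a fresh borrow below.)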
match *rvalue {
Rvalue::Ref(_, kind, place) => {
if let Some(reborrowed_proj) = place_as_reborrow(self.tcx, self.body, place) {
let ctx = match kind {
BorrowKind::Shared => {
PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
}
BorrowKind::Shallow => {
PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
}
BorrowKind::Unique => {
PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
}
BorrowKind::Mut { .. } => {
PlaceContext::MutatingUse(MutatingUseContext::Borrow)
}
};
self.visit_local(&place.local, ctx, location);
self.visit_projection(place.local, reborrowed_proj, ctx, location);
return;
}
}
Rvalue::AddressOf(mutbl, place) => {
if let Some(reborrowed_proj) = place_as_reborrow(self.tcx, self.body, place) {
let ctx = match mutbl {
Mutability::Not => {
PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
}
Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
};
self.visit_local(&place.local, ctx, location);
self.visit_projection(place.local, reborrowed_proj, ctx, location);
return;
}
}
_ => {}
}
self.super_rvalue(rvalue, location);
match *rvalue {
Rvalue::Use(_)
| Rvalue::Repeat(..)
| Rvalue::UnaryOp(UnOp::Neg, _)
| Rvalue::UnaryOp(UnOp::Not, _)
| Rvalue::NullaryOp(NullOp::SizeOf, _)
| Rvalue::CheckedBinaryOp(..)
| Rvalue::Cast(CastKind::Pointer(_), ..)
| Rvalue::Discriminant(..)
| Rvalue::Len(_)
| Rvalue::Aggregate(..) => {}
Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
| Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
let ty = place.ty(self.body, self.tcx).ty;
let is_allowed = match ty.kind {
// Inside a `static mut`, `&mut [...]` is allowed.
ty::Array(..) | ty::Slice(_) if self.const_kind() == ConstKind::StaticMut => {
true
}
// FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
// that this is merely a ZST and it is already eligible for promotion.
// This may require an RFC?
/*
ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
=> true,
*/
_ => false,
};
if !is_allowed {
if let BorrowKind::Mut { .. } = kind {
self.check_op(ops::MutBorrow);
} else {
self.check_op(ops::CellBorrow);
}
}
}
Rvalue::AddressOf(Mutability::Mut, _) => self.check_op(ops::MutAddressOf),
Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
| Rvalue::AddressOf(Mutability::Not, ref place) => {
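// A shared borrow of a place whose type has interior mutability (i.e. contains an
// `UnsafeCell`, e.g. `&Cell<_>`) could be used to mutate the final value of a const, so
// it is rejected.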
let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
&self.ccx,
&mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
place.as_ref(),
);
if borrowed_place_has_mut_interior {
self.check_op(ops::CellBorrow);
}
}
Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
let operand_ty = operand.ty(self.body, self.tcx);
let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
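// Casting a pointer to an integer (e.g. something like `&0 as *const i32 as usize`) is
// flagged here, since the result would depend on a runtime memory address that the const
// evaluator does not know.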
if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
self.check_op(ops::RawPtrToIntCast);
}
}
Rvalue::BinaryOp(op, ref lhs, _) => {
if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).kind {
assert!(
op == BinOp::Eq
|| op == BinOp::Ne
|| op == BinOp::Le
|| op == BinOp::Lt
|| op == BinOp::Ge
|| op == BinOp::Gt
|| op == BinOp::Offset
);
self.check_op(ops::RawPtrComparison);
}
}
Rvalue::NullaryOp(NullOp::Box, _) => {
self.check_op(ops::HeapAllocation);
}
}
}
fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
self.super_operand(op, location);
if let Operand::Constant(c) = op {
if let Some(def_id) = c.check_static_ptr(self.tcx) {
self.check_static(def_id, self.span);
}
}
}
fn visit_projection_elem(
&mut self,
place_local: Local,
proj_base: &[PlaceElem<'tcx>],
elem: &PlaceElem<'tcx>,
context: PlaceContext,
location: Location,
) {
trace!(
"visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
context={:?} location={:?}",
place_local,
proj_base,
elem,
context,
location,
);
self.super_projection_elem(place_local, proj_base, elem, context, location);
match elem {
ProjectionElem::Deref => {
let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
if let ty::RawPtr(_) = base_ty.kind {
if proj_base.is_empty() {
if let (local, []) = (place_local, proj_base) {
let decl = &self.body.local_decls[local];
if let LocalInfo::StaticRef { def_id, .. } = decl.local_info {
let span = decl.source_info.span;
self.check_static(def_id, span);
return;
}
}
}
self.check_op(ops::RawPtrDeref);
}
if context.is_mutating_use() {
self.check_op(ops::MutDeref);
}
}
ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Downcast(..)
| ProjectionElem::Subslice { .. }
| ProjectionElem::Field(..)
| ProjectionElem::Index(_) => {
let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
match base_ty.ty_adt_def() {
Some(def) if def.is_union() => {
self.check_op(ops::UnionAccess);
}
_ => {}
}
}
}
}
fn visit_source_info(&mut self, source_info: &SourceInfo) {
trace!("visit_source_info: source_info={:?}", source_info);
self.span = source_info.span;
}
fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
trace!("visit_statement: statement={:?} location={:?}", statement, location);
match statement.kind {
StatementKind::Assign(..) | StatementKind::SetDiscriminant { .. } => {
self.super_statement(statement, location);
}
StatementKind::FakeRead(
FakeReadCause::ForMatchedPlace
| FakeReadCause::ForMatchGuard
| FakeReadCause::ForGuardBinding,
_,
) => {
self.super_statement(statement, location);
self.check_op(ops::IfOrMatch);
}
StatementKind::LlvmInlineAsm { .. } => {
self.super_statement(statement, location);
self.check_op(ops::InlineAsm);
}
StatementKind::FakeRead(FakeReadCause::ForLet | FakeReadCause::ForIndex, _)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Nop => {}
}
}
fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
trace!("visit_terminator: terminator={:?} location={:?}", terminator, location);
self.super_terminator(terminator, location);
match &terminator.kind {
TerminatorKind::Call { func, .. } => {
let fn_ty = func.ty(self.body, self.tcx);
let (def_id, substs) = match fn_ty.kind {
ty::FnDef(def_id, substs) => (def_id, substs),
ty::FnPtr(_) => {
self.check_op(ops::FnCallIndirect);
return;
}
_ => {
span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
}
};
// At this point, we are calling a function whose `DefId` is known...
if is_const_fn(self.tcx, def_id) {
return;
}
// See if this is a trait method for a concrete type whose impl of that trait is
// `const`.
if self.tcx.features().const_trait_impl {
let instance = Instance::resolve(self.tcx, self.param_env, def_id, substs);
debug!("Resolving ({:?}) -> {:?}", def_id, instance);
if let Ok(Some(func)) = instance {
if let InstanceDef::Item(def_id) = func.def {
if is_const_fn(self.tcx, def_id) {
return;
}
}
}
}
if is_lang_panic_fn(self.tcx, def_id) {
self.check_op(ops::Panic);
} else if let Some(feature) = is_unstable_const_fn(self.tcx, def_id) {
// Exempt unstable const fns inside of macros with
// `#[allow_internal_unstable]`.
if !self.span.allows_unstable(feature) {
self.check_op(ops::FnCallUnstable(def_id, feature));
}
} else {
self.check_op(ops::FnCallNonConst(def_id));
}
}
// Forbid all `Drop` terminators unless the place being dropped is a projection-free local
// that is known not to be `NeedsDrop`.
TerminatorKind::Drop { location: dropped_place, .. }
| TerminatorKind::DropAndReplace { location: dropped_place, .. } => {
let mut err_span = self.span;
// Check to see if the type of this place can ever have a drop impl. If not, this
// `Drop` terminator is frivolous.
let ty_needs_drop =
dropped_place.ty(self.body, self.tcx).ty.needs_drop(self.tcx, self.param_env);
if !ty_needs_drop {
return;
}
let needs_drop = if let Some(local) = dropped_place.as_local() {
// Use the span where the local was declared as the span of the drop error.
err_span = self.body.local_decls[local].source_info.span;
self.qualifs.needs_drop(self.ccx, local, location)
} else {
true
};
if needs_drop {
self.check_op_spanned(ops::LiveDrop, err_span);
}
}
// FIXME: Some of these are only caught by `min_const_fn`, but should error here
// instead.
TerminatorKind::Abort
| TerminatorKind::Assert { .. }
| TerminatorKind::FalseEdges { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::Return
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::Unreachable
| TerminatorKind::Yield { .. } => {}
}
}
}
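/// Reports a violation of the stricter `min_const_fn` rules that are enforced for stable
/// `const fn`s (error E0723).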
fn error_min_const_fn_violation(tcx: TyCtxt<'_>, span: Span, msg: Cow<'_, str>) {
struct_span_err!(tcx.sess, span, E0723, "{}", msg)
.note(
"see issue #57563 <https://github.com/rust-lang/rust/issues/57563> \
for more information",
)
.help("add `#![feature(const_fn)]` to the crate attributes to enable")
.emit();
}
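/// Emits an error if a const body uses both short-circuiting operators (`&&`/`||`) and local
/// variable bindings. In such bodies the operators are lowered to the non-short-circuiting
/// `&`/`|` (recorded in `control_flow_destroyed`), and that difference could be observed
/// through the bindings.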
fn check_short_circuiting_in_const_local(ccx: &ConstCx<'_, 'tcx>) {
let body = ccx.body;
if body.control_flow_destroyed.is_empty() {
return;
}
let mut locals = body.vars_iter();
if let Some(local) = locals.next() {
let span = body.local_decls[local].source_info.span;
let mut error = ccx.tcx.sess.struct_span_err(
span,
&format!(
"new features like let bindings are not permitted in {}s \
which also use short circuiting operators",
ccx.const_kind(),
),
);
for (span, kind) in body.control_flow_destroyed.iter() {
error.span_note(
*span,
&format!(
"use of {} here does not actually short circuit due to \
the const evaluator presently not being able to do control flow. \
See issue #49146 <https://github.com/rust-lang/rust/issues/49146> \
for more information.",
kind
),
);
}
for local in locals {
let span = body.local_decls[local].source_info.span;
error.span_note(span, "more locals are defined here");
}
error.emit();
}
}
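/// Checks that the value of a non-thread-local `static` implements `Sync`, since such a
/// static may be accessed from any thread.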
fn check_return_ty_is_sync(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, hir_id: HirId) {
let ty = body.return_ty();
tcx.infer_ctxt().enter(|infcx| {
let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic);
let mut fulfillment_cx = traits::FulfillmentContext::new();
let sync_def_id = tcx.require_lang_item(lang_items::SyncTraitLangItem, Some(body.span));
fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause);
if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
infcx.report_fulfillment_errors(&err, None, false);
}
});
}
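/// If `place` is a reborrow, i.e. a dereference of an existing reference such as `&(*_1)`
/// with `_1: &T`, returns the projections below that outermost `Deref`. Returns `None` for
/// dereferences of raw pointers and for the `&(*_1)` pattern that reads from a `static`.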
fn place_as_reborrow(
tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
place: Place<'tcx>,
) -> Option<&'a [PlaceElem<'tcx>]> {
place.projection.split_last().and_then(|(outermost, inner)| {
if outermost != &ProjectionElem::Deref {
return None;
}
// A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
// that points to the allocation for the static. Don't treat these as reborrows.
if body.local_decls[place.local].is_ref_to_static() {
return None;
}
// Ensure the type being derefed is a reference and not a raw pointer.
//
// This is sufficient to prevent an access to a `static mut` from being marked as a
// reborrow, even if the check above were to disappear.
let inner_ty = Place::ty_from(place.local, inner, body, tcx).ty;
match inner_ty.kind {
ty::Ref(..) => Some(inner),
_ => None,
}
})
}