// rust/compiler/rustc_mir/src/transform/check_consts/validation.rs
//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
2020-03-29 17:19:48 +02:00
use rustc_errors::struct_span_err;
use rustc_hir::{self as hir, LangItem};
2020-03-29 17:19:48 +02:00
use rustc_hir::{def_id::DefId, HirId};
use rustc_infer::infer::TyCtxtInferExt;
2020-03-29 16:41:09 +02:00
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::cast::CastTy;
use rustc_middle::ty::subst::GenericArgKind;
use rustc_middle::ty::{
self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt, TypeAndMut,
};
use rustc_span::{sym, Span};
2020-02-11 21:19:40 +01:00
use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
use rustc_trait_selection::traits::{self, TraitEngine};
2019-09-17 16:25:40 -07:00
use std::ops::Deref;
use super::ops::{self, NonConstOp};
2020-03-16 10:45:39 -07:00
use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop};
use super::resolver::FlowSensitiveAnalysis;
use super::{is_lang_panic_fn, ConstCx, Qualif};
use crate::const_eval::is_unstable_const_fn;
use crate::dataflow::impls::MaybeMutBorrowedLocals;
use crate::dataflow::{self, Analysis};
2019-09-17 16:25:40 -07:00
// We are using `MaybeMutBorrowedLocals` as a proxy for whether an item may have been mutated
// through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals`
// kills locals upon `StorageDead` because a local will never be used after a `StorageDead`.
type IndirectlyMutableResults<'mir, 'tcx> =
    dataflow::ResultsCursor<'mir, 'tcx, MaybeMutBorrowedLocals<'mir, 'tcx>>;

/// A results cursor for one flow-sensitive `Qualif` analysis over a body.
type QualifResults<'mir, 'tcx, Q> =
    dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
/// Lazily-initialized dataflow cursors for each qualif.
///
/// Each analysis is only run (and its cursor built) the first time the corresponding
/// query method on `impl Qualifs` is called; a `None` field means that analysis has not
/// been needed yet for this body.
#[derive(Default)]
pub struct Qualifs<'mir, 'tcx> {
    // Cursor for the `HasMutInterior` qualif (interior mutability).
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    // Cursor for the `NeedsDrop` qualif.
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    // Cursor for the indirect-mutation proxy analysis (`MaybeMutBorrowedLocals`).
    indirectly_mutable: Option<IndirectlyMutableResults<'mir, 'tcx>>,
}
impl Qualifs<'mir, 'tcx> {
2020-04-30 21:04:59 -07:00
pub fn indirectly_mutable(
&mut self,
ccx: &'mir ConstCx<'mir, 'tcx>,
local: Local,
location: Location,
) -> bool {
let indirectly_mutable = self.indirectly_mutable.get_or_insert_with(|| {
let ConstCx { tcx, body, def_id, param_env, .. } = *ccx;
// We can use `unsound_ignore_borrow_on_drop` here because custom drop impls are not
// allowed in a const.
//
// FIXME(ecstaticmorse): Someday we want to allow custom drop impls. How do we do this
// without breaking stable code?
MaybeMutBorrowedLocals::mut_borrows_only(tcx, &body, param_env)
.unsound_ignore_borrow_on_drop()
2020-07-04 14:02:41 +02:00
.into_engine(tcx, &body, def_id.to_def_id())
.iterate_to_fixpoint()
.into_results_cursor(&body)
});
2020-03-22 12:09:40 -07:00
indirectly_mutable.seek_before_primary_effect(location);
indirectly_mutable.get().contains(local)
}
/// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
///
/// Only updates the cursor if absolutely necessary
2020-04-30 21:04:59 -07:00
pub fn needs_drop(
&mut self,
ccx: &'mir ConstCx<'mir, 'tcx>,
local: Local,
location: Location,
) -> bool {
let ty = ccx.body.local_decls[local].ty;
if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
return false;
}
let needs_drop = self.needs_drop.get_or_insert_with(|| {
let ConstCx { tcx, body, def_id, .. } = *ccx;
FlowSensitiveAnalysis::new(NeedsDrop, ccx)
2020-07-04 14:02:41 +02:00
.into_engine(tcx, &body, def_id.to_def_id())
.iterate_to_fixpoint()
.into_results_cursor(&body)
});
2020-03-22 12:09:40 -07:00
needs_drop.seek_before_primary_effect(location);
needs_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location)
}
/// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
///
/// Only updates the cursor if absolutely necessary.
2020-04-30 21:04:59 -07:00
pub fn has_mut_interior(
&mut self,
ccx: &'mir ConstCx<'mir, 'tcx>,
local: Local,
location: Location,
) -> bool {
let ty = ccx.body.local_decls[local].ty;
if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
return false;
}
let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
let ConstCx { tcx, body, def_id, .. } = *ccx;
FlowSensitiveAnalysis::new(HasMutInterior, ccx)
2020-07-04 14:02:41 +02:00
.into_engine(tcx, &body, def_id.to_def_id())
.iterate_to_fixpoint()
.into_results_cursor(&body)
});
2020-03-22 12:09:40 -07:00
has_mut_interior.seek_before_primary_effect(location);
has_mut_interior.get().contains(local) || self.indirectly_mutable(ccx, local, location)
}
fn in_return_place(&mut self, ccx: &'mir ConstCx<'mir, 'tcx>) -> ConstQualifs {
// Find the `Return` terminator if one exists.
//
// If no `Return` terminator exists, this MIR is divergent. Just return the conservative
// qualifs for the return type.
let return_block = ccx
2019-12-22 17:42:04 -05:00
.body
.basic_blocks()
.iter_enumerated()
2019-12-22 17:42:04 -05:00
.find(|(_, block)| match block.terminator().kind {
TerminatorKind::Return => true,
_ => false,
})
.map(|(bb, _)| bb);
let return_block = match return_block {
None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty()),
Some(bb) => bb,
};
let return_loc = ccx.body.terminator_loc(return_block);
2020-03-16 10:45:39 -07:00
let custom_eq = match ccx.const_kind() {
// We don't care whether a `const fn` returns a value that is not structurally
// matchable. Functions calls are opaque and always use type-based qualification, so
// this value should never be used.
hir::ConstContext::ConstFn => true,
2020-03-16 10:45:39 -07:00
// If we know that all values of the return type are structurally matchable, there's no
// need to run dataflow.
_ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false,
2020-03-16 10:45:39 -07:00
hir::ConstContext::Const | hir::ConstContext::Static(_) => {
2020-03-16 10:45:39 -07:00
let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
2020-07-04 14:02:41 +02:00
.into_engine(ccx.tcx, &ccx.body, ccx.def_id.to_def_id())
2020-03-16 10:45:39 -07:00
.iterate_to_fixpoint()
.into_results_cursor(&ccx.body);
2020-05-01 11:19:27 -07:00
cursor.seek_after_primary_effect(return_loc);
2020-03-16 10:45:39 -07:00
cursor.contains(RETURN_PLACE)
}
};
2019-11-14 11:58:50 -08:00
ConstQualifs {
needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
2020-03-16 10:45:39 -07:00
custom_eq,
2019-11-14 09:16:08 -08:00
}
}
2019-09-17 16:25:40 -07:00
}
/// The `Visitor` responsible for actually checking a `mir::Body` for operations that are
/// invalid in its const context.
pub struct Validator<'mir, 'tcx> {
    // The const context (tcx, body, def_id, param_env, …) being checked; also reachable
    // implicitly through the `Deref` impl below.
    ccx: &'mir ConstCx<'mir, 'tcx>,
    // Lazily-computed qualif cursors for this body.
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.
    span: Span,

    // Set once an error whose op has `STOPS_CONST_CHECKING` is emitted; suppresses all
    // further errors for this body (see `check_op_spanned`).
    const_checking_stopped: bool,
}
// Lets the validator use `self.tcx`, `self.body`, `self.const_kind()`, etc. directly by
// dereferencing to the wrapped `ConstCx`.
impl Deref for Validator<'mir, 'tcx> {
    type Target = ConstCx<'mir, 'tcx>;

    fn deref(&self) -> &Self::Target {
        &self.ccx
    }
}
impl Validator<'mir, 'tcx> {
    /// Creates a validator for `ccx`, starting with the whole body's span as the current span.
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Validator {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            const_checking_stopped: false,
        }
    }

    /// Entry point: const-checks the whole body, emitting an error for each invalid operation.
    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, def_id, .. } = *self.ccx;

        // HACK: This function has side-effects???? Make sure we call it.
        let _ = crate::const_eval::is_min_const_fn(tcx, def_id.to_def_id());

        // The local type and predicate checks are not free and only relevant for `const fn`s.
        if self.const_kind() == hir::ConstContext::ConstFn {
            // Prevent const trait methods from being annotated as `stable`.
            // FIXME: Do this as part of stability checking.
            if self.is_const_stable_const_fn() {
                let hir_id = tcx.hir().local_def_id_to_hir_id(self.def_id);
                if crate::const_eval::is_parent_const_impl_raw(tcx, hir_id) {
                    struct_span_err!(
                        self.ccx.tcx.sess,
                        self.span,
                        E0723,
                        "trait methods cannot be stable const fn"
                    )
                    .emit();
                }
            }

            self.check_item_predicates();

            for local in &body.local_decls {
                // Compiler-introduced temporaries are skipped.
                if local.internal {
                    continue;
                }

                self.span = local.source_info.span;
                self.check_local_or_return_ty(local.ty);
            }

            // impl trait is gone in MIR, so check the return type of a const fn by its signature
            // instead of the type of the return place.
            self.span = body.local_decls[RETURN_PLACE].source_info.span;
            let return_ty = tcx.fn_sig(def_id).output();
            self.check_local_or_return_ty(return_ty.skip_binder());
        }

        // Walk the MIR; the `Visitor` impl below does the per-statement/terminator checks.
        self.visit_body(&body);

        // Ensure that the end result is `Sync` in a non-thread local `static`.
        let should_check_for_sync = self.const_kind()
            == hir::ConstContext::Static(hir::Mutability::Not)
            && !tcx.is_thread_local_static(def_id.to_def_id());

        if should_check_for_sync {
            let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
            check_return_ty_is_sync(tcx, &body, hir_id);
        }
    }

    /// Returns the qualifs of the value in the return place when this body returns.
    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx)
    }

    /// Emits an error if an expression cannot be evaluated in the current context.
    pub fn check_op(&mut self, op: impl NonConstOp) {
        self.check_op_spanned(op, self.span);
    }

    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
    /// context.
    pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) {
        // HACK: This is for strict equivalence with the old `qualify_min_const_fn` pass, which
        // only emitted one error per function. It should be removed and the test output updated.
        if self.const_checking_stopped {
            return;
        }

        let err_emitted = ops::non_const(self.ccx, op, span);
        if err_emitted && O::STOPS_CONST_CHECKING {
            self.const_checking_stopped = true;
        }
    }

    /// Emits an error for an access to the non-thread-local static `def_id` at `span`.
    fn check_static(&mut self, def_id: DefId, span: Span) {
        assert!(
            !self.tcx.is_thread_local_static(def_id),
            "tls access is checked in `Rvalue::ThreadLocalRef"
        );
        self.check_op_spanned(ops::StaticAccess, span)
    }

    /// Walks every type component of `ty` and flags those not allowed in the signature of a
    /// `const fn`: `&mut`, `impl Trait`, fn pointers, and trait objects with bounds other
    /// than `Sized`.
    fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>) {
        for ty in ty.walk() {
            let ty = match ty.unpack() {
                GenericArgKind::Type(ty) => ty,

                // No constraints on lifetimes or constants, except potentially
                // constants' types, but `walk` will get to them as well.
                GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
            };

            match *ty.kind() {
                ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef),
                ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
                ty::FnPtr(..) => self.check_op(ops::ty::FnPtr),

                ty::Dynamic(preds, _) => {
                    for pred in preds.iter() {
                        match pred.skip_binder() {
                            ty::ExistentialPredicate::AutoTrait(_)
                            | ty::ExistentialPredicate::Projection(_) => {
                                self.check_op(ops::ty::TraitBound)
                            }
                            ty::ExistentialPredicate::Trait(trait_ref) => {
                                // `Sized` is the only trait bound permitted on a trait object.
                                if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
                                    self.check_op(ops::ty::TraitBound)
                                }
                            }
                        }
                    }
                }
                _ => {}
            }
        }
    }

    /// Checks the where-clauses of this item and of each ancestor item for trait bounds that
    /// are not allowed on a `const fn`.
    fn check_item_predicates(&mut self) {
        let ConstCx { tcx, def_id, .. } = *self.ccx;

        let mut current = def_id.to_def_id();
        loop {
            let predicates = tcx.predicates_of(current);
            for (predicate, _) in predicates.predicates {
                match predicate.skip_binders() {
                    // These predicate kinds impose no const-specific restrictions.
                    ty::PredicateAtom::RegionOutlives(_)
                    | ty::PredicateAtom::TypeOutlives(_)
                    | ty::PredicateAtom::WellFormed(_)
                    | ty::PredicateAtom::Projection(_)
                    | ty::PredicateAtom::ConstEvaluatable(..)
                    | ty::PredicateAtom::ConstEquate(..)
                    | ty::PredicateAtom::TypeWellFormedFromEnv(..) => continue,
                    // These predicate kinds should never appear on a function item.
                    ty::PredicateAtom::ObjectSafe(_) => {
                        bug!("object safe predicate on function: {:#?}", predicate)
                    }
                    ty::PredicateAtom::ClosureKind(..) => {
                        bug!("closure kind predicate on function: {:#?}", predicate)
                    }
                    ty::PredicateAtom::Subtype(_) => {
                        bug!("subtype predicate on function: {:#?}", predicate)
                    }
                    ty::PredicateAtom::Trait(pred, constness) => {
                        // `T: Sized` is always permitted.
                        if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
                            continue;
                        }
                        match pred.self_ty().kind() {
                            ty::Param(p) => {
                                // Report the error on the type parameter's declaration span.
                                let generics = tcx.generics_of(current);
                                let def = generics.type_param(p, tcx);
                                let span = tcx.def_span(def.def_id);

                                if constness == hir::Constness::Const {
                                    self.check_op_spanned(ops::ty::TraitBound, span);
                                } else if !tcx.features().const_fn
                                    || self.ccx.is_const_stable_const_fn()
                                {
                                    // HACK: We shouldn't need the conditional above, but trait
                                    // bounds on containing impl blocks are wrongly being marked as
                                    // "not-const".
                                    self.check_op_spanned(ops::ty::TraitBound, span);
                                }
                            }
                            // other kinds of bounds are either tautologies
                            // or cause errors in other passes
                            _ => continue,
                        }
                    }
                }
            }

            // Walk up to the parent item's predicates (e.g. a method's enclosing impl).
            match predicates.parent {
                Some(parent) => current = parent,
                None => break,
            }
        }
    }
}
impl Visitor<'tcx> for Validator<'mir, 'tcx> {
2019-12-22 17:42:04 -05:00
fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
// Just as the old checker did, we skip const-checking basic blocks on the unwind path.
// These blocks often drop locals that would otherwise be returned from the function.
//
// FIXME: This shouldn't be unsound since a panic at compile time will cause a compiler
// error anyway, but maybe we should do more here?
if block.is_cleanup {
return;
}
self.super_basic_block_data(bb, block);
}
2019-09-17 16:25:40 -07:00
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
2019-11-22 15:52:59 -08:00
// Special-case reborrows to be more like a copy of a reference.
match *rvalue {
Rvalue::Ref(_, kind, place) => {
2020-04-12 10:28:41 -07:00
if let Some(reborrowed_proj) = place_as_reborrow(self.tcx, self.body, place) {
let ctx = match kind {
2019-12-22 17:42:04 -05:00
BorrowKind::Shared => {
PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
}
BorrowKind::Shallow => {
PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
}
BorrowKind::Unique => {
PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
}
BorrowKind::Mut { .. } => {
PlaceContext::MutatingUse(MutatingUseContext::Borrow)
}
};
2020-04-21 17:11:00 -03:00
self.visit_local(&place.local, ctx, location);
self.visit_projection(place.local, reborrowed_proj, ctx, location);
return;
}
}
Rvalue::AddressOf(mutbl, place) => {
2020-04-12 10:28:41 -07:00
if let Some(reborrowed_proj) = place_as_reborrow(self.tcx, self.body, place) {
let ctx = match mutbl {
2019-12-22 17:42:04 -05:00
Mutability::Not => {
PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
}
Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
};
2020-04-21 17:11:00 -03:00
self.visit_local(&place.local, ctx, location);
self.visit_projection(place.local, reborrowed_proj, ctx, location);
return;
}
2019-09-17 16:25:40 -07:00
}
_ => {}
2019-09-17 16:25:40 -07:00
}
2019-11-22 15:52:59 -08:00
self.super_rvalue(rvalue, location);
2019-09-17 16:25:40 -07:00
match *rvalue {
2020-05-02 21:44:25 +02:00
Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
2019-12-22 17:42:04 -05:00
Rvalue::Use(_)
| Rvalue::Repeat(..)
| Rvalue::Discriminant(..)
| Rvalue::Len(_)
| Rvalue::Aggregate(..) => {}
Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
| Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
2020-04-12 10:28:41 -07:00
let ty = place.ty(self.body, self.tcx).ty;
2020-08-03 00:49:11 +02:00
let is_allowed = match ty.kind() {
2019-11-22 15:52:59 -08:00
// Inside a `static mut`, `&mut [...]` is allowed.
ty::Array(..) | ty::Slice(_)
if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
{
2019-12-22 17:42:04 -05:00
true
}
2019-11-22 15:52:59 -08:00
// FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
// that this is merely a ZST and it is already eligible for promotion.
// This may require an RFC?
/*
ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
=> true,
*/
_ => false,
};
if !is_allowed {
2019-12-22 17:42:04 -05:00
if let BorrowKind::Mut { .. } = kind {
2019-11-26 10:00:41 -05:00
self.check_op(ops::MutBorrow);
} else {
self.check_op(ops::CellBorrow);
}
2019-11-22 15:52:59 -08:00
}
}
2019-12-22 17:42:04 -05:00
Rvalue::AddressOf(Mutability::Mut, _) => self.check_op(ops::MutAddressOf),
Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
| Rvalue::AddressOf(Mutability::Not, ref place) => {
2019-12-13 13:20:16 -08:00
let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
&self.ccx,
&mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
place.as_ref(),
);
if borrowed_place_has_mut_interior {
self.check_op(ops::CellBorrow);
}
2019-12-22 17:42:04 -05:00
}
2019-11-22 15:52:59 -08:00
Rvalue::Cast(
CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
_,
_,
) => {}
Rvalue::Cast(
CastKind::Pointer(
PointerCast::UnsafeFnPointer
| PointerCast::ClosureFnPointer(_)
| PointerCast::ReifyFnPointer,
),
_,
_,
) => self.check_op(ops::FnPtrCast),
Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, cast_ty) => {
if let Some(TypeAndMut { ty, .. }) = cast_ty.builtin_deref(true) {
let unsized_ty = self.tcx.struct_tail_erasing_lifetimes(ty, self.param_env);
// Casting/coercing things to slices is fine.
if let ty::Slice(_) | ty::Str = unsized_ty.kind() {
return;
}
}
self.check_op(ops::UnsizingCast);
}
2019-09-17 16:25:40 -07:00
Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
2020-04-12 10:28:41 -07:00
let operand_ty = operand.ty(self.body, self.tcx);
2019-09-17 16:25:40 -07:00
let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
2019-09-17 16:25:40 -07:00
self.check_op(ops::RawPtrToIntCast);
}
}
Rvalue::NullaryOp(NullOp::SizeOf, _) => {}
Rvalue::NullaryOp(NullOp::Box, _) => self.check_op(ops::HeapAllocation),
Rvalue::UnaryOp(_, ref operand) => {
let ty = operand.ty(self.body, self.tcx);
if !(ty.is_integral() || ty.is_bool()) {
self.check_op(ops::NonPrimitiveOp)
}
}
Rvalue::BinaryOp(op, ref lhs, ref rhs)
| Rvalue::CheckedBinaryOp(op, ref lhs, ref rhs) => {
let lhs_ty = lhs.ty(self.body, self.tcx);
let rhs_ty = rhs.ty(self.body, self.tcx);
if let ty::RawPtr(_) | ty::FnPtr(..) = lhs_ty.kind() {
assert_eq!(lhs_ty, rhs_ty);
2019-12-22 17:42:04 -05:00
assert!(
op == BinOp::Eq
|| op == BinOp::Ne
|| op == BinOp::Le
|| op == BinOp::Lt
|| op == BinOp::Ge
|| op == BinOp::Gt
|| op == BinOp::Offset
);
2019-09-17 16:25:40 -07:00
self.check_op(ops::RawPtrComparison);
}
if !(lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char())
|| !(rhs_ty.is_integral() || rhs_ty.is_bool() || rhs_ty.is_char())
{
self.check_op(ops::NonPrimitiveOp)
}
2019-09-17 16:25:40 -07:00
}
}
}
2019-12-22 17:42:04 -05:00
fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
self.super_operand(op, location);
if let Operand::Constant(c) = op {
if let Some(def_id) = c.check_static_ptr(self.tcx) {
2019-11-18 23:04:06 +00:00
self.check_static(def_id, self.span);
2019-09-17 16:25:40 -07:00
}
}
}
2019-10-02 19:11:08 -03:00
fn visit_projection_elem(
2019-09-17 16:25:40 -07:00
&mut self,
place_local: Local,
2019-10-02 19:11:08 -03:00
proj_base: &[PlaceElem<'tcx>],
2020-05-23 12:02:54 +02:00
elem: PlaceElem<'tcx>,
2019-09-17 16:25:40 -07:00
context: PlaceContext,
location: Location,
) {
trace!(
"visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
2019-10-02 19:11:08 -03:00
context={:?} location={:?}",
place_local,
2019-10-02 19:11:08 -03:00
proj_base,
elem,
2019-09-17 16:25:40 -07:00
context,
location,
);
self.super_projection_elem(place_local, proj_base, elem, context, location);
2019-09-17 16:25:40 -07:00
match elem {
ProjectionElem::Deref => {
2020-04-12 10:28:41 -07:00
let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
2020-08-03 00:49:11 +02:00
if let ty::RawPtr(_) = base_ty.kind() {
2019-11-18 23:04:06 +00:00
if proj_base.is_empty() {
if let (local, []) = (place_local, proj_base) {
let decl = &self.body.local_decls[local];
if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
2019-11-18 23:04:06 +00:00
let span = decl.source_info.span;
self.check_static(def_id, span);
return;
}
}
}
2019-09-17 16:25:40 -07:00
self.check_op(ops::RawPtrDeref);
}
2019-11-18 23:04:06 +00:00
if context.is_mutating_use() {
self.check_op(ops::MutDeref);
}
2019-09-17 16:25:40 -07:00
}
2019-12-22 17:42:04 -05:00
ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Downcast(..)
2019-12-22 17:42:04 -05:00
| ProjectionElem::Subslice { .. }
| ProjectionElem::Field(..)
| ProjectionElem::Index(_) => {
2020-04-12 10:28:41 -07:00
let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
2019-09-17 16:25:40 -07:00
match base_ty.ty_adt_def() {
Some(def) if def.is_union() => {
self.check_op(ops::UnionAccess);
}
_ => {}
}
}
}
}
fn visit_source_info(&mut self, source_info: &SourceInfo) {
trace!("visit_source_info: source_info={:?}", source_info);
self.span = source_info.span;
}
fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
trace!("visit_statement: statement={:?} location={:?}", statement, location);
match statement.kind {
2019-11-30 12:25:45 -05:00
StatementKind::Assign(..) | StatementKind::SetDiscriminant { .. } => {
2019-09-17 16:25:40 -07:00
self.super_statement(statement, location);
}
StatementKind::LlvmInlineAsm { .. } => {
2020-04-21 09:24:41 -07:00
self.super_statement(statement, location);
self.check_op(ops::InlineAsm);
}
2020-05-21 12:46:49 -07:00
StatementKind::FakeRead(..)
2019-12-22 17:42:04 -05:00
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
2019-12-22 17:42:04 -05:00
| StatementKind::Nop => {}
2019-09-17 16:25:40 -07:00
}
}
fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
use rustc_target::spec::abi::Abi::RustIntrinsic;
trace!("visit_terminator: terminator={:?} location={:?}", terminator, location);
self.super_terminator(terminator, location);
2019-09-17 16:25:40 -07:00
match &terminator.kind {
2019-09-17 16:25:40 -07:00
TerminatorKind::Call { func, .. } => {
let ConstCx { tcx, body, def_id: caller, param_env, .. } = *self.ccx;
let caller = caller.to_def_id();
let fn_ty = func.ty(body, tcx);
2019-09-17 16:25:40 -07:00
let (mut callee, substs) = match *fn_ty.kind() {
ty::FnDef(def_id, substs) => (def_id, substs),
2019-09-17 16:25:40 -07:00
ty::FnPtr(_) => {
self.check_op(ops::FnCallIndirect);
return;
}
_ => {
span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
2019-09-17 16:25:40 -07:00
}
};
if self.tcx.features().const_trait_impl {
let instance = Instance::resolve(tcx, param_env, callee, substs);
debug!("Resolving ({:?}) -> {:?}", callee, instance);
if let Ok(Some(func)) = instance {
2020-07-03 19:13:39 +02:00
if let InstanceDef::Item(def) = func.def {
callee = def.did;
}
}
}
// At this point, we are calling a function, `callee`, whose `DefId` is known...
if is_lang_panic_fn(tcx, callee) {
2019-09-17 16:25:40 -07:00
self.check_op(ops::Panic);
return;
}
// HACK: This is to "unstabilize" the `transmute` intrinsic
// within const fns. `transmute` is allowed in all other const contexts.
// This won't really scale to more intrinsics or functions. Let's allow const
// transmutes in const fn before we add more hacks to this.
if tcx.fn_sig(callee).abi() == RustIntrinsic
&& tcx.item_name(callee) == sym::transmute
{
self.check_op(ops::Transmute);
return;
}
if !tcx.is_const_fn_raw(callee) {
self.check_op(ops::FnCallNonConst(callee));
return;
}
// If the `const fn` we are trying to call is not const-stable, ensure that we have
// the proper feature gate enabled.
if let Some(gate) = is_unstable_const_fn(tcx, callee) {
if self.span.allows_unstable(gate) {
return;
}
// Calling an unstable function *always* requires that the corresponding gate
// be enabled, even if the function has `#[allow_internal_unstable(the_gate)]`.
if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
self.check_op(ops::FnCallUnstable(callee, Some(gate)));
return;
}
// If this crate is not using stability attributes, or the caller is not claiming to be a
// stable `const fn`, that is all that is required.
if !self.ccx.is_const_stable_const_fn() {
return;
}
// Otherwise, we are something const-stable calling a const-unstable fn.
if super::allow_internal_unstable(tcx, caller, gate) {
return;
}
self.check_op(ops::FnCallUnstable(callee, Some(gate)));
return;
}
// FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
// have no `rustc_const_stable` attributes to be const-unstable as well. This
// should be fixed later.
let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
&& tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
if callee_is_unstable_unmarked {
if self.ccx.is_const_stable_const_fn() {
self.check_op(ops::FnCallUnstable(callee, None));
2019-09-17 16:25:40 -07:00
}
}
}
// Forbid all `Drop` terminators unless the place being dropped is a local with no
// projections that cannot be `NeedsDrop`.
TerminatorKind::Drop { place: dropped_place, .. }
| TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
2020-05-03 11:18:26 -07:00
// If we are checking live drops after drop-elaboration, don't emit duplicate
// errors here.
2020-09-17 11:05:51 -07:00
if super::post_drop_elaboration::checking_enabled(self.ccx) {
2020-05-03 11:18:26 -07:00
return;
}
2019-09-17 16:25:40 -07:00
let mut err_span = self.span;
// Check to see if the type of this place can ever have a drop impl. If not, this
// `Drop` terminator is frivolous.
2019-12-22 17:42:04 -05:00
let ty_needs_drop =
2020-04-12 10:28:41 -07:00
dropped_place.ty(self.body, self.tcx).ty.needs_drop(self.tcx, self.param_env);
2019-09-17 16:25:40 -07:00
if !ty_needs_drop {
return;
}
let needs_drop = if let Some(local) = dropped_place.as_local() {
2019-09-17 16:25:40 -07:00
// Use the span where the local was declared as the span of the drop error.
err_span = self.body.local_decls[local].source_info.span;
self.qualifs.needs_drop(self.ccx, local, location)
2019-09-17 16:25:40 -07:00
} else {
true
};
if needs_drop {
2020-06-19 14:46:04 -05:00
self.check_op_spanned(
ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
2020-06-19 14:46:04 -05:00
err_span,
);
2019-09-17 16:25:40 -07:00
}
}
TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
TerminatorKind::Abort => self.check_op(ops::Abort),
TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
self.check_op(ops::Generator)
2020-02-14 18:17:50 +00:00
}
TerminatorKind::Assert { .. }
2020-06-02 09:15:24 +02:00
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::Return
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::Unreachable => {}
2019-09-17 16:25:40 -07:00
}
}
}
/// Emits a fulfillment error unless the return type of `body` implements `Sync`.
///
/// Called from `check_body` for non-thread-local `static`s, whose final value must be
/// shareable across threads.
fn check_return_ty_is_sync(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, hir_id: HirId) {
    let ty = body.return_ty();
    tcx.infer_ctxt().enter(|infcx| {
        let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic);
        let mut fulfillment_cx = traits::FulfillmentContext::new();
        let sync_def_id = tcx.require_lang_item(LangItem::Sync, Some(body.span));
        fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause);
        if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
            infcx.report_fulfillment_errors(&err, None, false);
        }
    });
}
2019-11-22 15:52:59 -08:00
fn place_as_reborrow(
tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
place: Place<'tcx>,
2019-11-22 15:52:59 -08:00
) -> Option<&'a [PlaceElem<'tcx>]> {
2019-12-22 17:42:04 -05:00
place.projection.split_last().and_then(|(outermost, inner)| {
if outermost != &ProjectionElem::Deref {
return None;
}
2019-11-22 15:52:59 -08:00
2019-12-22 17:42:04 -05:00
// A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
// that points to the allocation for the static. Don't treat these as reborrows.
if body.local_decls[place.local].is_ref_to_static() {
return None;
2019-12-22 17:42:04 -05:00
}
2019-12-22 17:42:04 -05:00
// Ensure the type being derefed is a reference and not a raw pointer.
//
// This is sufficient to prevent an access to a `static mut` from being marked as a
// reborrow, even if the check above were to disappear.
2020-01-14 02:21:42 -03:00
let inner_ty = Place::ty_from(place.local, inner, body, tcx).ty;
2020-08-03 00:49:11 +02:00
match inner_ty.kind() {
2019-12-22 17:42:04 -05:00
ty::Ref(..) => Some(inner),
_ => None,
}
})
2019-11-22 15:52:59 -08:00
}