
Rollup merge of #78351 - RalfJung:validity-unsafe-cell, r=oli-obk

Move "mutable thing in const" check from interning to validity

This moves the check for mutable things (such as `UnsafeCell` or `&mut`) in a `const` from interning to validity. That means we can give more targeted error messages (pointing out *where* the problem lies), and we can simplify interning a bit.
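For illustration, a minimal sketch of the kind of code affected (an assumed example, not taken from this PR's test suite). Const-checking normally rejects interior mutability behind references even earlier; the validity check acts as a backstop during CTFE, and after this PR it can name the path to the offending part of the value:

```rust
use std::cell::UnsafeCell;

// The `UnsafeCell` sits behind a reference, i.e. in an "inner" allocation
// that all uses of the const would share, so CTFE validation rejects it
// with a targeted message ("`UnsafeCell` in a `const`") pointing into the
// value, instead of a generic interning error.
const C: &UnsafeCell<i32> = &UnsafeCell::new(0);
```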

Also fix the interning mode used for promoteds in statics.

r? @oli-obk
Commit 54ea0f9ccd by Dylan DPC, 2020-10-28 01:21:18 +01:00 (committed by GitHub)
10 changed files with 135 additions and 144 deletions

View file

@@ -1,8 +1,8 @@
use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr, MemoryExtra};
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, GlobalId, Immediate,
InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
ScalarMaybeUninit, StackPopCleanup,
};
@@ -59,23 +59,15 @@ fn eval_body_using_ecx<'mir, 'tcx>(
ecx.run()?;
// Intern the result
// FIXME: since the DefId of a promoted is the DefId of its owner, this
// means that promoteds in statics are actually interned like statics!
// However, this is also currently crucial because we promote mutable
// non-empty slices in statics to extend their lifetime, and this
// ensures that they are put into a mutable allocation.
// For other kinds of promoteds in statics (like array initializers), this is rather silly.
let intern_kind = match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None if cid.promoted.is_some() => InternKind::Promoted,
_ => InternKind::Constant,
let intern_kind = if cid.promoted.is_some() {
InternKind::Promoted
} else {
match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None => InternKind::Constant,
}
};
intern_const_alloc_recursive(
ecx,
intern_kind,
ret,
body.ignore_interior_mut_in_const_validation,
);
intern_const_alloc_recursive(ecx, intern_kind, ret);
debug!("eval_body_using_ecx done: {:?}", *ret);
Ok(ret)
@@ -376,16 +368,23 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
// Since evaluation had no errors, validate the resulting constant:
let validation = try {
// FIXME do not validate promoteds until a decision on
// https://github.com/rust-lang/rust/issues/67465 is made
// https://github.com/rust-lang/rust/issues/67465 and
// https://github.com/rust-lang/rust/issues/67534 is made.
// Promoteds can contain unexpected `UnsafeCell` and reference `static`s, but their
// otherwise restricted form ensures that this is still sound. We just lose the
// extra safety net of some of the dynamic checks. They can also contain invalid
// values, but since we do not usually check intermediate results of a computation
// for validity, it might be surprising to do that here.
if cid.promoted.is_none() {
let mut ref_tracking = RefTracking::new(mplace);
let mut inner = false;
while let Some((mplace, path)) = ref_tracking.todo.pop() {
ecx.const_validate_operand(
mplace.into(),
path,
&mut ref_tracking,
/*may_ref_to_static*/ ecx.memory.extra.can_access_statics,
)?;
let mode = match tcx.static_mutability(cid.instance.def_id()) {
Some(_) => CtfeValidationMode::Regular, // a `static`
None => CtfeValidationMode::Const { inner },
};
ecx.const_validate_operand(mplace.into(), path, &mut ref_tracking, mode)?;
inner = true;
}
}
};
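To make the `inner` distinction concrete, a hedged sketch (assumed example, not from this PR): interior mutability is tolerated in the top-level allocation of a `const`, because that allocation is copied at each use site, but not in inner allocations, which are implicitly shared:

```rust
use std::cell::Cell;

// OK: the `Cell` lives in the const's top-level allocation; every use site
// gets a fresh copy, so no mutation is observable across uses.
const PER_USE: Cell<i32> = Cell::new(0);

// Not OK: the `Cell` sits behind a reference, i.e. in an *inner* allocation
// shared by all uses of the const. This is what `Const { inner: true }`
// rejects during validation.
// const SHARED: &Cell<i32> = &Cell::new(0);
```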

View file

@@ -29,7 +29,7 @@ pub(crate) fn const_caller_location(
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);
let loc_place = ecx.alloc_caller_location(file, line, col);
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place, false);
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place);
ConstValue::Scalar(loc_place.ptr)
}

View file

@@ -2,12 +2,23 @@
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability for the allocations in a `static` initializer: we want to make
//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
//! same time, we need to make memory that could be mutated by the program mutable to avoid
//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
//! always immutable. At least for `const` however we use this opportunity to reject any `const`
//! that contains allocations whose mutability we cannot identify.)
use super::validity::RefTracking;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, query::TyCtxtAt, Ty};
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
use rustc_target::abi::Size;
use rustc_ast::Mutability;
@@ -40,11 +51,6 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> {
/// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
/// the intern mode of references we encounter.
inside_unsafe_cell: bool,
/// This flag avoids triggering the "`UnsafeCell`s are not allowed behind references in constants"
/// error for promoteds.
/// It's a copy of `mir::Body`'s `ignore_interior_mut_in_const_validation` field.
ignore_interior_mut_in_const: bool,
}
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
@@ -53,22 +59,14 @@ enum InternMode {
/// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
/// is *mutable*.
Static(hir::Mutability),
/// The "base value" of a const, which can have `UnsafeCell` (as in `const FOO: Cell<i32>`),
/// but that interior mutability is simply ignored.
ConstBase,
/// The "inner values" of a const with references, where `UnsafeCell` is an error.
ConstInner,
/// A `const`.
Const,
}
/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics
struct IsStaticOrFn;
fn mutable_memory_in_const(tcx: TyCtxtAt<'_>, kind: &str) {
// FIXME: show this in validation instead so we can point at where in the value the error is?
tcx.sess.span_err(tcx.span, &format!("mutable memory ({}) is not allowed in constant", kind));
}
/// Intern an allocation without looking at its children.
/// `mode` is the mode of the environment where we found this pointer.
/// `mutability` is the mutability of the place to be interned; even if that says
@@ -129,9 +127,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>(
// See const_eval::machine::MemoryExtra::can_access_statics for why
// immutability is so important.
// There are no sensible checks we can do here; grep for `mutable_memory_in_const` to
// find the checks we are doing elsewhere to avoid even getting here for memory
// that "wants" to be mutable.
// Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
alloc.mutability = Mutability::Not;
};
// link the alloc id to the actual allocation
@@ -167,17 +163,13 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
mplace: MPlaceTy<'tcx>,
fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
) -> InterpResult<'tcx> {
// ZSTs cannot contain pointers, so we can skip them.
if mplace.layout.is_zst() {
return Ok(());
}
if let Some(def) = mplace.layout.ty.ty_adt_def() {
if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
if self.mode == InternMode::ConstInner && !self.ignore_interior_mut_in_const {
// We do not actually make this memory mutable. But in case the user
// *expected* it to be mutable, make sure we error. This is just a
// sanity check to prevent users from accidentally exploiting the UB
// they caused. It also helps us to find cases where const-checking
// failed to prevent an `UnsafeCell` (but as `ignore_interior_mut_in_const`
// shows that part is not airtight).
mutable_memory_in_const(self.ecx.tcx, "`UnsafeCell`");
}
// We are crossing over an `UnsafeCell`, we can mutate again. This means that
// References we encounter inside here are interned as pointing to mutable
// allocations.
@@ -189,11 +181,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
}
}
// ZSTs cannot contain pointers, so we can skip them.
if mplace.layout.is_zst() {
return Ok(());
}
self.walk_aggregate(mplace, fields)
}
@@ -213,7 +200,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
if let Scalar::Ptr(vtable) = mplace.meta.unwrap_meta() {
// Explicitly choose const mode here, since vtables are immutable, even
// if the reference of the fat pointer is mutable.
self.intern_shallow(vtable.alloc_id, InternMode::ConstInner, None);
self.intern_shallow(vtable.alloc_id, InternMode::Const, None);
} else {
// Validation will error (with a better message) on an invalid vtable pointer.
// Let validation show the error message, but make sure it *does* error.
@@ -225,7 +212,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
// Only recurse for allocation-backed pointers.
if let Scalar::Ptr(ptr) = mplace.ptr {
// Compute the mode with which we intern this. Our goal here is to make as many
// statics as we can immutable so they can be placed in const memory by LLVM.
// statics as we can immutable so they can be placed in read-only memory by LLVM.
let ref_mode = match self.mode {
InternMode::Static(mutbl) => {
// In statics, merge outer mutability with reference mutability and
@@ -259,27 +246,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
}
}
}
InternMode::ConstBase | InternMode::ConstInner => {
// Ignore `UnsafeCell`, everything is immutable. Do some sanity checking
// for mutable references that we encounter -- they must all be ZST.
// This helps to prevent users from accidentally exploiting UB that they
// caused (by somehow getting a mutable reference in a `const`).
if ref_mutability == Mutability::Mut {
match referenced_ty.kind() {
ty::Array(_, n) if n.eval_usize(*tcx, self.ecx.param_env) == 0 => {}
ty::Slice(_)
if mplace.meta.unwrap_meta().to_machine_usize(self.ecx)?
== 0 => {}
_ => mutable_memory_in_const(tcx, "`&mut`"),
}
} else {
// A shared reference. We cannot check `freeze` here due to references
// like `&dyn Trait` that are actually immutable. We do check for
// concrete `UnsafeCell` when traversing the pointee though (if it is
// a new allocation, not yet interned).
}
// Go on with the "inner" rules.
InternMode::ConstInner
InternMode::Const => {
// Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
// checking for mutable references that we encounter -- they must all be
// ZST.
InternMode::Const
}
};
match self.intern_shallow(ptr.alloc_id, ref_mode, Some(referenced_ty)) {
@@ -318,7 +289,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
ecx: &mut InterpCx<'mir, 'tcx, M>,
intern_kind: InternKind,
ret: MPlaceTy<'tcx>,
ignore_interior_mut_in_const: bool,
) where
'tcx: 'mir,
{
@@ -327,7 +297,7 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
InternKind::Static(mutbl) => InternMode::Static(mutbl),
// `Constant` includes array lengths.
// `Promoted` includes non-`Copy` array initializers and `rustc_args_required_const` arguments.
InternKind::Constant | InternKind::Promoted => InternMode::ConstBase,
InternKind::Constant | InternKind::Promoted => InternMode::Const,
};
// Type based interning.
@@ -357,7 +327,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
ecx,
mode,
leftover_allocations,
ignore_interior_mut_in_const,
inside_unsafe_cell: false,
}
.visit_value(mplace);
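As a concrete illustration of the mutability decision the module doc above describes, a hedged sketch (assumed examples, not from this PR) of two `static`s whose allocations get opposite intern modes:

```rust
use std::sync::atomic::{AtomicI32, Ordering};

// `AtomicI32` contains an `UnsafeCell`, so the type-based traversal must
// intern this static's allocation as *mutable*: runtime stores through
// `FLAG` would otherwise write to read-only memory.
static FLAG: AtomicI32 = AtomicI32::new(0);

// Nothing in this value can ever be mutated, so the allocation can be
// interned immutably and placed in read-only memory by LLVM.
static GREETING: &str = "hello";

fn main() {
    FLAG.store(1, Ordering::Relaxed); // fine: the allocation is mutable
    assert_eq!(GREETING.len(), 5);
}
```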

View file

@@ -24,7 +24,7 @@ pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackP
pub use self::memory::{AllocCheck, FnVal, Memory, MemoryKind};
pub use self::operand::{ImmTy, Immediate, OpTy, Operand};
pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy};
pub use self::validity::RefTracking;
pub use self::validity::{CtfeValidationMode, RefTracking};
pub use self::visitor::{MutValueVisitor, ValueVisitor};
crate use self::intrinsics::eval_nullary_intrinsic;

View file

@@ -113,6 +113,17 @@ pub enum PathElem {
DynDowncast,
}
/// Extra things to check for during validation of CTFE results.
pub enum CtfeValidationMode {
/// Regular validation, nothing special happening.
Regular,
/// Validation of a `const`. `inner` says if this is an inner, indirect allocation (as opposed
/// to the top-level const allocation).
/// Being an inner allocation makes a difference because the top-level allocation of a `const`
/// is copied for each use, but the inner allocations are implicitly shared.
Const { inner: bool },
}
/// State for tracking recursive validation of references
pub struct RefTracking<T, PATH = ()> {
pub seen: FxHashSet<T>,
@@ -202,9 +213,9 @@ struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// starts must not be changed! `visit_fields` and `visit_array` rely on
/// this stack discipline.
path: Vec<PathElem>,
ref_tracking_for_consts:
Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
may_ref_to_static: bool,
ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
/// `None` indicates this is not validating for CTFE (but for runtime).
ctfe_mode: Option<CtfeValidationMode>,
ecx: &'rt InterpCx<'mir, 'tcx, M>,
}
@@ -418,7 +429,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
{ "a dangling {} (use-after-free)", kind },
);
// Recursive checking
if let Some(ref mut ref_tracking) = self.ref_tracking_for_consts {
if let Some(ref mut ref_tracking) = self.ref_tracking {
if let Some(ptr) = ptr {
// not a ZST
// Skip validation entirely for some external statics
@@ -426,19 +437,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
if let Some(GlobalAlloc::Static(did)) = alloc_kind {
assert!(!self.ecx.tcx.is_thread_local_static(did));
assert!(self.ecx.tcx.is_static(did));
if self.may_ref_to_static {
// We skip checking other statics. These statics must be sound by
// themselves, and the only way to get broken statics here is by using
// unsafe code.
// The reasons we don't check other statics is twofold. For one, in all
// sound cases, the static was already validated on its own, and second, we
// trigger cycle errors if we try to compute the value of the other static
// and that static refers back to us.
// We might miss const-invalid data,
// but things are still sound otherwise (in particular re: consts
// referring to statics).
return Ok(());
} else {
if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
// See const_eval::machine::MemoryExtra::can_access_statics for why
// this check is so important.
// This check is reachable when the const just referenced the static,
@@ -447,6 +446,17 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
{ "a {} pointing to a static variable", kind }
);
}
// We skip checking other statics. These statics must be sound by
// themselves, and the only way to get broken statics here is by using
// unsafe code.
// The reasons we don't check other statics is twofold. For one, in all
// sound cases, the static was already validated on its own, and second, we
// trigger cycle errors if we try to compute the value of the other static
// and that static refers back to us.
// We might miss const-invalid data,
// but things are still sound otherwise (in particular re: consts
// referring to statics).
return Ok(());
}
}
// Proceed recursively even for ZST, no reason to skip them!
@@ -504,7 +514,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
let value = self.ecx.read_scalar(value)?;
// NOTE: Keep this in sync with the array optimization for int/float
// types below!
if self.ref_tracking_for_consts.is_some() {
if self.ctfe_mode.is_some() {
// Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
let is_bits = value.check_init().map_or(false, |v| v.is_bits());
if !is_bits {
@@ -532,7 +542,17 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
}
Ok(true)
}
ty::Ref(..) => {
ty::Ref(_, ty, mutbl) => {
if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. }))
&& *mutbl == hir::Mutability::Mut
{
// A mutable reference inside a const? That does not seem right (except if it is
// a ZST).
let layout = self.ecx.layout_of(ty)?;
if !layout.is_zst() {
throw_validation_failure!(self.path, { "mutable reference in a `const`" });
}
}
self.check_safe_pointer(value, "reference")?;
Ok(true)
}
@@ -723,6 +743,15 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
// Sanity check: `builtin_deref` does not know any pointers that are not primitive.
assert!(op.layout.ty.builtin_deref(true).is_none());
// Special check preventing `UnsafeCell` in constants
if let Some(def) = op.layout.ty.ty_adt_def() {
if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { inner: true }))
&& Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type()
{
throw_validation_failure!(self.path, { "`UnsafeCell` in a `const`" });
}
}
// Recursively walk the value at its type.
self.walk_value(op)?;
@@ -814,7 +843,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
self.ecx,
ptr,
size,
/*allow_uninit_and_ptr*/ self.ref_tracking_for_consts.is_none(),
/*allow_uninit_and_ptr*/ self.ctfe_mode.is_none(),
) {
// In the happy case, we needn't check anything else.
Ok(()) => {}
@@ -865,16 +894,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&self,
op: OpTy<'tcx, M::PointerTag>,
path: Vec<PathElem>,
ref_tracking_for_consts: Option<
&mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>,
>,
may_ref_to_static: bool,
ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
ctfe_mode: Option<CtfeValidationMode>,
) -> InterpResult<'tcx> {
trace!("validate_operand_internal: {:?}, {:?}", *op, op.layout.ty);
// Construct a visitor
let mut visitor =
ValidityVisitor { path, ref_tracking_for_consts, may_ref_to_static, ecx: self };
let mut visitor = ValidityVisitor { path, ref_tracking, ctfe_mode, ecx: self };
// Try to cast to ptr *once* instead of all the time.
let op = self.force_op_ptr(op).unwrap_or(op);
@@ -902,16 +928,18 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// `ref_tracking` is used to record references that we encounter so that they
/// can be checked recursively by an outside driving loop.
///
/// `may_ref_to_static` controls whether references are allowed to point to statics.
/// `ctfe_mode` controls whether this must satisfy the rules for constants:
/// - no pointers to statics.
/// - no `UnsafeCell` or non-ZST `&mut`.
#[inline(always)]
pub fn const_validate_operand(
&self,
op: OpTy<'tcx, M::PointerTag>,
path: Vec<PathElem>,
ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>,
may_ref_to_static: bool,
ctfe_mode: CtfeValidationMode,
) -> InterpResult<'tcx> {
self.validate_operand_internal(op, path, Some(ref_tracking), may_ref_to_static)
self.validate_operand_internal(op, path, Some(ref_tracking), Some(ctfe_mode))
}
/// This function checks the data at `op` to be runtime-valid.
@@ -919,6 +947,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// It will error if the bits at the destination do not match the ones described by the layout.
#[inline(always)]
pub fn validate_operand(&self, op: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
self.validate_operand_internal(op, vec![], None, false)
self.validate_operand_internal(op, vec![], None, None)
}
}
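For the new `ty::Ref` arm above, a hedged sketch of what it rejects (assumed example, not from the PR's tests). The ZST escape hatch exists because a mutable reference to a zero-sized pointee has no memory behind it that could actually be mutated:

```rust
// Rejected: a non-ZST `&mut` in the final value of a `const` would let
// every user of the const mutate the same shared allocation. Const-checking
// normally catches this even earlier; the validity error added here
// ("mutable reference in a `const`") is the dynamic backstop.
const BAD: &mut i32 = &mut 0;
```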

View file

@@ -9,7 +9,6 @@ use rustc_hir::def::DefKind;
use rustc_hir::HirId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::mir::visit::{
MutVisitor, MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor,
};
@@ -28,9 +27,10 @@ use rustc_trait_selection::traits;
use crate::const_eval::ConstEvalErr;
use crate::interpret::{
self, compile_time_machine, truncate, AllocId, Allocation, ConstValue, Frame, ImmTy, Immediate,
InterpCx, LocalState, LocalValue, MemPlace, Memory, MemoryKind, OpTy, Operand as InterpOperand,
PlaceTy, Pointer, ScalarMaybeUninit, StackPopCleanup,
self, compile_time_machine, truncate, AllocId, Allocation, ConstValue, CtfeValidationMode,
Frame, ImmTy, Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemPlace, Memory,
MemoryKind, OpTy, Operand as InterpOperand, PlaceTy, Pointer, Scalar, ScalarMaybeUninit,
StackPopCleanup,
};
use crate::transform::MirPass;
@@ -805,8 +805,9 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
value,
vec![],
// FIXME: is ref tracking too expensive?
// FIXME: what is the point of ref tracking if we do not even check the tracked refs?
&mut interpret::RefTracking::empty(),
/*may_ref_to_static*/ true,
CtfeValidationMode::Regular,
) {
trace!("validation error, attempt failed: {:?}", e);
return;

View file

@@ -1170,7 +1170,7 @@ pub fn promote_candidates<'tcx>(
let mut scope = body.source_scopes[candidate.source_info(body).scope].clone();
scope.parent_scope = None;
let mut promoted = Body::new(
let promoted = Body::new(
body.source, // `promoted` gets filled in below
IndexVec::new(),
IndexVec::from_elem_n(scope, 1),
@@ -1181,7 +1181,6 @@
body.span,
body.generator_kind,
);
promoted.ignore_interior_mut_in_const_validation = true;
let promoter = Promoter {
promoted,