
Retire `to_ptr`, which should already have no users but still kept getting new ones

Oliver Scherer 2019-12-14 00:04:27 +01:00
parent 1e40681f50
commit b5b5258d74
5 changed files with 19 additions and 19 deletions
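
At each call site the change is mechanical: the fallible `to_ptr` (which returns an `InterpResult` even where failure is impossible) is replaced by `assert_ptr`, which panics instead, and `to_ptr` itself becomes private so that only `assert_ptr` and `force_ptr` can reach it. A minimal, self-contained sketch of that pattern, using toy stand-in types rather than the actual rustc definitions:

// Toy stand-ins for `Scalar` and `InterpResult` -- hypothetical, for
// illustration only; the real rustc types are richer.
#[derive(Debug, Clone, Copy)]
enum Scalar {
    Raw { data: u128 },
    Ptr { alloc_id: u64, offset: u64 },
}

#[derive(Debug)]
struct InterpError;

impl Scalar {
    // Private fallible conversion: keeping it private means no new call
    // sites can quietly start depending on it again.
    fn to_ptr(self) -> Result<(u64, u64), InterpError> {
        match self {
            Scalar::Ptr { alloc_id, offset } => Ok((alloc_id, offset)),
            Scalar::Raw { .. } => Err(InterpError),
        }
    }

    // For call sites that statically know the value is a pointer: a failure
    // here is a compiler bug, so panic instead of threading an error along.
    fn assert_ptr(self) -> (u64, u64) {
        self.to_ptr().expect("expected a Pointer but got raw bits")
    }
}

fn main() {
    let p = Scalar::Ptr { alloc_id: 1, offset: 0 };
    assert_eq!(p.assert_ptr(), (1, 0));
    assert!(Scalar::Raw { data: 42 }.to_ptr().is_err());
}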

View file

@@ -367,8 +367,9 @@ impl<'tcx, Tag> Scalar<Tag> {
     }

     /// Do not call this method! Use either `assert_ptr` or `force_ptr`.
+    /// This method is intentionally private, do not make it public.
     #[inline]
-    pub fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
+    fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
         match self {
             Scalar::Raw { data: 0, .. } => throw_unsup!(InvalidNullPointerUsage),
             Scalar::Raw { .. } => throw_unsup!(ReadBytesAsPointer),
@@ -544,12 +545,6 @@ impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
         }
     }

-    /// Do not call this method! Use either `assert_ptr` or `force_ptr`.
-    #[inline(always)]
-    pub fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
-        self.not_undef()?.to_ptr()
-    }
-
     /// Do not call this method! Use either `assert_bits` or `force_bits`.
     #[inline(always)]
     pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
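
The deleted `ScalarMaybeUndef::to_ptr` wrapper gets no direct replacement; a former caller now spells out both steps itself. A hedged sketch extending the toy types above (`not_undef` mirrors the real helper's name):

// Extends the toy sketch above -- hypothetical, for illustration only.
#[derive(Debug, Clone, Copy)]
enum ScalarMaybeUndef {
    Scalar(Scalar),
    Undef,
}

impl ScalarMaybeUndef {
    // Undefined bytes are a legitimate, user-visible error, so this part
    // stays fallible.
    fn not_undef(self) -> Result<Scalar, InterpError> {
        match self {
            ScalarMaybeUndef::Scalar(s) => Ok(s),
            ScalarMaybeUndef::Undef => Err(InterpError),
        }
    }
}

// Before the commit, val.to_ptr()? hid the undef check and the pointer
// check behind one error path. Afterwards both steps are explicit:
// undef remains an error, while a non-pointer is a bug and panics.
fn alloc_id_of(val: ScalarMaybeUndef) -> Result<u64, InterpError> {
    let (alloc_id, _offset) = val.not_undef()?.assert_ptr();
    Ok(alloc_id)
}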

View file

@@ -537,8 +537,8 @@ pub fn super_relate_consts<R: TypeRelation<'tcx>>(
             Ok(ConstValue::Scalar(a_val))
         } else if let ty::FnPtr(_) = a.ty.kind {
             let alloc_map = tcx.alloc_map.lock();
-            let a_instance = alloc_map.unwrap_fn(a_val.to_ptr().unwrap().alloc_id);
-            let b_instance = alloc_map.unwrap_fn(b_val.to_ptr().unwrap().alloc_id);
+            let a_instance = alloc_map.unwrap_fn(a_val.assert_ptr().alloc_id);
+            let b_instance = alloc_map.unwrap_fn(b_val.assert_ptr().alloc_id);
             if a_instance == b_instance {
                 Ok(ConstValue::Scalar(a_val))
             } else {

View file

@@ -119,7 +119,7 @@ pub(super) fn op_to_const<'tcx>(
     };
     let val = match immediate {
         Ok(mplace) => {
-            let ptr = mplace.ptr.to_ptr().unwrap();
+            let ptr = mplace.ptr.assert_ptr();
             let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
             ConstValue::ByRef { alloc, offset: ptr.offset }
         }
@@ -133,7 +133,7 @@ pub(super) fn op_to_const<'tcx>(
             // comes from a constant so it can happen have `Undef`, because the indirect
             // memory that was read had undefined bytes.
             let mplace = op.assert_mem_place();
-            let ptr = mplace.ptr.to_ptr().unwrap();
+            let ptr = mplace.ptr.assert_ptr();
             let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
             ConstValue::ByRef { alloc, offset: ptr.offset }
         }
@@ -176,7 +176,7 @@ fn validate_and_turn_into_const<'tcx>(
         // Statics/promoteds are always `ByRef`, for the rest `op_to_const` decides
         // whether they become immediates.
         if is_static || cid.promoted.is_some() {
-            let ptr = mplace.ptr.to_ptr()?;
+            let ptr = mplace.ptr.assert_ptr();
             Ok(tcx.mk_const(ty::Const {
                 val: ty::ConstKind::Value(ConstValue::ByRef {
                     alloc: ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),

View file

@@ -743,7 +743,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         // FIXME: should we tell the user that there was a local which was never written to?
         if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
             trace!("deallocating local");
-            let ptr = ptr.to_ptr()?;
+            // All locals have a backing allocation, even if the allocation is empty
+            // due to the local having ZST type.
+            let ptr = ptr.assert_ptr();
             if log_enabled!(::log::Level::Trace) {
                 self.memory.dump_alloc(ptr.alloc_id);
             }
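
The new comment records the invariant that makes the panic unreachable: every live, indirect local is backed by a real allocation, even a zero-sized one. A toy illustration of that invariant (hypothetical `Memory`, not the rustc type of the same name):

// Continues the toy sketch above. Even a size-0 request yields an
// allocation id, so a live local's pointer is always `Scalar::Ptr` and
// `assert_ptr` cannot fail on it.
struct Memory {
    next_id: u64,
}

impl Memory {
    fn allocate(&mut self, _size: u64) -> Scalar {
        // _size may be 0 for a ZST local: the allocation is empty,
        // but it still exists and has an id.
        let alloc_id = self.next_id;
        self.next_id += 1;
        Scalar::Ptr { alloc_id, offset: 0 }
    }
}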

View file

@@ -191,11 +191,12 @@ impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx
             if let ty::Dynamic(..) =
                 self.ecx.tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind
             {
-                if let Ok(vtable) = mplace.meta.unwrap().to_ptr() {
-                    // explitly choose `Immutable` here, since vtables are immutable, even
-                    // if the reference of the fat pointer is mutable
-                    self.intern_shallow(vtable.alloc_id, Mutability::Not, None)?;
-                }
+                // Validation has already errored on an invalid vtable pointer so this `assert_ptr`
+                // will never panic.
+                let vtable = mplace.meta.unwrap().assert_ptr();
+                // explitly choose `Immutable` here, since vtables are immutable, even
+                // if the reference of the fat pointer is mutable
+                self.intern_shallow(vtable.alloc_id, Mutability::Not, None)?;
             }
             // Check if we have encountered this pointer+layout combination before.
             // Only recurse for allocation-backed pointers.
@@ -280,7 +281,9 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
         ecx,
         leftover_allocations,
         base_intern_mode,
-        ret.ptr.to_ptr()?.alloc_id,
+        // The outermost allocation must exist, because we allocated it with
+        // `Memory::allocate`.
+        ret.ptr.assert_ptr().alloc_id,
         base_mutability,
         Some(ret.layout.ty),
     )?;
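
The net effect of the commit shows up at call sites like the one above: once the impossible error path is asserted rather than propagated, the `?` plumbing disappears. In the same toy terms as the sketches above:

// Continues the toy sketch above -- hypothetical, for illustration only.

// Before: an error that could never legitimately fire still forced the
// caller to be fallible.
fn alloc_id_before(s: Scalar) -> Result<u64, InterpError> {
    Ok(s.to_ptr()?.0)
}

// After: the invariant is asserted once and the fallibility is gone.
fn alloc_id_after(s: Scalar) -> u64 {
    s.assert_ptr().0
}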