// compiler/rustc_const_eval/src/interpret/cast.rs
use std::assert_matches::assert_matches;
use std::convert::TryFrom;
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::mir::CastKind;
use rustc_middle::ty::adjustment::PointerCast;
use rustc_middle::ty::layout::{IntegerExt, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, FloatTy, Ty, TypeAndMut};
use rustc_target::abi::Integer;
use rustc_type_ir::sty::TyKind::*;
use super::{
util::ensure_monomorphic_enough, FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy,
};
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Entry point for all MIR `Cast` statements: reads the `src` operand,
/// performs the cast described by `cast_kind` to produce a value of type
/// `cast_ty`, and writes the result into `dest`.
pub fn cast(
    &mut self,
    src: &OpTy<'tcx, M::Provenance>,
    cast_kind: CastKind,
    cast_ty: Ty<'tcx>,
    dest: &PlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx> {
    use rustc_middle::mir::CastKind::*;
    // FIXME: In which cases should we trigger UB when the source is uninit?
    match cast_kind {
        Pointer(PointerCast::Unsize) => {
            // Unsizing coercion, e.g. `&[T; N]` -> `&[T]` or `&T` -> `&dyn Trait`.
            let cast_ty = self.layout_of(cast_ty)?;
            self.unsize_into(src, cast_ty, dest)?;
        }

        PointerExposeAddress => {
            // Pointer-to-int cast that also "exposes" the pointer's provenance.
            let src = self.read_immediate(src)?;
            let res = self.pointer_expose_address_cast(&src, cast_ty)?;
            self.write_immediate(res, dest)?;
        }

        PointerFromExposedAddress => {
            // Int-to-pointer cast; the machine may attach previously exposed provenance.
            let src = self.read_immediate(src)?;
            let res = self.pointer_from_exposed_address_cast(&src, cast_ty)?;
            self.write_immediate(res, dest)?;
        }

        Misc => {
            // All the remaining value casts: numeric, char, bool, fn-ptr-like.
            let src = self.read_immediate(src)?;
            let res = self.misc_cast(&src, cast_ty)?;
            self.write_immediate(res, dest)?;
        }

        Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer) => {
            // These are NOPs, but can be wide pointers.
            let v = self.read_immediate(src)?;
            self.write_immediate(*v, dest)?;
        }

        Pointer(PointerCast::ReifyFnPointer) => {
            // The src operand does not matter, just its type
            match *src.layout.ty.kind() {
                ty::FnDef(def_id, substs) => {
                    // All reifications must be monomorphic, bail out otherwise.
                    ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                    let instance = ty::Instance::resolve_for_fn_ptr(
                        *self.tcx,
                        self.param_env,
                        def_id,
                        substs,
                    )
                    .ok_or_else(|| err_inval!(TooGeneric))?;

                    // Allocate a function "allocation" and write its address as the result.
                    let fn_ptr = self.create_fn_alloc_ptr(FnVal::Instance(instance));
                    self.write_pointer(fn_ptr, dest)?;
                }
                _ => span_bug!(self.cur_span(), "reify fn pointer on {:?}", src.layout.ty),
            }
        }

        Pointer(PointerCast::UnsafeFnPointer) => {
            let src = self.read_immediate(src)?;
            match cast_ty.kind() {
                ty::FnPtr(_) => {
                    // No change to value
                    self.write_immediate(*src, dest)?;
                }
                _ => span_bug!(self.cur_span(), "fn to unsafe fn cast on {:?}", cast_ty),
            }
        }

        Pointer(PointerCast::ClosureFnPointer(_)) => {
            // The src operand does not matter, just its type
            match *src.layout.ty.kind() {
                ty::Closure(def_id, substs) => {
                    // All reifications must be monomorphic, bail out otherwise.
                    ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                    // Non-capturing closures coerce to fn pointers; resolve the
                    // closure as an `FnOnce` instance and reify that.
                    let instance = ty::Instance::resolve_closure(
                        *self.tcx,
                        def_id,
                        substs,
                        ty::ClosureKind::FnOnce,
                    )
                    .ok_or_else(|| err_inval!(TooGeneric))?;
                    let fn_ptr = self.create_fn_alloc_ptr(FnVal::Instance(instance));
                    self.write_pointer(fn_ptr, dest)?;
                }
                _ => span_bug!(self.cur_span(), "closure fn pointer on {:?}", src.layout.ty),
            }
        }
    }
    Ok(())
}
/// Handles `Misc` casts: float-to-X casts, and everything whose source is
/// "int-like" (bool, char, integers, and thin/fat/fn pointers).
pub fn misc_cast(
    &mut self,
    src: &ImmTy<'tcx, M::Provenance>,
    cast_ty: Ty<'tcx>,
) -> InterpResult<'tcx, Immediate<M::Provenance>> {
    use rustc_type_ir::sty::TyKind::*;
    trace!("Casting {:?}: {:?} to {:?}", *src, src.layout.ty, cast_ty);

    match src.layout.ty.kind() {
        // Floating point
        Float(FloatTy::F32) => {
            return Ok(self.cast_from_float(src.to_scalar()?.to_f32()?, cast_ty).into());
        }
        Float(FloatTy::F64) => {
            return Ok(self.cast_from_float(src.to_scalar()?.to_f64()?, cast_ty).into());
        }
        // The rest is integer/pointer-"like", including fn ptr casts
        _ => assert!(
            src.layout.ty.is_bool()
                || src.layout.ty.is_char()
                || src.layout.ty.is_integral()
                || src.layout.ty.is_any_ptr(),
            "Unexpected cast from type {:?}",
            src.layout.ty
        ),
    }

    // # First handle non-scalar source values.

    // Handle casting any ptr to raw ptr (might be a fat ptr).
    if src.layout.ty.is_any_ptr() && cast_ty.is_unsafe_ptr() {
        let dest_layout = self.layout_of(cast_ty)?;
        if dest_layout.size == src.layout.size {
            // Thin or fat pointer that just has the ptr kind of target type changed.
            return Ok(**src);
        } else {
            // Casting the metadata away from a fat ptr.
            assert_eq!(src.layout.size, 2 * self.pointer_size());
            assert_eq!(dest_layout.size, self.pointer_size());
            assert!(src.layout.ty.is_unsafe_ptr());
            // Keep only the data half of the (data, metadata) pair.
            return match **src {
                Immediate::ScalarPair(data, _) => Ok(data.check_init()?.into()),
                Immediate::Scalar(..) => span_bug!(
                    self.cur_span(),
                    "{:?} input to a fat-to-thin cast ({:?} -> {:?})",
                    *src,
                    src.layout.ty,
                    cast_ty
                ),
                Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
            };
        }
    }

    // # The remaining source values are scalar and "int-like".
    let scalar = src.to_scalar()?;
    Ok(self.cast_from_int_like(scalar, src.layout, cast_ty)?.into())
}
/// Performs a `PointerExposeAddress` cast: records the pointer's provenance
/// as "exposed" with the machine, then returns the address as an integer.
pub fn pointer_expose_address_cast(
    &mut self,
    src: &ImmTy<'tcx, M::Provenance>,
    cast_ty: Ty<'tcx>,
) -> InterpResult<'tcx, Immediate<M::Provenance>> {
    // Only pointer-typed sources can expose an address, and the result type
    // must be integral.
    assert_matches!(src.layout.ty.kind(), ty::RawPtr(_) | ty::FnPtr(_));
    assert!(cast_ty.is_integral());

    let src_scalar = src.to_scalar()?;
    // Tell the machine about the exposure so a later int-to-ptr cast may
    // legitimately pick this provenance back up. Exposing an invalid pointer
    // (`None` provenance, i.e. a plain address) is a NOP, so `Err` is ignored.
    if let Ok(ptr) = self.scalar_to_ptr(src_scalar)?.into_pointer_or_addr() {
        M::expose_ptr(self, ptr)?;
    }
    // The resulting value is just the usual int-like cast of the address.
    Ok(self.cast_from_int_like(src_scalar, src.layout, cast_ty)?.into())
}
pub fn pointer_from_exposed_address_cast(
&mut self,
src: &ImmTy<'tcx, M::Provenance>,
cast_ty: Ty<'tcx>,
) -> InterpResult<'tcx, Immediate<M::Provenance>> {
assert!(src.layout.ty.is_integral());
assert_matches!(cast_ty.kind(), ty::RawPtr(_));
// First cast to usize.
let scalar = src.to_scalar()?;
let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
let addr = addr.to_machine_usize(self)?;
// Then turn address into pointer.
2022-06-05 10:53:35 -04:00
let ptr = M::ptr_from_addr_cast(&self, addr)?;
Ok(Scalar::from_maybe_pointer(ptr, self).into())
}
pub fn cast_from_int_like(
&self,
scalar: Scalar<M::Provenance>, // input value (there is no ScalarTy so we separate data+layout)
2020-03-04 14:50:21 +00:00
src_layout: TyAndLayout<'tcx>,
cast_ty: Ty<'tcx>,
) -> InterpResult<'tcx, Scalar<M::Provenance>> {
// Let's make sure v is sign-extended *if* it has a signed type.
2020-05-24 19:28:44 +02:00
let signed = src_layout.abi.is_signed(); // Also asserts that abi is `Scalar`.
let v = scalar.to_bits(src_layout.size)?;
let v = if signed { self.sign_extend(v, src_layout) } else { v };
trace!("cast_from_scalar: {}, {} -> {}", v, src_layout.ty, cast_ty);
Ok(match *cast_ty.kind() {
Int(_) | Uint(_) => {
2020-08-03 00:49:11 +02:00
let size = match *cast_ty.kind() {
Int(t) => Integer::from_int_ty(self, t).size(),
Uint(t) => Integer::from_uint_ty(self, t).size(),
_ => bug!(),
};
let v = size.truncate(v);
Scalar::from_uint(v, size)
}
2020-04-14 10:30:33 +02:00
Float(FloatTy::F32) if signed => Scalar::from_f32(Single::from_i128(v as i128).value),
Float(FloatTy::F64) if signed => Scalar::from_f64(Double::from_i128(v as i128).value),
Float(FloatTy::F32) => Scalar::from_f32(Single::from_u128(v).value),
Float(FloatTy::F64) => Scalar::from_f64(Double::from_u128(v).value),
Char => {
// `u8` to `char` cast
2020-04-14 10:30:33 +02:00
Scalar::from_u32(u8::try_from(v).unwrap().into())
}
// Casts to bool are not permitted by rustc, no need to handle them here.
2020-06-21 16:13:31 +02:00
_ => span_bug!(self.cur_span(), "invalid int to {:?} cast", cast_ty),
})
2016-09-07 18:34:59 +02:00
}
fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
where
F: Float + Into<Scalar<M::Provenance>> + FloatConvert<Single> + FloatConvert<Double>,
{
2021-01-31 10:32:34 +01:00
use rustc_type_ir::sty::TyKind::*;
2020-08-03 00:49:11 +02:00
match *dest_ty.kind() {
// float -> uint
Uint(t) => {
let size = Integer::from_uint_ty(self, t).size();
// `to_u128` is a saturating cast, which is what we need
// (https://doc.rust-lang.org/nightly/nightly-rustc/rustc_apfloat/trait.Float.html#method.to_i128_r).
let v = f.to_u128(size.bits_usize()).value;
// This should already fit the bit width
Scalar::from_uint(v, size)
2017-12-06 09:25:29 +01:00
}
// float -> int
Int(t) => {
let size = Integer::from_int_ty(self, t).size();
// `to_i128` is a saturating cast, which is what we need
// (https://doc.rust-lang.org/nightly/nightly-rustc/rustc_apfloat/trait.Float.html#method.to_i128_r).
let v = f.to_i128(size.bits_usize()).value;
Scalar::from_int(v, size)
2017-12-06 09:25:29 +01:00
}
// float -> f32
2020-04-14 10:30:33 +02:00
Float(FloatTy::F32) => Scalar::from_f32(f.convert(&mut false).value),
// float -> f64
2020-04-14 10:30:33 +02:00
Float(FloatTy::F64) => Scalar::from_f64(f.convert(&mut false).value),
// That's it.
2020-06-21 16:13:31 +02:00
_ => span_bug!(self.cur_span(), "invalid float to {:?} cast", dest_ty),
2016-09-07 18:34:59 +02:00
}
}
/// Unsizes a pointer value: array-to-slice, dyn-to-dyn (including trait
/// upcasting), or sized-to-dyn. `source_ty`/`cast_ty` are the *pointee* types.
fn unsize_into_ptr(
    &mut self,
    src: &OpTy<'tcx, M::Provenance>,
    dest: &PlaceTy<'tcx, M::Provenance>,
    // The pointee types
    source_ty: Ty<'tcx>,
    cast_ty: Ty<'tcx>,
) -> InterpResult<'tcx> {
    // A<Struct> -> A<Trait> conversion
    let (src_pointee_ty, dest_pointee_ty) =
        self.tcx.struct_lockstep_tails_erasing_lifetimes(source_ty, cast_ty, self.param_env);

    match (&src_pointee_ty.kind(), &dest_pointee_ty.kind()) {
        (&ty::Array(_, length), &ty::Slice(_)) => {
            // `&[T; N]` -> `&[T]`: attach the array length as slice metadata.
            let ptr = self.read_immediate(src)?.to_scalar()?;
            // u64 cast is from usize to u64, which is always good
            let val =
                Immediate::new_slice(ptr, length.eval_usize(*self.tcx, self.param_env), self);
            self.write_immediate(val, dest)
        }
        (&ty::Dynamic(ref data_a, ..), &ty::Dynamic(ref data_b, ..)) => {
            let val = self.read_immediate(src)?;
            let (old_data, old_vptr) = val.to_scalar_pair()?;
            let old_vptr = self.scalar_to_ptr(old_vptr)?;
            // Same principal trait: only auto traits / lifetimes differ, so the
            // vtable can be reused unchanged.
            if data_a.principal_def_id() == data_b.principal_def_id() {
                return self.write_immediate(*val, dest);
            }
            // trait upcasting coercion
            let Some(vptr_entry_idx) = self.tcx.vtable_trait_upcasting_coercion_new_vptr_slot((
                src_pointee_ty,
                dest_pointee_ty,
            )) else {
                // No new vptr slot needed: the supertrait vtable is a prefix of
                // the current one, so the old vtable pointer still works.
                return self.write_immediate(*val, dest);
            };
            // Look up the supertrait vtable pointer stored inside the old vtable.
            let (ty, _) = self.get_ptr_vtable(old_vptr)?;
            let Some(ty::VtblEntry::TraitVPtr(new_trait)) = self.get_vtable_entries(old_vptr)?.get(vptr_entry_idx) else {
                throw_ub_format!(
                    "upcasting to index {vptr_entry_idx} of vtable {old_vptr} but \
                     that vtable is too small or does not have an upcast-vtable at that index"
                )
            };
            let new_trait = new_trait.map_bound(|trait_ref| {
                ty::ExistentialTraitRef::erase_self_ty(*self.tcx, trait_ref)
            });
            let new_vptr = self.get_vtable_ptr(ty, Some(new_trait))?;
            self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
        }
        (_, &ty::Dynamic(ref data, _)) => {
            // Initial cast from sized to dyn trait
            let vtable = self.get_vtable_ptr(src_pointee_ty, data.principal())?;
            let ptr = self.read_immediate(src)?.to_scalar()?;
            let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
            self.write_immediate(val, dest)
        }
        _ => {
            span_bug!(self.cur_span(), "invalid unsizing {:?} -> {:?}", src.layout.ty, cast_ty)
        }
    }
}
/// Performs an unsizing coercion of `src` into `dest` (which has layout
/// `cast_ty`): either a pointer unsizing, or a field-wise unsizing of an
/// ADT (e.g. `Arc<T>` -> `Arc<dyn Trait>`), recursing into the field that
/// actually changes.
fn unsize_into(
    &mut self,
    src: &OpTy<'tcx, M::Provenance>,
    cast_ty: TyAndLayout<'tcx>,
    dest: &PlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx> {
    trace!("Unsizing {:?} of type {} into {:?}", *src, src.layout.ty, cast_ty.ty);
    match (&src.layout.ty.kind(), &cast_ty.ty.kind()) {
        // Reference/raw-pointer unsizing: delegate to `unsize_into_ptr` with
        // the two pointee types.
        (&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(TypeAndMut { ty: c, .. }))
        | (&ty::RawPtr(TypeAndMut { ty: s, .. }), &ty::RawPtr(TypeAndMut { ty: c, .. })) => {
            self.unsize_into_ptr(src, dest, *s, *c)
        }
        (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
            // Both sides must be the same ADT; only the generic args differ.
            assert_eq!(def_a, def_b);

            // unsizing of generic struct with pointer fields
            // Example: `Arc<T>` -> `Arc<Trait>`
            // here we need to increase the size of every &T thin ptr field to a fat ptr
            for i in 0..src.layout.fields.count() {
                let cast_ty_field = cast_ty.field(self, i);
                if cast_ty_field.is_zst() {
                    // ZST fields carry no data; nothing to copy.
                    continue;
                }
                let src_field = self.operand_field(src, i)?;
                let dst_field = self.place_field(dest, i)?;
                if src_field.layout.ty == cast_ty_field.ty {
                    // Unchanged field: plain copy.
                    self.copy_op(&src_field, &dst_field, /*allow_transmute*/ false)?;
                } else {
                    // This is the field being unsized; recurse.
                    self.unsize_into(&src_field, cast_ty_field, &dst_field)?;
                }
            }
            Ok(())
        }
        _ => span_bug!(
            self.cur_span(),
            "unsize_into: invalid conversion: {:?} -> {:?}",
            src.layout,
            dest.layout
        ),
    }
}
}