
Split the "raw integer bytes" part out of Scalar

Oliver Scherer 2020-09-26 15:15:35 +02:00 committed by oli
parent 56293097f7
commit 362123dd75
17 changed files with 325 additions and 184 deletions
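
In short, the change replaces the inline `data: u128` / `size: u8` pair of `Scalar::Raw` with a dedicated `ScalarInt` type that owns both the bytes and the size, so the "value fits in `size`" invariant and the integer conversions live in one place instead of being re-checked all over `Scalar`. A minimal before/after sketch of the shape of the change (simplified; the exact definitions are in the hunks below):

// Before: the raw integer lives inline in the enum variant.
pub enum Scalar<Tag = ()> {
    Raw { data: u128, size: u8 },
    Ptr(Pointer<Tag>),
}

// After: the integer part is split out. The value is stored as a byte array
// so that the type still packs well when embedded in enums such as Scalar.
pub struct ScalarInt {
    bytes: [u8; 16],
    size: u8,
}

pub enum Scalar<Tag = ()> {
    Raw(ScalarInt),
    Ptr(Pointer<Tag>),
}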


@@ -24,6 +24,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(array_windows)]
#![feature(assoc_char_funcs)]
#![feature(backtrace)]
#![feature(bool_to_option)]
#![feature(box_patterns)]


@@ -71,7 +71,7 @@ macro_rules! throw_inval {
#[macro_export]
macro_rules! throw_ub {
($($tt:tt)*) => { Err::<!, _>(err_ub!($($tt)*))? };
($($tt:tt)*) => { Err::<!, _>($crate::err_ub!($($tt)*))? };
}
#[macro_export]


@@ -8,9 +8,9 @@ use rustc_apfloat::{
use rustc_macros::HashStable;
use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout};
use crate::ty::{ParamEnv, Ty, TyCtxt};
use crate::ty::{ParamEnv, ScalarInt, Ty, TyCtxt};
use super::{sign_extend, truncate, AllocId, Allocation, InterpResult, Pointer, PointerArithmetic};
use super::{sign_extend, AllocId, Allocation, InterpResult, Pointer, PointerArithmetic};
/// Represents the result of const evaluation via the `eval_to_allocation` query.
#[derive(Clone, HashStable, TyEncodable, TyDecodable)]
@@ -103,12 +103,7 @@ impl<'tcx> ConstValue<'tcx> {
#[derive(HashStable)]
pub enum Scalar<Tag = ()> {
/// The raw bytes of a simple value.
Raw {
/// The first `size` bytes of `data` are the value.
/// Do not try to read less or more bytes than that. The remaining bytes must be 0.
data: u128,
size: u8,
},
Raw(ScalarInt),
/// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
/// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
@@ -125,16 +120,7 @@ impl<Tag: fmt::Debug> fmt::Debug for Scalar<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Scalar::Ptr(ptr) => write!(f, "{:?}", ptr),
&Scalar::Raw { data, size } => {
Scalar::check_data(data, size);
if size == 0 {
write!(f, "<ZST>")
} else {
// Format as hex number wide enough to fit any value of the given `size`.
// So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
write!(f, "0x{:>0width$x}", data, width = (size * 2) as usize)
}
}
Scalar::Raw(int) => write!(f, "{:?}", int),
}
}
}
@@ -163,21 +149,6 @@ impl<Tag> From<Double> for Scalar<Tag> {
}
impl Scalar<()> {
/// Make sure the `data` fits in `size`.
/// This is guaranteed by all constructors here, but since the enum variants are public,
/// it could still be violated (even though no code outside this file should
/// construct `Scalar`s).
#[inline(always)]
fn check_data(data: u128, size: u8) {
debug_assert_eq!(
truncate(data, Size::from_bytes(u64::from(size))),
data,
"Scalar value {:#x} exceeds size of {} bytes",
data,
size
);
}
/// Tag this scalar with `new_tag` if it is a pointer, leave it unchanged otherwise.
///
/// Used by `MemPlace::replace_tag`.
@@ -185,7 +156,7 @@ impl Scalar<()> {
pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
Scalar::Raw { data, size } => Scalar::Raw { data, size },
Scalar::Raw(int) => Scalar::Raw(int),
}
}
}
@@ -198,18 +169,18 @@ impl<'tcx, Tag> Scalar<Tag> {
pub fn erase_tag(self) -> Scalar {
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
Scalar::Raw { data, size } => Scalar::Raw { data, size },
Scalar::Raw(int) => Scalar::Raw(int),
}
}
#[inline]
pub fn null_ptr(cx: &impl HasDataLayout) -> Self {
Scalar::Raw { data: 0, size: cx.data_layout().pointer_size.bytes() as u8 }
Scalar::Raw(ScalarInt::null(cx.data_layout().pointer_size))
}
#[inline]
pub fn zst() -> Self {
Scalar::Raw { data: 0, size: 0 }
Scalar::Raw(ScalarInt::zst())
}
#[inline(always)]
@@ -220,10 +191,7 @@ impl<'tcx, Tag> Scalar<Tag> {
f_ptr: impl FnOnce(Pointer<Tag>) -> InterpResult<'tcx, Pointer<Tag>>,
) -> InterpResult<'tcx, Self> {
match self {
Scalar::Raw { data, size } => {
assert_eq!(u64::from(size), dl.pointer_size.bytes());
Ok(Scalar::Raw { data: u128::from(f_int(u64::try_from(data).unwrap())?), size })
}
Scalar::Raw(int) => Ok(Scalar::Raw(int.ptr_sized_op(dl, f_int)?)),
Scalar::Ptr(ptr) => Ok(Scalar::Ptr(f_ptr(ptr)?)),
}
}
@@ -264,24 +232,17 @@ impl<'tcx, Tag> Scalar<Tag> {
#[inline]
pub fn from_bool(b: bool) -> Self {
// Guaranteed to be truncated and does not need sign extension.
Scalar::Raw { data: b as u128, size: 1 }
Scalar::Raw(b.into())
}
#[inline]
pub fn from_char(c: char) -> Self {
// Guaranteed to be truncated and does not need sign extension.
Scalar::Raw { data: c as u128, size: 4 }
Scalar::Raw(c.into())
}
#[inline]
pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
let i = i.into();
if truncate(i, size) == i {
Some(Scalar::Raw { data: i, size: size.bytes() as u8 })
} else {
None
}
ScalarInt::try_from_uint(i, size).map(Scalar::Raw)
}
#[inline]
@@ -293,26 +254,22 @@ impl<'tcx, Tag> Scalar<Tag> {
#[inline]
pub fn from_u8(i: u8) -> Self {
// Guaranteed to be truncated and does not need sign extension.
Scalar::Raw { data: i.into(), size: 1 }
Scalar::Raw(i.into())
}
#[inline]
pub fn from_u16(i: u16) -> Self {
// Guaranteed to be truncated and does not need sign extension.
Scalar::Raw { data: i.into(), size: 2 }
Scalar::Raw(i.into())
}
#[inline]
pub fn from_u32(i: u32) -> Self {
// Guaranteed to be truncated and does not need sign extension.
Scalar::Raw { data: i.into(), size: 4 }
Scalar::Raw(i.into())
}
#[inline]
pub fn from_u64(i: u64) -> Self {
// Guaranteed to be truncated and does not need sign extension.
Scalar::Raw { data: i.into(), size: 8 }
Scalar::Raw(i.into())
}
#[inline]
@@ -322,14 +279,7 @@ impl<'tcx, Tag> Scalar<Tag> {
#[inline]
pub fn try_from_int(i: impl Into<i128>, size: Size) -> Option<Self> {
let i = i.into();
// `into` performed sign extension, we have to truncate
let truncated = truncate(i as u128, size);
if sign_extend(truncated, size) as i128 == i {
Some(Scalar::Raw { data: truncated, size: size.bytes() as u8 })
} else {
None
}
ScalarInt::try_from_int(i, size).map(Scalar::Raw)
}
#[inline]
@@ -366,14 +316,12 @@ impl<'tcx, Tag> Scalar<Tag> {
#[inline]
pub fn from_f32(f: Single) -> Self {
// We trust apfloat to give us properly truncated data.
Scalar::Raw { data: f.to_bits(), size: 4 }
Scalar::Raw(f.into())
}
#[inline]
pub fn from_f64(f: Double) -> Self {
// We trust apfloat to give us properly truncated data.
Scalar::Raw { data: f.to_bits(), size: 8 }
Scalar::Raw(f.into())
}
/// This is very rarely the method you want! You should dispatch on the type
@@ -388,11 +336,7 @@ impl<'tcx, Tag> Scalar<Tag> {
) -> Result<u128, Pointer<Tag>> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
match self {
Scalar::Raw { data, size } => {
assert_eq!(target_size.bytes(), u64::from(size));
Scalar::check_data(data, size);
Ok(data)
}
Scalar::Raw(int) => Ok(int.assert_bits(target_size)),
Scalar::Ptr(ptr) => {
assert_eq!(target_size, cx.data_layout().pointer_size);
Err(ptr)
@@ -406,16 +350,7 @@ impl<'tcx, Tag> Scalar<Tag> {
fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
match self {
Scalar::Raw { data, size } => {
if target_size.bytes() != u64::from(size) {
throw_ub!(ScalarSizeMismatch {
target_size: target_size.bytes(),
data_size: u64::from(size),
});
}
Scalar::check_data(data, size);
Ok(data)
}
Scalar::Raw(int) => int.to_bits(target_size),
Scalar::Ptr(_) => throw_unsup!(ReadPointerAsBytes),
}
}


@@ -28,7 +28,6 @@ use rustc_index::vec::{Idx, IndexVec};
use rustc_serialize::{Decodable, Encodable};
use rustc_span::symbol::Symbol;
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi;
use rustc_target::asm::InlineAsmRegOrRegClass;
use std::borrow::Cow;
use std::fmt::{self, Debug, Display, Formatter, Write};
@@ -1952,10 +1951,10 @@ impl<'tcx> Operand<'tcx> {
.layout_of(param_env_and_ty)
.unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e))
.size;
let scalar_size = abi::Size::from_bytes(match val {
Scalar::Raw { size, .. } => size,
let scalar_size = match val {
Scalar::Raw(int) => int.size(),
_ => panic!("Invalid scalar type {:?}", val),
});
};
scalar_size == type_size
});
Operand::Constant(box Constant {


@@ -1,31 +1,34 @@
use crate::mir::interpret::truncate;
use rustc_target::abi::Size;
use crate::mir::interpret::{sign_extend, truncate, InterpErrorInfo, InterpResult};
use crate::throw_ub;
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float;
use rustc_macros::HashStable;
use rustc_target::abi::{Size, TargetDataLayout};
use std::convert::{TryFrom, TryInto};
use std::fmt;
#[derive(Copy, Clone)]
/// A type for representing any integer. Only used for printing.
// FIXME: Use this for the integer-tree representation needed for type level ints and
// const generics?
pub struct ConstInt {
/// Number of bytes of the integer. Only 1, 2, 4, 8, 16 are legal values.
size: u8,
/// The "untyped" variant of `ConstInt`.
int: ScalarInt,
/// Whether the value is of a signed integer type.
signed: bool,
/// Whether the value is a `usize` or `isize` type.
is_ptr_sized_integral: bool,
/// Raw memory of the integer. All bytes beyond the `size` are unused and must be zero.
raw: u128,
}
impl ConstInt {
pub fn new(raw: u128, size: Size, signed: bool, is_ptr_sized_integral: bool) -> Self {
assert!(raw <= truncate(u128::MAX, size));
Self { raw, size: size.bytes() as u8, signed, is_ptr_sized_integral }
pub fn new(int: ScalarInt, signed: bool, is_ptr_sized_integral: bool) -> Self {
Self { int, signed, is_ptr_sized_integral }
}
}
impl std::fmt::Debug for ConstInt {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let Self { size, signed, raw, is_ptr_sized_integral } = *self;
let Self { int, signed, is_ptr_sized_integral } = *self;
let size = int.size().bytes();
let raw = int.data();
if signed {
let bit_size = size * 8;
let min = 1u128 << (bit_size - 1);
@@ -109,3 +112,215 @@ impl std::fmt::Debug for ConstInt {
}
}
}
// FIXME: reuse in `super::int::ConstInt` and `Scalar::Bits`
/// The raw bytes of a simple value.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub struct ScalarInt {
/// The first `size` bytes of `data` are the value.
/// Do not try to read less or more bytes than that. The remaining bytes must be 0.
///
/// This is an array in order to allow this type to be optimally embedded in enums
/// (like Scalar).
bytes: [u8; 16],
size: u8,
}
impl ScalarInt {
pub const TRUE: ScalarInt = ScalarInt { bytes: 1_u128.to_ne_bytes(), size: 1 };
pub const FALSE: ScalarInt = ScalarInt { bytes: 0_u128.to_ne_bytes(), size: 1 };
pub const ZST: ScalarInt = ScalarInt { bytes: 0_u128.to_ne_bytes(), size: 0 };
fn data(self) -> u128 {
u128::from_ne_bytes(self.bytes)
}
#[inline]
pub fn size(self) -> Size {
Size::from_bytes(self.size)
}
/// Make sure the `data` fits in `size`.
/// This is guaranteed by all constructors here, but since the enum variants are public,
/// it could still be violated (even though no code outside this file should
/// construct `Scalar`s).
#[inline(always)]
fn check_data(self) {
debug_assert_eq!(
truncate(self.data(), self.size()),
self.data(),
"Scalar value {:#x} exceeds size of {} bytes",
self.data(),
self.size
);
}
#[inline]
pub fn zst() -> Self {
Self::null(Size::ZERO)
}
#[inline]
pub fn null(size: Size) -> Self {
Self { bytes: [0; 16], size: size.bytes() as u8 }
}
pub(crate) fn ptr_sized_op<'tcx>(
self,
dl: &TargetDataLayout,
f_int: impl FnOnce(u64) -> InterpResult<'tcx, u64>,
) -> InterpResult<'tcx, Self> {
assert_eq!(u64::from(self.size), dl.pointer_size.bytes());
Ok(Self {
bytes: u128::from(f_int(u64::try_from(self.data()).unwrap())?).to_ne_bytes(),
size: self.size,
})
}
#[inline]
pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
let data = i.into();
if truncate(data, size) == data {
Some(Self { bytes: data.to_ne_bytes(), size: size.bytes() as u8 })
} else {
None
}
}
#[inline]
pub fn try_from_int(i: impl Into<i128>, size: Size) -> Option<Self> {
let i = i.into();
// `into` performed sign extension, we have to truncate
let truncated = truncate(i as u128, size);
if sign_extend(truncated, size) as i128 == i {
Some(Self { bytes: truncated.to_ne_bytes(), size: size.bytes() as u8 })
} else {
None
}
}
#[inline]
pub fn assert_bits(self, target_size: Size) -> u128 {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
assert_eq!(target_size.bytes(), u64::from(self.size));
self.check_data();
self.data()
}
#[inline]
pub fn to_bits(self, target_size: Size) -> InterpResult<'static, u128> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
if target_size.bytes() != u64::from(self.size) {
throw_ub!(ScalarSizeMismatch {
target_size: target_size.bytes(),
data_size: u64::from(self.size),
});
}
self.check_data();
Ok(self.data())
}
}
macro_rules! from {
($($ty:ty),*) => {
$(
impl From<$ty> for ScalarInt {
#[inline]
fn from(u: $ty) -> Self {
Self {
bytes: u128::from(u).to_ne_bytes(),
size: std::mem::size_of::<$ty>() as u8,
}
}
}
)*
}
}
macro_rules! try_from {
($($ty:ty),*) => {
$(
impl TryFrom<ScalarInt> for $ty {
type Error = InterpErrorInfo<'static>;
#[inline]
fn try_from(int: ScalarInt) -> InterpResult<'static, Self> {
int.to_bits(Size::from_bytes(std::mem::size_of::<$ty>())).map(|u| u.try_into().unwrap())
}
}
)*
}
}
from!(u8, u16, u32, u64, u128, bool);
try_from!(u8, u16, u32, u64, u128);
impl From<char> for ScalarInt {
#[inline]
fn from(c: char) -> Self {
Self { bytes: (c as u128).to_ne_bytes(), size: std::mem::size_of::<char>() as u8 }
}
}
impl TryFrom<ScalarInt> for char {
type Error = InterpErrorInfo<'static>;
#[inline]
fn try_from(int: ScalarInt) -> InterpResult<'static, Self> {
int.to_bits(Size::from_bytes(std::mem::size_of::<char>()))
.map(|u| char::from_u32(u.try_into().unwrap()).unwrap())
}
}
impl From<Single> for ScalarInt {
#[inline]
fn from(f: Single) -> Self {
// We trust apfloat to give us properly truncated data.
Self { bytes: f.to_bits().to_ne_bytes(), size: 4 }
}
}
impl TryFrom<ScalarInt> for Single {
type Error = InterpErrorInfo<'static>;
#[inline]
fn try_from(int: ScalarInt) -> InterpResult<'static, Self> {
int.to_bits(Size::from_bytes(4)).map(Self::from_bits)
}
}
impl From<Double> for ScalarInt {
#[inline]
fn from(f: Double) -> Self {
// We trust apfloat to give us properly truncated data.
Self { bytes: f.to_bits().to_ne_bytes(), size: 8 }
}
}
impl TryFrom<ScalarInt> for Double {
type Error = InterpErrorInfo<'static>;
#[inline]
fn try_from(int: ScalarInt) -> InterpResult<'static, Self> {
int.to_bits(Size::from_bytes(8)).map(Self::from_bits)
}
}
impl fmt::Debug for ScalarInt {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.size == 0 {
self.check_data();
write!(f, "<ZST>")
} else {
write!(f, "0x{:x}", self)
}
}
}
impl fmt::LowerHex for ScalarInt {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.check_data();
// Format as hex number wide enough to fit any value of the given `size`.
// So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
write!(f, "{:01$x}", self.data(), self.size as usize * 2)
}
}
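
The `from!`/`try_from!` macros and the `char`/`Single`/`Double` impls above let `ScalarInt` round-trip with primitive values: `From` zero-extends the value to `u128` and stores it as 16 native-endian bytes, while `TryFrom` goes back through `to_bits`, which reports a `ScalarSizeMismatch` error when the requested size does not match. A hypothetical usage sketch (not part of the commit), assuming the impls from this file are in scope:

use std::convert::TryFrom;

let x = ScalarInt::from(42_u32);            // From<u32>: size = 4
assert_eq!(x.size(), Size::from_bytes(4));
assert_eq!(u32::try_from(x).unwrap(), 42);  // TryFrom re-checks the size via to_bits
assert!(u8::try_from(x).is_err());          // wrong target size (1 vs 4) is an error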


@@ -87,7 +87,7 @@ pub use self::trait_def::TraitDef;
pub use self::query::queries;
pub use self::consts::{Const, ConstInt, ConstKind, InferConst};
pub use self::consts::{Const, ConstInt, ConstKind, InferConst, ScalarInt};
pub mod _match;
pub mod adjustment;


@@ -1,12 +1,9 @@
use crate::middle::cstore::{ExternCrate, ExternCrateSource};
use crate::mir::interpret::{AllocId, ConstValue, GlobalAlloc, Pointer, Scalar};
use crate::ty::layout::IntegerExt;
use crate::ty::subst::{GenericArg, GenericArgKind, Subst};
use crate::ty::{self, ConstInt, DefIdTree, ParamConst, Ty, TyCtxt, TypeFoldable};
use crate::ty::{self, ConstInt, DefIdTree, ParamConst, ScalarInt, Ty, TyCtxt, TypeFoldable};
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float;
use rustc_ast as ast;
use rustc_attr::{SignedInt, UnsignedInt};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_hir::def::{self, CtorKind, DefKind, Namespace};
@@ -15,12 +12,13 @@ use rustc_hir::definitions::{DefPathData, DefPathDataName, DisambiguatedDefPathData};
use rustc_hir::ItemKind;
use rustc_session::config::TrimmedDefPaths;
use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_target::abi::{Integer, Size};
use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
use std::cell::Cell;
use std::char;
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fmt::{self, Write as _};
use std::ops::{ControlFlow, Deref, DerefMut};
@@ -960,11 +958,7 @@ pub trait PrettyPrinter<'tcx>:
ty::Array(
ty::TyS { kind: ty::Uint(ast::UintTy::U8), .. },
ty::Const {
val:
ty::ConstKind::Value(ConstValue::Scalar(Scalar::Raw {
data,
..
})),
val: ty::ConstKind::Value(ConstValue::Scalar(int)),
..
},
),
@@ -974,8 +968,9 @@
),
) => match self.tcx().get_global_alloc(ptr.alloc_id) {
Some(GlobalAlloc::Memory(alloc)) => {
if let Ok(byte_str) = alloc.get_bytes(&self.tcx(), ptr, Size::from_bytes(*data))
{
let bytes = int.assert_bits(self.tcx().data_layout.pointer_size);
let size = Size::from_bytes(bytes);
if let Ok(byte_str) = alloc.get_bytes(&self.tcx(), ptr, size) {
p!(pretty_print_byte_str(byte_str))
} else {
p!("<too short allocation>")
@@ -987,32 +982,28 @@ pub trait PrettyPrinter<'tcx>:
None => p!("<dangling pointer>"),
},
// Bool
(Scalar::Raw { data: 0, .. }, ty::Bool) => p!("false"),
(Scalar::Raw { data: 1, .. }, ty::Bool) => p!("true"),
(Scalar::Raw(ScalarInt::FALSE), ty::Bool) => p!("false"),
(Scalar::Raw(ScalarInt::TRUE), ty::Bool) => p!("true"),
// Float
(Scalar::Raw { data, .. }, ty::Float(ast::FloatTy::F32)) => {
p!(write("{}f32", Single::from_bits(data)))
(Scalar::Raw(int), ty::Float(ast::FloatTy::F32)) => {
p!(write("{}f32", Single::try_from(int).unwrap()))
}
(Scalar::Raw { data, .. }, ty::Float(ast::FloatTy::F64)) => {
p!(write("{}f64", Double::from_bits(data)))
(Scalar::Raw(int), ty::Float(ast::FloatTy::F64)) => {
p!(write("{}f64", Double::try_from(int).unwrap()))
}
// Int
(Scalar::Raw { data, .. }, ty::Uint(ui)) => {
let size = Integer::from_attr(&self.tcx(), UnsignedInt(*ui)).size();
let int = ConstInt::new(data, size, false, ty.is_ptr_sized_integral());
if print_ty { p!(write("{:#?}", int)) } else { p!(write("{:?}", int)) }
}
(Scalar::Raw { data, .. }, ty::Int(i)) => {
let size = Integer::from_attr(&self.tcx(), SignedInt(*i)).size();
let int = ConstInt::new(data, size, true, ty.is_ptr_sized_integral());
(Scalar::Raw(int), ty::Uint(_) | ty::Int(_)) => {
let int =
ConstInt::new(int, matches!(ty.kind(), ty::Int(_)), ty.is_ptr_sized_integral());
if print_ty { p!(write("{:#?}", int)) } else { p!(write("{:?}", int)) }
}
// Char
(Scalar::Raw { data, .. }, ty::Char) if char::from_u32(data as u32).is_some() => {
p!(write("{:?}", char::from_u32(data as u32).unwrap()))
(Scalar::Raw(int), ty::Char) if char::try_from(int).is_ok() => {
p!(write("{:?}", char::try_from(int).unwrap()))
}
// Raw pointers
(Scalar::Raw { data, .. }, ty::RawPtr(_)) => {
(Scalar::Raw(int), ty::RawPtr(_)) => {
let data = int.assert_bits(self.tcx().data_layout.pointer_size);
self = self.typed_value(
|mut this| {
write!(this, "0x{:x}", data)?;
@@ -1034,14 +1025,14 @@ pub trait PrettyPrinter<'tcx>:
)?;
}
// For function type zsts just printing the path is enough
(Scalar::Raw { size: 0, .. }, ty::FnDef(d, s)) => p!(print_value_path(*d, s)),
(Scalar::Raw(ScalarInt::ZST), ty::FnDef(d, s)) => p!(print_value_path(*d, s)),
// Nontrivial types with scalar bit representation
(Scalar::Raw { data, size }, _) => {
(Scalar::Raw(int), _) => {
let print = |mut this: Self| {
if size == 0 {
if int.size() == Size::ZERO {
write!(this, "transmute(())")?;
} else {
write!(this, "transmute(0x{:01$x})", data, size as usize * 2)?;
write!(this, "transmute(0x{:x})", int)?;
}
Ok(this)
};