
ConstValue::ScalarPair only needs to represent slices

Oliver Scherer 2019-01-08 13:49:37 +01:00
parent 2a1748834e
commit fe50b4eb1d
12 changed files with 59 additions and 82 deletions
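
The gist of the change: the second element of ConstValue::ScalarPair only ever carried a slice length (or, before this commit, a vtable pointer via the now-removed new_dyn_trait), so the variant becomes Slice and the length is stored as a bare u64. A standalone sketch of why this shrinks the type (stand-in definitions, not rustc's real Scalar/ConstValue; the real before/after sizes are in the static_assert changes below):

// Stand-in for rustc's Scalar: either raw bits or a pointer into an allocation.
enum FakeScalar {
    Bits { size: u8, bits: u128 },
    Ptr { alloc_id: u64, offset: u64 },
}

// Before: both halves of the pair are full scalars.
enum Before {
    Scalar(FakeScalar),
    ScalarPair(FakeScalar, FakeScalar),
}

// After: the length half is a plain u64, since a slice length is always a
// target-usize value known at compile time.
enum After {
    Scalar(FakeScalar),
    Slice(FakeScalar, u64),
}

fn main() {
    // The pair variant dominates the enum's size, so halving it shrinks the
    // whole type (the exact numbers differ from rustc's 56 -> 40).
    println!("before: {} bytes", std::mem::size_of::<Before>());
    println!("after:  {} bytes", std::mem::size_of::<After>());
}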

@@ -302,7 +302,7 @@ impl_stable_hash_for!(struct ty::FieldDef {
 impl_stable_hash_for!(
     impl<'tcx> for enum mir::interpret::ConstValue<'tcx> [ mir::interpret::ConstValue ] {
         Scalar(val),
-        ScalarPair(a, b),
+        Slice(a, b),
         ByRef(id, alloc, offset),
     }
 );

@@ -22,10 +22,13 @@ pub enum ConstValue<'tcx> {
     /// Not using the enum `Value` to encode that this must not be `Undef`
     Scalar(Scalar),

-    /// Used only for *fat pointers* with layout::abi::ScalarPair
+    /// Used only for slices and strings (`&[T]`, `&str`, `*const [T]`, `*mut str`, `Box<str>`, ...)
     ///
-    /// Needed for pattern matching code related to slices and strings.
-    ScalarPair(Scalar, Scalar),
+    /// Empty slices don't necessarily have an address backed by an `AllocId`, thus we also need to
+    /// enable integer pointers. The `Scalar` type covers exactly those two cases. While we could
+    /// create dummy-`AllocId`s, the additional code effort for the conversions doesn't seem worth
+    /// it.
+    Slice(Scalar, u64),

     /// An allocation + offset into the allocation.
     /// Invariant: The AllocId matches the allocation.
@@ -33,14 +36,14 @@ pub enum ConstValue<'tcx> {
 }

 #[cfg(target_arch = "x86_64")]
-static_assert!(CONST_SIZE: ::std::mem::size_of::<ConstValue<'static>>() == 56);
+static_assert!(CONST_SIZE: ::std::mem::size_of::<ConstValue<'static>>() == 40);

 impl<'tcx> ConstValue<'tcx> {
     #[inline]
     pub fn try_to_scalar(&self) -> Option<Scalar> {
         match *self {
             ConstValue::ByRef(..) |
-            ConstValue::ScalarPair(..) => None,
+            ConstValue::Slice(..) => None,
             ConstValue::Scalar(val) => Some(val),
         }
     }
@@ -59,17 +62,8 @@ impl<'tcx> ConstValue<'tcx> {
     pub fn new_slice(
         val: Scalar,
         len: u64,
-        cx: &impl HasDataLayout
     ) -> Self {
-        ConstValue::ScalarPair(val, Scalar::Bits {
-            bits: len as u128,
-            size: cx.data_layout().pointer_size.bytes() as u8,
-        })
-    }
-
-    #[inline]
-    pub fn new_dyn_trait(val: Scalar, vtable: Pointer) -> Self {
-        ConstValue::ScalarPair(val, Scalar::Ptr(vtable))
+        ConstValue::Slice(val, len)
     }
 }
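
The new doc comment's "integer pointers" case can be observed from ordinary Rust, outside the compiler: an empty slice constant has a non-null, well-aligned data pointer that doesn't have to point into any allocation, which is why the pointer half of Slice must admit Scalar::Bits and not only Scalar::Ptr. A runnable illustration:

// The two cases the pointer half of ConstValue::Slice must cover:
const EMPTY: &[u8] = &[];     // no backing allocation needed -> integer pointer
const HELLO: &str = "hello";  // interned allocation -> real pointer + length 5

fn main() {
    // Length 0, yet the data pointer is non-null and aligned.
    assert_eq!(EMPTY.len(), 0);
    assert!(!EMPTY.as_ptr().is_null());
    println!("empty slice data pointer: {:p}", EMPTY.as_ptr());
    assert_eq!(HELLO.len(), 5);
}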

@@ -2702,14 +2702,13 @@ pub fn fmt_const_val(f: &mut impl Write, const_val: ty::Const<'_>) -> fmt::Result
         return write!(f, "{}", item_path_str(did));
     }
     // print string literals
-    if let ConstValue::ScalarPair(ptr, len) = value {
+    if let ConstValue::Slice(ptr, len) = value {
         if let Scalar::Ptr(ptr) = ptr {
-            if let Scalar::Bits { bits: len, .. } = len {
             if let Ref(_, &ty::TyS { sty: Str, .. }, _) = ty.sty {
                 return ty::tls::with(|tcx| {
                     let alloc = tcx.alloc_map.lock().get(ptr.alloc_id);
                     if let Some(interpret::AllocKind::Memory(alloc)) = alloc {
-                        assert_eq!(len as usize as u128, len);
+                        assert_eq!(len as usize as u64, len);
                         let slice =
                             &alloc.bytes[(ptr.offset.bytes() as usize)..][..(len as usize)];
                         let s = ::std::str::from_utf8(slice).expect("non utf8 str from miri");
@@ -2721,7 +2720,6 @@ pub fn fmt_const_val(f: &mut impl Write, const_val: ty::Const<'_>) -> fmt::Result
             }
         }
     }
-    }
     // just raw dump everything else
     write!(f, "{:?}:{}", value, ty)
 }

@@ -498,7 +498,7 @@ impl<'a, 'tcx> Lift<'tcx> for ConstValue<'a> {
     fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
         match *self {
             ConstValue::Scalar(x) => Some(ConstValue::Scalar(x)),
-            ConstValue::ScalarPair(x, y) => Some(ConstValue::ScalarPair(x, y)),
+            ConstValue::Slice(x, y) => Some(ConstValue::Slice(x, y)),
             ConstValue::ByRef(x, alloc, z) => Some(ConstValue::ByRef(
                 x, alloc.lift_to_tcx(tcx)?, z,
             )),

@@ -2064,7 +2064,7 @@ pub enum LazyConst<'tcx> {
 }

 #[cfg(target_arch = "x86_64")]
-static_assert!(LAZY_CONST_SIZE: ::std::mem::size_of::<LazyConst<'static>>() == 72);
+static_assert!(LAZY_CONST_SIZE: ::std::mem::size_of::<LazyConst<'static>>() == 56);

 impl<'tcx> LazyConst<'tcx> {
     pub fn map_evaluated<R>(self, f: impl FnOnce(Const<'tcx>) -> Option<R>) -> Option<R> {
@@ -2093,7 +2093,7 @@ pub struct Const<'tcx> {
 }

 #[cfg(target_arch = "x86_64")]
-static_assert!(CONST_SIZE: ::std::mem::size_of::<Const<'static>>() == 64);
+static_assert!(CONST_SIZE: ::std::mem::size_of::<Const<'static>>() == 48);

 impl<'tcx> Const<'tcx> {
     #[inline]

@@ -88,9 +88,9 @@ impl<'a, 'tcx: 'a, V: CodegenObject> OperandRef<'tcx, V> {
                 );
                 OperandValue::Immediate(llval)
             },
-            ConstValue::ScalarPair(a, b) => {
-                let (a_scalar, b_scalar) = match layout.abi {
-                    layout::Abi::ScalarPair(ref a, ref b) => (a, b),
+            ConstValue::Slice(a, b) => {
+                let a_scalar = match layout.abi {
+                    layout::Abi::ScalarPair(ref a, _) => a,
                     _ => bug!("from_const: invalid ScalarPair layout: {:#?}", layout)
                 };
                 let a_llval = bx.cx().scalar_to_backend(
@@ -98,11 +98,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> OperandRef<'tcx, V> {
                     a_scalar,
                     bx.cx().scalar_pair_element_backend_type(layout, 0, true),
                 );
-                let b_llval = bx.cx().scalar_to_backend(
-                    b,
-                    b_scalar,
-                    bx.cx().scalar_pair_element_backend_type(layout, 1, true),
-                );
+                let b_llval = bx.cx().const_usize(b);
                 OperandValue::Pair(a_llval, b_llval)
             },
             ConstValue::ByRef(_, alloc, offset) => {

@@ -67,14 +67,11 @@ pub fn op_to_const<'tcx>(
     op: OpTy<'tcx>,
     may_normalize: bool,
 ) -> EvalResult<'tcx, ty::Const<'tcx>> {
-    // We do not normalize just any data. Only scalar layout and fat pointers.
+    // We do not normalize just any data. Only scalar layout and slices.
     let normalize = may_normalize
         && match op.layout.abi {
             layout::Abi::Scalar(..) => true,
-            layout::Abi::ScalarPair(..) => {
-                // Must be a fat pointer
-                op.layout.ty.builtin_deref(true).is_some()
-            },
+            layout::Abi::ScalarPair(..) => op.layout.ty.is_slice(),
             _ => false,
         };
     let normalized_op = if normalize {
@@ -103,7 +100,7 @@ pub fn op_to_const<'tcx>(
         Ok(Immediate::Scalar(x)) =>
             ConstValue::Scalar(x.not_undef()?),
         Ok(Immediate::ScalarPair(a, b)) =>
-            ConstValue::ScalarPair(a.not_undef()?, b.not_undef()?),
+            ConstValue::Slice(a.not_undef()?, b.to_usize(ecx)?),
     };
     Ok(ty::Const { val, ty: op.layout.ty })
 }
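
Note the subtle narrowing here (my reading of the hunk, not spelled out in the commit): builtin_deref(true).is_some() was true for every fat pointer, including trait objects, whereas is_slice() only accepts pointers to [T] or str, so &dyn Trait constants are no longer normalized into a pair value, consistent with new_dyn_trait being deleted above. A toy model of the narrowed check (stand-in types, not rustc's API):

// Toy types standing in for rustc's Ty; only the shape of the check matters.
#[derive(Clone, Copy)]
enum Ty {
    U8,
    Str,
    SliceOfU8,
    DynTrait,
    RefTo(&'static Ty),
}

// Mirrors what `op.layout.ty.is_slice()` selects: references (and raw
// pointers, elided here) to slices or str -- and nothing else.
fn is_slice(ty: Ty) -> bool {
    match ty {
        Ty::RefTo(pointee) => match *pointee {
            Ty::Str | Ty::SliceOfU8 => true,
            _ => false,
        },
        _ => false,
    }
}

fn main() {
    assert!(is_slice(Ty::RefTo(&Ty::Str)));       // &str: becomes ConstValue::Slice
    assert!(is_slice(Ty::RefTo(&Ty::SliceOfU8))); // &[u8]: becomes ConstValue::Slice
    assert!(!is_slice(Ty::RefTo(&Ty::DynTrait))); // &dyn Trait: stays ByRef now
    assert!(!is_slice(Ty::U8));                   // plain scalars take the arm above
}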

@@ -35,13 +35,13 @@ crate fn lit_to_const<'a, 'gcx, 'tcx>(
         LitKind::Str(ref s, _) => {
             let s = s.as_str();
             let id = tcx.allocate_bytes(s.as_bytes());
-            ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, &tcx)
+            ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64)
         },
         LitKind::Err(ref s) => {
             let s = s.as_str();
             let id = tcx.allocate_bytes(s.as_bytes());
             return Ok(ty::Const {
-                val: ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, &tcx),
+                val: ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64),
                 ty: tcx.types.err,
             });
         },

@@ -221,13 +221,16 @@ impl<'a, 'tcx> LiteralExpander<'a, 'tcx> {
             // unsize array to slice if pattern is array but match value or other patterns are slice
             (ConstValue::Scalar(Scalar::Ptr(p)), ty::Array(t, n), ty::Slice(u)) => {
                 assert_eq!(t, u);
-                ConstValue::ScalarPair(
+                ConstValue::Slice(
                     Scalar::Ptr(p),
-                    n.map_evaluated(|val| val.val.try_to_scalar()).unwrap(),
+                    n.map_evaluated(|val| val.val.try_to_scalar())
+                        .unwrap()
+                        .to_usize(&self.tcx)
+                        .unwrap(),
                 )
             },
             // fat pointers stay the same
-            (ConstValue::ScalarPair(..), _, _) => val,
+            (ConstValue::Slice(..), _, _) => val,
             // FIXME(oli-obk): this is reachable for `const FOO: &&&u32 = &&&42;` being used
             _ => bug!("cannot deref {:#?}, {} -> {}", val, crty, rty),
         }
@@ -788,9 +791,9 @@ fn max_slice_length<'p, 'a: 'p, 'tcx: 'a, I>(
                 max_fixed_len,
                 n.unwrap_usize(cx.tcx),
             ),
-            (ConstValue::ScalarPair(_, n), ty::Slice(_)) => max_fixed_len = cmp::max(
+            (ConstValue::Slice(_, n), ty::Slice(_)) => max_fixed_len = cmp::max(
                 max_fixed_len,
-                n.to_usize(&cx.tcx).unwrap(),
+                n,
             ),
             _ => {},
         }
@@ -1432,7 +1435,7 @@ fn slice_pat_covered_by_const<'tcx>(
             alloc.get_bytes(&tcx, ptr, Size::from_bytes(n)).unwrap()
         },
         // a slice fat pointer to a zero length slice
-        (ConstValue::ScalarPair(Scalar::Bits { .. }, n), ty::Slice(t)) => {
+        (ConstValue::Slice(Scalar::Bits { .. }, 0), ty::Slice(t)) => {
             if *t != tcx.types.u8 {
                 // FIXME(oli-obk): can't mix const patterns with slice patterns and get
                 // any sort of exhaustiveness/unreachable check yet
@@ -1440,11 +1443,10 @@ fn slice_pat_covered_by_const<'tcx>(
                 // are definitely unreachable.
                 return Ok(false);
             }
-            assert_eq!(n.to_usize(&tcx).unwrap(), 0);
             &[]
         },
         //
-        (ConstValue::ScalarPair(Scalar::Ptr(ptr), n), ty::Slice(t)) => {
+        (ConstValue::Slice(Scalar::Ptr(ptr), n), ty::Slice(t)) => {
             if *t != tcx.types.u8 {
                 // FIXME(oli-obk): can't mix const patterns with slice patterns and get
                 // any sort of exhaustiveness/unreachable check yet
@@ -1452,7 +1454,6 @@ fn slice_pat_covered_by_const<'tcx>(
                 // are definitely unreachable.
                 return Ok(false);
             }
-            let n = n.to_usize(&tcx).unwrap();
             tcx.alloc_map
                 .lock()
                 .unwrap_memory(ptr.alloc_id)
@@ -1784,12 +1785,12 @@ fn specialize<'p, 'a: 'p, 'tcx: 'a>(
                 },
                 ty::TyKind::Slice(t) => {
                     match value.val {
-                        ConstValue::ScalarPair(ptr, n) => (
+                        ConstValue::Slice(ptr, n) => (
                             ptr.to_ptr().ok().map(|ptr| (
                                 ptr,
                                 cx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
                             )),
-                            n.to_bits(cx.tcx.data_layout.pointer_size).unwrap() as u64,
+                            n,
                             t,
                         ),
                         _ => span_bug!(
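
For context, the "unsize array to slice" arm above mirrors an ordinary coercion: an array constant matched against slice patterns keeps its pointer and gains an explicit length (now a plain u64 extracted via to_usize). The same thing at the surface level:

// Surface-Rust analogue of the LiteralExpander arm: &[u8; 3] -> &[u8], i.e.
// (pointer) -> (pointer, length = 3).
const ARR: &[u8; 3] = b"abc";

fn main() {
    let slice: &[u8] = ARR; // unsizing coercion adds the length
    assert_eq!(slice.len(), 3);
    match slice {
        [b'a', ..] => println!("array constant matched by a slice pattern"),
        _ => unreachable!(),
    }
}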

@@ -1218,28 +1218,24 @@ pub fn compare_const_vals<'a, 'gcx, 'tcx>(
     if let ty::Str = ty.value.sty {
         match (a.val, b.val) {
             (
-                ConstValue::ScalarPair(
+                ConstValue::Slice(
                     Scalar::Ptr(ptr_a),
                     len_a,
                 ),
-                ConstValue::ScalarPair(
+                ConstValue::Slice(
                     Scalar::Ptr(ptr_b),
                     len_b,
                 ),
             ) if ptr_a.offset.bytes() == 0 && ptr_b.offset.bytes() == 0 => {
-                if let Ok(len_a) = len_a.to_bits(tcx.data_layout.pointer_size) {
-                    if let Ok(len_b) = len_b.to_bits(tcx.data_layout.pointer_size) {
                 if len_a == len_b {
                     let map = tcx.alloc_map.lock();
                     let alloc_a = map.unwrap_memory(ptr_a.alloc_id);
                     let alloc_b = map.unwrap_memory(ptr_b.alloc_id);
-                    if alloc_a.bytes.len() as u128 == len_a {
+                    if alloc_a.bytes.len() as u64 == len_a {
                         return from_bool(alloc_a == alloc_b);
                     }
                 }
-                    }
-                }
             }
             _ => (),
         }
     }
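
With both lengths stored as plain u64s, the &str fast path loses its two to_bits() unwrapping layers and reduces to an integer compare plus a compare of the backing allocations. A simplified standalone version of that logic (hypothetical helper over raw bytes, not the rustc types):

// Returns Some(equal?) when the fast path applies, None to fall through to
// the generic comparison (mirroring the `_ => ()` arm).
fn str_consts_equal(alloc_a: &[u8], len_a: u64, alloc_b: &[u8], len_b: u64) -> Option<bool> {
    if len_a == len_b && alloc_a.len() as u64 == len_a {
        return Some(alloc_a == alloc_b);
    }
    None
}

fn main() {
    assert_eq!(str_consts_equal(b"foo", 3, b"foo", 3), Some(true));
    assert_eq!(str_consts_equal(b"foo", 3, b"bar", 3), Some(false));
    assert_eq!(str_consts_equal(b"fo", 2, b"foo", 3), None);
}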

@@ -555,10 +555,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                     MemPlace::from_ptr(Pointer::new(id, offset), alloc.align)
                 ).with_default_tag())
             },
-            ConstValue::ScalarPair(a, b) =>
+            ConstValue::Slice(a, b) =>
                 Ok(Operand::Immediate(Immediate::ScalarPair(
                     a.into(),
-                    b.into(),
+                    Scalar::from_uint(b, self.tcx.data_layout.pointer_size).into(),
                 )).with_default_tag()),
             ConstValue::Scalar(x) =>
                 Ok(Operand::Immediate(Immediate::Scalar(x.into())).with_default_tag()),

@@ -1254,12 +1254,7 @@ fn collect_const<'a, 'tcx>(
     debug!("visiting const {:?}", constant);

     match constant.val {
-        ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b)) => {
-            collect_miri(tcx, a.alloc_id, output);
-            collect_miri(tcx, b.alloc_id, output);
-        }
-        ConstValue::ScalarPair(_, Scalar::Ptr(ptr)) |
-        ConstValue::ScalarPair(Scalar::Ptr(ptr), _) |
+        ConstValue::Slice(Scalar::Ptr(ptr), _) |
         ConstValue::Scalar(Scalar::Ptr(ptr)) =>
             collect_miri(tcx, ptr.alloc_id, output),
         ConstValue::ByRef(_id, alloc, _offset) => {