
Split the "raw integer bytes" part out of Scalar

Authored by Oliver Scherer, 2020-09-26 15:15:35 +02:00; committed by oli
parent 56293097f7
commit 362123dd75
17 changed files with 325 additions and 184 deletions
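
The gist of the change, as a minimal standalone sketch (toy types; only the names `Scalar`, `Raw`, and `ScalarInt` are taken from the diff, everything else is simplified): the `data`/`size` fields that previously lived directly on the `Scalar::Raw` variant move into a dedicated `ScalarInt` type, and the variant now carries that single self-describing value.

    // Toy illustration of the split, not rustc's actual definitions.

    /// Stand-in for the new `ScalarInt`: the raw bits plus the width (in bytes) they are valid for.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub struct ScalarInt {
        data: u128,
        size: u8,
    }

    /// Before: `Scalar::Raw { data: u128, size: u8 }`.
    /// After:  `Scalar::Raw(ScalarInt)` -- the integer payload is one value that knows its own size.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub enum Scalar<Ptr> {
        Raw(ScalarInt),
        Ptr(Ptr),
    }

    fn main() {
        let a: Scalar<()> = Scalar::Raw(ScalarInt { data: 42, size: 8 });
        let b: Scalar<()> = Scalar::Raw(ScalarInt { data: 42, size: 8 });
        // Integer/integer comparisons stay plain value comparisons after the split.
        assert_eq!(a, b);
    }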


@@ -137,10 +137,10 @@ pub(super) fn op_to_const<'tcx>(
             let alloc = ecx.tcx.global_alloc(ptr.alloc_id).unwrap_memory();
             ConstValue::ByRef { alloc, offset: ptr.offset }
         }
-        Scalar::Raw { data, .. } => {
+        Scalar::Raw(int) => {
             assert!(mplace.layout.is_zst());
             assert_eq!(
-                u64::try_from(data).unwrap() % mplace.layout.align.abi.bytes(),
+                u64::try_from(int).unwrap() % mplace.layout.align.abi.bytes(),
                 0,
                 "this MPlaceTy must come from a validated constant, thus we can assume the \
                 alignment is correct",
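
For context on the assertion above: a zero-sized place carries no bytes, so when its "pointer" is a raw integer the only thing worth checking is that the address is a multiple of the type's alignment. A toy version of just that arithmetic (plain functions, nothing from rustc's API):

    // Minimal alignment check mirroring `addr % align == 0` from the assertion above.
    fn is_aligned(addr: u64, align_bytes: u64) -> bool {
        addr % align_bytes == 0
    }

    fn main() {
        assert!(is_aligned(16, 8)); // a ZST "placed" at address 16 with 8-byte alignment is fine
        assert!(!is_aligned(13, 4)); // a misaligned address would trip the assertion
    }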


@@ -1,6 +1,6 @@
 use rustc_middle::mir;
 use rustc_middle::ty::layout::HasTyCtxt;
-use rustc_middle::ty::{self, Ty};
+use rustc_middle::ty::{self, ScalarInt, Ty};
 use std::borrow::Borrow;
 use std::collections::hash_map::Entry;
 use std::hash::Hash;
@@ -194,13 +194,13 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
     fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
         match (a, b) {
             // Comparisons between integers are always known.
-            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a != b,
+            (Scalar::Raw(_), Scalar::Raw(_)) => a != b,
             // Comparisons of abstract pointers with null pointers are known if the pointer
             // is in bounds, because if they are in bounds, the pointer can't be null.
-            (Scalar::Raw { data: 0, .. }, Scalar::Ptr(ptr))
-            | (Scalar::Ptr(ptr), Scalar::Raw { data: 0, .. }) => !self.memory.ptr_may_be_null(ptr),
             // Inequality with integers other than null can never be known for sure.
-            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
+            (Scalar::Raw(int), Scalar::Ptr(ptr)) | (Scalar::Ptr(ptr), Scalar::Raw(int)) => {
+                int == ScalarInt::null(int.size()) && !self.memory.ptr_may_be_null(ptr)
+            }
             // FIXME: return `true` for at least some comparisons where we can reliably
             // determine the result of runtime inequality tests at compile-time.
             // Examples include comparison of addresses in different static items.
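
The two old integer-vs-pointer arms collapse into one here: the inequality is only known when the integer side is null and the pointer is known to be in bounds (and therefore non-null); every other integer-vs-pointer comparison conservatively stays "unknown" (false). A small sketch of that boolean logic with toy types; `guaranteed_ne_int_ptr` and the hard-coded `ptr_may_be_null` flag are made up for illustration:

    // Minimal sketch of the merged match arm's logic.
    #[derive(Clone, Copy, PartialEq, Eq)]
    struct ScalarInt { data: u128, size: u8 }

    impl ScalarInt {
        fn null(size: u8) -> Self { ScalarInt { data: 0, size } }
        fn size(self) -> u8 { self.size }
    }

    /// `true` only when the integer is null and the pointer is known to be in bounds
    /// (an in-bounds pointer is never null); anything else stays "unknown" (false).
    fn guaranteed_ne_int_ptr(int: ScalarInt, ptr_may_be_null: bool) -> bool {
        int == ScalarInt::null(int.size()) && !ptr_may_be_null
    }

    fn main() {
        assert!(guaranteed_ne_int_ptr(ScalarInt::null(8), false)); // null vs in-bounds pointer: known unequal
        assert!(!guaranteed_ne_int_ptr(ScalarInt::null(8), true)); // pointer might be null: unknown
        assert!(!guaranteed_ne_int_ptr(ScalarInt { data: 1, size: 8 }, false)); // non-null int: unknown
    }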


@@ -211,14 +211,11 @@ impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag> {
     #[inline]
     pub fn to_const_int(self) -> ConstInt {
         assert!(self.layout.ty.is_integral());
-        ConstInt::new(
-            self.to_scalar()
-                .expect("to_const_int doesn't work on scalar pairs")
-                .assert_bits(self.layout.size),
-            self.layout.size,
-            self.layout.ty.is_signed(),
-            self.layout.ty.is_ptr_sized_integral(),
-        )
+        let int = match self.to_scalar().expect("to_const_int doesn't work on scalar pairs") {
+            Scalar::Raw(int) => int,
+            Scalar::Ptr(_) => bug!("to_const_int doesn't work on pointers"),
+        };
+        ConstInt::new(int, self.layout.ty.is_signed(), self.layout.ty.is_ptr_sized_integral())
     }
 }
@@ -544,7 +541,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         let tag_scalar = |scalar| -> InterpResult<'tcx, _> {
             Ok(match scalar {
                 Scalar::Ptr(ptr) => Scalar::Ptr(self.global_base_pointer(ptr)?),
-                Scalar::Raw { data, size } => Scalar::Raw { data, size },
+                Scalar::Raw(int) => Scalar::Raw(int),
             })
         };
         // Early-return cases.
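
Both hunks in this file follow from the same fact: a `ScalarInt` carries its own size, so code that used to pass raw bits plus a separate size now passes the one value. In particular `ConstInt::new` loses its explicit size argument; a toy before/after of that constructor, with the signature inferred from the call sites in this diff rather than copied from rustc:

    // Toy `ConstInt`; the field names and the old four-argument form are assumptions for illustration.
    #[derive(Clone, Copy)]
    struct ScalarInt { data: u128, size: u8 }

    struct ConstInt {
        int: ScalarInt,
        signed: bool,
        is_ptr_sized_integral: bool,
    }

    impl ConstInt {
        // Old (sketch): ConstInt::new(bits, size, signed, is_ptr_sized_integral)
        // New (sketch): the width travels inside the ScalarInt, so no separate size parameter.
        fn new(int: ScalarInt, signed: bool, is_ptr_sized_integral: bool) -> Self {
            ConstInt { int, signed, is_ptr_sized_integral }
        }
    }

    fn main() {
        let c = ConstInt::new(ScalarInt { data: 1, size: 8 }, false, false);
        assert_eq!(c.int.size, 8); // the width is recoverable from the payload itself
    }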


@@ -721,12 +721,8 @@ where
                 dest.layout.size,
                 "Size mismatch when writing pointer"
             ),
-            Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::Raw { size, .. })) => {
-                assert_eq!(
-                    Size::from_bytes(size),
-                    dest.layout.size,
-                    "Size mismatch when writing bits"
-                )
+            Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::Raw(int))) => {
+                assert_eq!(int.size(), dest.layout.size, "Size mismatch when writing bits")
             }
             Immediate::Scalar(ScalarMaybeUninit::Uninit) => {} // uninit can have any size
             Immediate::ScalarPair(_, _) => {
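
Same theme in this size assertion: the old code had to rebuild a `Size` from the raw `size: u8` field, while the new payload can report its width directly via `int.size()`. A toy model of that accessor (the `Size` type here is a stand-in, not rustc's):

    // Toy `Size` and the accessor the new assert_eq! relies on.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Size { bytes: u64 }

    impl Size {
        fn from_bytes(bytes: u64) -> Self { Size { bytes } }
    }

    #[derive(Clone, Copy)]
    struct ScalarInt { data: u128, size: u8 }

    impl ScalarInt {
        // The width is stored next to the bits, so it can be handed back as a proper Size.
        fn size(self) -> Size { Size::from_bytes(u64::from(self.size)) }
    }

    fn main() {
        let int = ScalarInt { data: 0, size: 4 };
        // Old: assert_eq!(Size::from_bytes(raw_u8_size), dest.layout.size, ...)
        // New: assert_eq!(int.size(), dest.layout.size, ...)
        assert_eq!(int.size(), Size::from_bytes(4));
    }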


@@ -19,7 +19,9 @@ use rustc_middle::mir::{
 };
 use rustc_middle::ty::layout::{HasTyCtxt, LayoutError, TyAndLayout};
 use rustc_middle::ty::subst::{InternalSubsts, Subst};
-use rustc_middle::ty::{self, ConstInt, ConstKind, Instance, ParamEnv, Ty, TyCtxt, TypeFoldable};
+use rustc_middle::ty::{
+    self, ConstInt, ConstKind, Instance, ParamEnv, ScalarInt, Ty, TyCtxt, TypeFoldable,
+};
 use rustc_session::lint;
 use rustc_span::{def_id::DefId, Span};
 use rustc_target::abi::{HasDataLayout, LayoutOf, Size, TargetDataLayout};
@@ -578,8 +580,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
             Some(l) => l.to_const_int(),
             // Invent a dummy value, the diagnostic ignores it anyway
             None => ConstInt::new(
-                1,
-                left_size,
+                ScalarInt::try_from_uint(1_u8, left_size).unwrap(),
                 left_ty.is_signed(),
                 left_ty.is_ptr_sized_integral(),
             ),
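
The dummy `1` for the overflow diagnostic is now built through `ScalarInt::try_from_uint`, which presumably only succeeds when the value fits in the requested width (hence the `.unwrap()` being safe for `1`). A toy version with that behaviour; the real implementation in rustc may differ:

    // Toy `try_from_uint`: succeed only if `value` fits in `size_bytes` bytes.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct ScalarInt { data: u128, size: u8 }

    fn try_from_uint(value: u128, size_bytes: u8) -> Option<ScalarInt> {
        let bits = u32::from(size_bytes) * 8;
        let fits = bits >= 128 || value < (1u128 << bits);
        if fits { Some(ScalarInt { data: value, size: size_bytes }) } else { None }
    }

    fn main() {
        // The const-prop diagnostic path asks for a `1` of the left operand's width.
        assert!(try_from_uint(1, 8).is_some());
        // A value that does not fit in the requested width is rejected.
        assert!(try_from_uint(300, 1).is_none());
    }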


@@ -26,22 +26,26 @@ use rustc_middle::{
 pub struct SimplifyComparisonIntegral;
 impl<'tcx> MirPass<'tcx> for SimplifyComparisonIntegral {
-    fn run_pass(&self, _: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
         trace!("Running SimplifyComparisonIntegral on {:?}", body.source);
         let helper = OptimizationFinder { body };
         let opts = helper.find_optimizations();
         let mut storage_deads_to_insert = vec![];
         let mut storage_deads_to_remove: Vec<(usize, BasicBlock)> = vec![];
+        let param_env = tcx.param_env(body.source.def_id());
         for opt in opts {
             trace!("SUCCESS: Applying {:?}", opt);
             // replace terminator with a switchInt that switches on the integer directly
             let bbs = &mut body.basic_blocks_mut();
             let bb = &mut bbs[opt.bb_idx];
-            // We only use the bits for the untyped, not length checked `values` field. Thus we are
-            // not using any of the convenience wrappers here and directly access the bits.
             let new_value = match opt.branch_value_scalar {
-                Scalar::Raw { data, .. } => data,
+                Scalar::Raw(int) => {
+                    let layout = tcx
+                        .layout_of(param_env.and(opt.branch_value_ty))
+                        .expect("if we have an evaluated constant we must know the layout");
+                    int.assert_bits(layout.size)
+                }
                 Scalar::Ptr(_) => continue,
             };
             const FALSE: u128 = 0;
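
The SwitchInt `values` field is untyped, so the pass still needs the plain bits, but pulling them out of a `ScalarInt` with `assert_bits` means stating the expected width, which is why `run_pass` now takes `tcx` and looks up the layout of `opt.branch_value_ty` through `param_env`. A toy `assert_bits` with that contract (the assumption being that a width mismatch is a compiler bug and panics):

    // Toy model of the size-checked bit extraction used above.
    #[derive(Clone, Copy)]
    struct ScalarInt { data: u128, size: u8 }

    impl ScalarInt {
        /// Hand back the raw bits, but only if the caller's expected width matches
        /// the width stored in the value.
        fn assert_bits(self, expected_size_bytes: u8) -> u128 {
            assert_eq!(self.size, expected_size_bytes, "value width does not match the layout");
            self.data
        }
    }

    fn main() {
        let branch_value = ScalarInt { data: 7, size: 4 }; // e.g. a 4-byte branch value
        let layout_size_bytes = 4; // what layout_of(branch_value_ty) would report
        assert_eq!(branch_value.assert_bits(layout_size_bytes), 7);
    }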