
Also generate undef scalars and scalar pairs

Oli Scherer 2025-01-10 11:28:20 +00:00
commit 8876cf7181
3 changed files with 38 additions and 25 deletions
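
As a rough illustration of what the commit enables (a hypothetical example, not taken from the commit or its tests): a constant whose layout is a scalar or scalar pair but whose bytes are only partly initialized, for instance via `MaybeUninit`, previously fell back to being spilled into a memory allocation because the codegen match arms below required `abi::Scalar::Initialized`. Assuming `MaybeUninit<u32>` keeps a scalar-style layout, a pair like the one below can now be emitted as an immediate whose uninitialized half is `undef`:

use std::mem::MaybeUninit;

// Hypothetical example: the second half of this pair is never initialized.
// With this commit, codegen may represent the constant as an immediate
// scalar pair (a real value plus `undef`) instead of loading both halves
// back from a static allocation.
pub const HALF_UNINIT: (u32, MaybeUninit<u32>) = (42, MaybeUninit::uninit());

fn main() {
    // Only the initialized half is read; the other half stays untouched.
    println!("{}", HALF_UNINIT.0);
}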


@@ -204,14 +204,30 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         let alloc_align = alloc.inner().align;
         assert!(alloc_align >= layout.align.abi);
 
+        // Returns `None` when the value is partially undefined or any byte of it has provenance.
+        // Otherwise returns the value or (if the entire value is undef) returns an undef.
         let read_scalar = |start, size, s: abi::Scalar, ty| {
+            let range = alloc_range(start, size);
             match alloc.0.read_scalar(
                 bx,
-                alloc_range(start, size),
+                range,
                 /*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
             ) {
-                Ok(val) => bx.scalar_to_backend(val, s, ty),
-                Err(_) => bx.const_poison(ty),
+                Ok(val) => Some(bx.scalar_to_backend(val, s, ty)),
+                Err(_) => {
+                    // We may have failed due to partial provenance or unexpected provenance,
+                    // continue down the normal code path if so.
+                    if alloc.0.provenance().range_empty(range, &bx.tcx())
+                        // Since `read_scalar` failed, but there were no relocations involved, the
+                        // bytes must be partially or fully uninitialized. Thus we can now unwrap the
+                        // information about the range of uninit bytes and check if it's the full range.
+                        && alloc.0.init_mask().is_range_initialized(range).unwrap_err() == range
+                    {
+                        Some(bx.const_undef(ty))
+                    } else {
+                        None
+                    }
+                }
             }
         };
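
Aside: stripped of the compiler's types, the closure implements a three-way decision. The sketch below is a stand-alone model of that decision under simplifying assumptions (a plain byte-level init mask, and a single flag for provenance the scalar read could not represent); every name in it is made up for illustration and is not part of rustc.

// Minimal model of `read_scalar`'s outcome. The real code works on
// `Allocation`, its `InitMask` and its provenance map; here a bool slice
// stands in for the init mask.
#[derive(Debug, PartialEq)]
enum ScalarConst {
    Value,    // read succeeded: emit the constant value
    Undef,    // no provenance and every byte uninitialized: emit `undef`
    Fallback, // anything else: fall back to the memory code path
}

fn classify(init: &[bool], stray_provenance: bool) -> ScalarConst {
    if !stray_provenance && init.iter().all(|&b| b) {
        ScalarConst::Value
    } else if !stray_provenance && init.iter().all(|&b| !b) {
        // Mirrors the new `Err(_)` branch: no relocations are involved and
        // the whole range is uninitialized, so an `undef` immediate is sound.
        ScalarConst::Undef
    } else {
        // Partially initialized bytes or problematic provenance: continue
        // down the normal path and load the value from memory instead.
        ScalarConst::Fallback
    }
}

fn main() {
    assert_eq!(classify(&[true; 4], false), ScalarConst::Value);
    assert_eq!(classify(&[false; 4], false), ScalarConst::Undef);
    assert_eq!(classify(&[true, false, true, true], false), ScalarConst::Fallback);
    assert_eq!(classify(&[false; 4], true), ScalarConst::Fallback);
}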
@@ -222,16 +238,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         // check that walks over the type of `mplace` to make sure it is truly correct to treat this
         // like a `Scalar` (or `ScalarPair`).
         match layout.backend_repr {
-            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
+            BackendRepr::Scalar(s) => {
                 let size = s.size(bx);
                 assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
-                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
-                OperandRef { val: OperandValue::Immediate(val), layout }
+                if let Some(val) = read_scalar(offset, size, s, bx.immediate_backend_type(layout)) {
+                    return OperandRef { val: OperandValue::Immediate(val), layout };
+                }
             }
-            BackendRepr::ScalarPair(
-                a @ abi::Scalar::Initialized { .. },
-                b @ abi::Scalar::Initialized { .. },
-            ) => {
+            BackendRepr::ScalarPair(a, b) => {
                 let (a_size, b_size) = (a.size(bx), b.size(bx));
                 let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                 assert!(b_offset.bytes() > 0);
@@ -247,20 +261,21 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                     b,
                     bx.scalar_pair_element_backend_type(layout, 1, true),
                 );
-                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
+                if let (Some(a_val), Some(b_val)) = (a_val, b_val) {
+                    return OperandRef { val: OperandValue::Pair(a_val, b_val), layout };
+                }
             }
-            _ if layout.is_zst() => OperandRef::zero_sized(layout),
-            _ => {
-                // Neither a scalar nor scalar pair. Load from a place
-                // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
-                // same `ConstAllocation`?
-                let init = bx.const_data_from_alloc(alloc);
-                let base_addr = bx.static_addr_of(init, alloc_align, None);
-
-                let llval = bx.const_ptr_byte_offset(base_addr, offset);
-                bx.load_operand(PlaceRef::new_sized(llval, layout))
-            }
+            _ if layout.is_zst() => return OperandRef::zero_sized(layout),
+            _ => {}
         }
+        // Neither a scalar nor scalar pair. Load from a place
+        // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
+        // same `ConstAllocation`?
+        let init = bx.const_data_from_alloc(alloc);
+        let base_addr = bx.static_addr_of(init, alloc_align, None);
+
+        let llval = bx.const_ptr_byte_offset(base_addr, offset);
+        bx.load_operand(PlaceRef::new_sized(llval, layout))
     }
 
     /// Asserts that this operand refers to a scalar and returns
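
Note how the fallback is restructured: the `Scalar` and `ScalarPair` arms now `return` early when they manage to build an immediate, and every remaining case (including a partially initialized scalar, for which `read_scalar` returns `None`) falls through to a single shared path that spills the allocation into a static and loads the operand from it.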


@@ -222,7 +222,7 @@ impl AllocError {
 }
 
 /// The information that makes up a memory access: offset and size.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, PartialEq)]
 pub struct AllocRange {
     pub start: Size,
     pub size: Size,


@@ -17,8 +17,6 @@ pub fn overaligned_constant() {
     // CHECK-LABEL: @overaligned_constant
     // CHECK: [[full:%_.*]] = alloca [32 x i8], align 8
     // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[full]], ptr align 8 @0, i64 32, i1 false)
-    // CHECK: %b.0 = load i32, ptr @0, align 4
-    // CHECK: %b.1 = load i32, ptr getelementptr inbounds ({{.*}}), align 4
     let mut s = S(1);
     s.0 = 3;
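
Presumably these two CHECK lines are dropped because, with the change, the scalar pair for `b` can be built directly as immediate constants (with `undef` for uninitialized bytes), so the generated code no longer loads both halves back from the constant allocation `@0`.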