
Mark scalar layout unions so that backends that do not support partially initialized scalars can special case them.

Oli Scherer 2022-03-03 12:02:12 +00:00
parent 6ce5ce82af
commit d71f633d38
4 changed files with 11 additions and 11 deletions
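
The changes below are mostly mechanical fallout of an upstream change: rustc_target::abi::Scalar went from a struct with public `value` and `valid_range` fields to an enum that distinguishes fully initialized scalars from scalars read out of unions, with the old fields reached through accessor methods. A minimal sketch of that shape, reconstructed from the call sites in this diff rather than copied from rustc_target:

// Sketch only: the real definition lives in rustc_target::abi.
// `Primitive` and `WrappingRange` are the existing rustc_target::abi types.
pub enum Scalar {
    // A scalar whose bytes are known to be fully initialized.
    Initialized { value: Primitive, valid_range: WrappingRange },
    // A scalar loaded from a union field: its bytes may be partially
    // uninitialized, so there is no valid range to enforce.
    Union { value: Primitive },
}

impl Scalar {
    // `scalar.value` becomes `scalar.primitive()`; both variants carry one.
    pub fn primitive(&self) -> Primitive {
        match *self {
            Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
        }
    }
}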

src/builder.rs

@@ -694,11 +694,11 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
 }
 
 fn scalar_load_metadata<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>, load: RValue<'gcc>, scalar: &abi::Scalar) {
-    let vr = scalar.valid_range.clone();
-    match scalar.value {
+    let vr = scalar.valid_range(bx);
+    match scalar.primitive() {
         abi::Int(..) => {
             if !scalar.is_always_valid(bx) {
-                bx.range_metadata(load, scalar.valid_range);
+                bx.range_metadata(load, vr);
             }
         }
         abi::Pointer if vr.start < vr.end && !vr.contains(0) => {
@@ -720,7 +720,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
             OperandValue::Immediate(self.to_immediate(load, place.layout))
         }
         else if let abi::Abi::ScalarPair(ref a, ref b) = place.layout.abi {
-            let b_offset = a.value.size(self).align_to(b.value.align(self).abi);
+            let b_offset = a.size(self).align_to(b.align(self).abi);
             let pair_type = place.layout.gcc_type(self, false);
             let mut load = |i, scalar: &abi::Scalar, align| {
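
In the hunks above, `valid_range` turns from a field into a method that takes the builder: for a `Union` scalar the range must be synthesized as the full range of the primitive, and the primitive's size depends on the target's data layout. A hedged sketch of plausible accessor bodies, assuming the existing rustc_target::abi helpers `WrappingRange::full` and `WrappingRange::is_full_for`:

impl Scalar {
    // A union scalar admits every bit pattern, so its range is the full
    // range of the primitive; computing it needs the data layout.
    pub fn valid_range<C: HasDataLayout>(&self, cx: &C) -> WrappingRange {
        match *self {
            Scalar::Initialized { valid_range, .. } => valid_range,
            Scalar::Union { value } => WrappingRange::full(value.size(cx)),
        }
    }

    // This is what gates the range_metadata call above: a full range means
    // there is nothing to tell the backend, and union scalars are always
    // treated as valid.
    pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
        match *self {
            Scalar::Initialized { value, valid_range } => valid_range.is_full_for(value.size(cx)),
            Scalar::Union { .. } => true,
        }
    }
}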

src/common.rs

@@ -158,14 +158,14 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
     }
 
     fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, ty: Type<'gcc>) -> RValue<'gcc> {
-        let bitsize = if layout.is_bool() { 1 } else { layout.value.size(self).bits() };
+        let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
         match cv {
             Scalar::Int(ScalarInt::ZST) => {
-                assert_eq!(0, layout.value.size(self).bytes());
+                assert_eq!(0, layout.size(self).bytes());
                 self.const_undef(self.type_ix(0))
             }
             Scalar::Int(int) => {
-                let data = int.assert_bits(layout.value.size(self));
+                let data = int.assert_bits(layout.size(self));
                 // FIXME(antoyo): there's some issues with using the u128 code that follows, so hard-code
                 // the paths for floating-point values.
@@ -209,7 +209,7 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
                 let base_addr = self.const_bitcast(base_addr, self.usize_type);
                 let offset = self.context.new_rvalue_from_long(self.usize_type, offset.bytes() as i64);
                 let ptr = self.const_bitcast(base_addr + offset, ptr_type);
-                if layout.value != Pointer {
+                if layout.primitive() != Pointer {
                     self.const_bitcast(ptr.dereference(None).to_rvalue(), ty)
                 }
                 else {
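
The common.rs hunks replace `layout.value.size(self)` with `layout.size(self)`: size and alignment only depend on the primitive, which both variants carry, so they can be answered uniformly. A plausible sketch of the forwarding accessors (`Primitive::size` and `Primitive::align` are existing rustc_target::abi methods):

impl Scalar {
    // Size and alignment are properties of the primitive alone, so they
    // are well-defined even for union scalars.
    pub fn size<C: HasDataLayout>(&self, cx: &C) -> Size {
        self.primitive().size(cx)
    }

    pub fn align<C: HasDataLayout>(&self, cx: &C) -> AbiAndPrefAlign {
        self.primitive().align(cx)
    }
}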

src/consts.rs

@@ -328,7 +328,7 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
                     interpret::Pointer::new(alloc_id, Size::from_bytes(ptr_offset)),
                     &cx.tcx,
                 ),
-                abi::Scalar { value: Primitive::Pointer, valid_range: WrappingRange { start: 0, end: !0 } },
+                abi::Scalar::Initialized { value: Primitive::Pointer, valid_range: WrappingRange { start: 0, end: !0 } },
                 cx.type_i8p(),
             ));
             next_offset = offset + pointer_size;
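
This is the one construction site: the pointer scalar built for relocations must now state explicitly that it is `Initialized`. Its range of 0..=!0 spans every address, so it imposes no niche. A hypothetical illustration, not code from the commit (assumes some `cx` implementing `HasDataLayout` is in scope):

// Hypothetical: a full-range Initialized scalar is always valid.
let ptr_scalar = abi::Scalar::Initialized {
    value: Primitive::Pointer,
    valid_range: WrappingRange { start: 0, end: !0 }, // full range, no niche
};
// A full range means loads of this scalar get no range metadata.
assert!(ptr_scalar.is_always_valid(cx));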

src/type_of.rs

@@ -224,7 +224,7 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
     }
 
     fn scalar_gcc_type_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, scalar: &abi::Scalar, offset: Size) -> Type<'gcc> {
-        match scalar.value {
+        match scalar.primitive() {
             Int(i, true) => cx.type_from_integer(i),
             Int(i, false) => cx.type_from_unsigned_integer(i),
             F32 => cx.type_f32(),
@@ -282,7 +282,7 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
                 Size::ZERO
             }
             else {
-                a.value.size(cx).align_to(b.value.align(cx).abi)
+                a.size(cx).align_to(b.align(cx).abi)
             };
         self.scalar_gcc_type_at(cx, scalar, offset)
     }