Mark scalar layout unions so that backends that do not support partially initialized scalars can special case them.

This commit is contained in:
Oli Scherer 2022-03-03 12:02:12 +00:00
parent 2ed6786404
commit d32ce37a17
37 changed files with 356 additions and 288 deletions
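The hunks below show call sites moving from the old `layout.value` field to `layout.size(..)` / `layout.primitive()` accessors, consistent with the commit message about marking scalar layout unions. As a rough illustration only, here is a small self-contained Rust sketch of how a scalar layout type could distinguish fully initialized scalars from union scalars behind such accessors; the names and shapes are simplified assumptions, not the actual rustc_target definitions.

// Simplified stand-in for rustc's `Primitive`; the real type also carries
// integer signedness, float kinds, and pointer details.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Primitive {
    Int { bits: u64 },
    Pointer,
}

impl Primitive {
    fn size_in_bits(self) -> u64 {
        match self {
            Primitive::Int { bits } => bits,
            // Assume a 64-bit target for this sketch.
            Primitive::Pointer => 64,
        }
    }
}

// Sketch of a scalar layout that marks scalars read out of unions, which may
// be only partially initialized, separately from fully initialized scalars.
#[derive(Clone, Copy, Debug)]
enum Scalar {
    Initialized { value: Primitive },
    Union { value: Primitive },
}

impl Scalar {
    // Call sites that used to read a `value` field go through accessors now.
    fn primitive(self) -> Primitive {
        match self {
            Scalar::Initialized { value } | Scalar::Union { value } => value,
        }
    }

    fn size_in_bits(self) -> u64 {
        self.primitive().size_in_bits()
    }
}

fn main() {
    let ordinary = Scalar::Initialized { value: Primitive::Pointer };
    let from_union = Scalar::Union { value: Primitive::Int { bits: 32 } };

    // A backend that cannot emit partially initialized scalars can now
    // special-case the `Union` variant while still querying the primitive.
    for layout in [ordinary, from_union] {
        match layout {
            Scalar::Union { .. } => println!("union scalar, {} bits", layout.size_in_bits()),
            Scalar::Initialized { .. } => println!("scalar, {:?}", layout.primitive()),
        }
    }
}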

@@ -158,14 +158,14 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
     }
     fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, ty: Type<'gcc>) -> RValue<'gcc> {
-        let bitsize = if layout.is_bool() { 1 } else { layout.value.size(self).bits() };
+        let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
         match cv {
             Scalar::Int(ScalarInt::ZST) => {
-                assert_eq!(0, layout.value.size(self).bytes());
+                assert_eq!(0, layout.size(self).bytes());
                 self.const_undef(self.type_ix(0))
             }
             Scalar::Int(int) => {
-                let data = int.assert_bits(layout.value.size(self));
+                let data = int.assert_bits(layout.size(self));
                 // FIXME(antoyo): there's some issues with using the u128 code that follows, so hard-code
                 // the paths for floating-point values.
@@ -209,7 +209,7 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
                 let base_addr = self.const_bitcast(base_addr, self.usize_type);
                 let offset = self.context.new_rvalue_from_long(self.usize_type, offset.bytes() as i64);
                 let ptr = self.const_bitcast(base_addr + offset, ptr_type);
-                if layout.value != Pointer {
+                if layout.primitive() != Pointer {
                     self.const_bitcast(ptr.dereference(None).to_rvalue(), ty)
                 }
                 else {