
Prepare struct_gep for opaque pointers

Implement struct_gep using LLVMBuildStructGEP2, which takes an explicit
type argument instead of deriving it from a pointer type.
Author: Tomasz Miąsko, 2021-08-01 00:00:00 +00:00
parent 87d713ff2b
commit 838042aa4e
6 changed files with 20 additions and 12 deletions
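
For context, a minimal self-contained sketch of the new call shape: struct_gep now threads the aggregate's type from the caller down to the GEP. The trait, builder, and the Ty/Val placeholders below are invented for illustration only; they are not rustc's or LLVM's real types.

// Mock illustration of the signature change: struct_gep takes the struct
// type explicitly instead of recovering it from the pointer's pointee type.
// `Ty`, `Val`, `StructGep`, and `MockBuilder` exist only for this sketch.
#[derive(Clone, Copy, Debug)]
struct Ty(&'static str);
#[derive(Clone, Copy, Debug)]
struct Val(&'static str);

trait StructGep {
    // Old shape: fn struct_gep(&mut self, ptr: Val, idx: u64) -> Val;
    // New shape: the aggregate type is passed explicitly, matching what
    // LLVMBuildStructGEP2 expects.
    fn struct_gep(&mut self, ty: Ty, ptr: Val, idx: u64) -> Val;
}

struct MockBuilder;

impl StructGep for MockBuilder {
    fn struct_gep(&mut self, ty: Ty, ptr: Val, idx: u64) -> Val {
        // A real backend would forward these arguments to
        // LLVMBuildStructGEP2(builder, ty, ptr, idx, name).
        println!("struct_gep: ty={:?} ptr={:?} idx={}", ty, ptr, idx);
        Val("%field_ptr")
    }
}

fn main() {
    let mut bx = MockBuilder;
    let pair_ty = Ty("{ i32, i64 }");
    let place = Val("%place");
    // Callers now supply the aggregate type alongside the pointer, so the
    // GEP no longer depends on the pointee type of `place`.
    let _a = bx.struct_gep(pair_ty, place, 0);
    let _b = bx.struct_gep(pair_ty, place, 1);
}

The diff below makes the same change for real in the LLVM backend and in every caller that goes through the BuilderMethods trait.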


@@ -497,9 +497,10 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
             OperandValue::Immediate(self.to_immediate(llval, place.layout))
         } else if let abi::Abi::ScalarPair(ref a, ref b) = place.layout.abi {
             let b_offset = a.value.size(self).align_to(b.value.align(self).abi);
+            let pair_ty = place.layout.llvm_type(self);

             let mut load = |i, scalar: &abi::Scalar, align| {
-                let llptr = self.struct_gep(place.llval, i as u64);
+                let llptr = self.struct_gep(pair_ty, place.llval, i as u64);
                 let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
                 let load = self.load(llty, llptr, align);
                 scalar_load_metadata(self, load, scalar);
@@ -663,9 +664,9 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
        }
    }

-   fn struct_gep(&mut self, ptr: &'ll Value, idx: u64) -> &'ll Value {
+   fn struct_gep(&mut self, ty: &'ll Type, ptr: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
-       unsafe { llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, UNNAMED) }
+       unsafe { llvm::LLVMBuildStructGEP2(self.llbuilder, ty, ptr, idx as c_uint, UNNAMED) }
    }

    /* Casts */


@@ -1408,8 +1408,9 @@ extern "C" {
        NumIndices: c_uint,
        Name: *const c_char,
    ) -> &'a Value;
-   pub fn LLVMBuildStructGEP(
+   pub fn LLVMBuildStructGEP2(
        B: &Builder<'a>,
+       Ty: &'a Type,
        Pointer: &'a Value,
        Idx: c_uint,
        Name: *const c_char,


@@ -98,6 +98,7 @@ fn emit_aapcs_va_arg(
    // Implementation of the AAPCS64 calling convention for va_args see
    // https://github.com/ARM-software/abi-aa/blob/master/aapcs64/aapcs64.rst
    let va_list_addr = list.immediate();
+   let va_list_ty = list.deref(bx.cx).layout.llvm_type(bx);
    let layout = bx.cx.layout_of(target_ty);

    let mut maybe_reg = bx.build_sibling_block("va_arg.maybe_reg");
@@ -109,11 +110,11 @@ fn emit_aapcs_va_arg(
    let gr_type = target_ty.is_any_ptr() || target_ty.is_integral();
    let (reg_off, reg_top_index, slot_size) = if gr_type {
-       let gr_offs = bx.struct_gep(va_list_addr, 7);
+       let gr_offs = bx.struct_gep(va_list_ty, va_list_addr, 7);
        let nreg = (layout.size.bytes() + 7) / 8;
        (gr_offs, 3, nreg * 8)
    } else {
-       let vr_off = bx.struct_gep(va_list_addr, 9);
+       let vr_off = bx.struct_gep(va_list_ty, va_list_addr, 9);
        let nreg = (layout.size.bytes() + 15) / 16;
        (vr_off, 5, nreg * 16)
    };
@@ -141,7 +142,7 @@ fn emit_aapcs_va_arg(
    maybe_reg.cond_br(use_stack, &on_stack.llbb(), &in_reg.llbb());

    let top_type = bx.type_i8p();
-   let top = in_reg.struct_gep(va_list_addr, reg_top_index);
+   let top = in_reg.struct_gep(va_list_ty, va_list_addr, reg_top_index);
    let top = in_reg.load(top_type, top, bx.tcx().data_layout.pointer_align.abi);

    // reg_value = *(@top + reg_off_v);


@@ -311,14 +311,15 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
                    Abi::ScalarPair(ref a, ref b) => (a, b),
                    _ => bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout),
                };
+               let ty = bx.backend_type(dest.layout);
                let b_offset = a_scalar.value.size(bx).align_to(b_scalar.value.align(bx).abi);

-               let llptr = bx.struct_gep(dest.llval, 0);
+               let llptr = bx.struct_gep(ty, dest.llval, 0);
                let val = bx.from_immediate(a);
                let align = dest.align;
                bx.store_with_flags(val, llptr, align, flags);

-               let llptr = bx.struct_gep(dest.llval, 1);
+               let llptr = bx.struct_gep(ty, dest.llval, 1);
                let val = bx.from_immediate(b);
                let align = dest.align.restrict_for_offset(b_offset);
                bx.store_with_flags(val, llptr, align, flags);


@@ -103,7 +103,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
                if offset == a.value.size(bx.cx()).align_to(b.value.align(bx.cx()).abi) =>
            {
                // Offset matches second field.
-               bx.struct_gep(self.llval, 1)
+               let ty = bx.backend_type(self.layout);
+               bx.struct_gep(ty, self.llval, 1)
            }
            Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } if field.is_zst() => {
                // ZST fields are not included in Scalar, ScalarPair, and Vector layouts, so manually offset the pointer.
@@ -119,7 +120,10 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
                    self.layout
                );
            }
-           _ => bx.struct_gep(self.llval, bx.cx().backend_field_index(self.layout, ix)),
+           _ => {
+               let ty = bx.backend_type(self.layout);
+               bx.struct_gep(ty, self.llval, bx.cx().backend_field_index(self.layout, ix))
+           }
        };
        PlaceRef {
            // HACK(eddyb): have to bitcast pointers until LLVM removes pointee types.


@@ -178,7 +178,7 @@ pub trait BuilderMethods<'a, 'tcx>:
    fn gep(&mut self, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value;
    fn inbounds_gep(&mut self, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value;
-   fn struct_gep(&mut self, ptr: Self::Value, idx: u64) -> Self::Value;
+   fn struct_gep(&mut self, ty: Self::Type, ptr: Self::Value, idx: u64) -> Self::Value;
    fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;