
Refactoring after the PlaceValue addition

I added `PlaceValue` in 123775, but kept that one line-by-line simple because it touched so many places.

This goes through and adds more helpers & docs, and changes some `PlaceRef`s to `PlaceValue` where the type didn't need to be included.

No behaviour changes.
Scott McMurray 2024-04-12 19:11:21 -07:00
parent 19dacee0d8
commit 9be16ebe89
7 changed files with 152 additions and 127 deletions
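For orientation before the per-file diffs, here is a minimal, self-contained sketch of the relationships the new helpers encode. The types below are simplified stand-ins written for this page, not the actual `rustc_codegen_ssa` definitions (the real ones are generic over backend values and use `TyAndLayout<'tcx>`, `Size`, and `abi::Align`); only the method names and their roles (`with_type`, `address`, `pointer_parts`, `deref`) mirror what the commit adds.

// Simplified stand-ins (hypothetical): the real types live in
// rustc_codegen_ssa and carry backend values, TyAndLayout, and abi::Align.

#[derive(Copy, Clone, Debug)]
struct Align(u64);

#[derive(Copy, Clone, Debug)]
struct Layout {
    align: Align,
}

/// Untyped location: pointer, optional metadata, alignment.
#[derive(Copy, Clone, Debug)]
struct PlaceValue<V> {
    llval: V,
    llextra: Option<V>,
    align: Align,
}

/// The same location plus the type stored there.
#[derive(Copy, Clone, Debug)]
struct PlaceRef<V> {
    val: PlaceValue<V>,
    layout: Layout,
}

/// A pointer-shaped operand: a thin pointer, or a (pointer, metadata) pair.
#[derive(Copy, Clone, Debug)]
enum OperandValue<V> {
    Immediate(V),
    Pair(V, V),
}

impl<V> PlaceValue<V> {
    /// Attach a type, producing a `PlaceRef` (mirrors the new `with_type`).
    fn with_type(self, layout: Layout) -> PlaceRef<V> {
        PlaceRef { val: self, layout }
    }

    /// Take the address of this place as an operand (inverse of `deref`).
    fn address(self) -> OperandValue<V> {
        match self.llextra {
            Some(extra) => OperandValue::Pair(self.llval, extra),
            None => OperandValue::Immediate(self.llval),
        }
    }
}

impl<V> OperandValue<V> {
    /// Split into data pointer and optional metadata (mirrors `pointer_parts`).
    fn pointer_parts(self) -> (V, Option<V>) {
        match self {
            OperandValue::Immediate(p) => (p, None),
            OperandValue::Pair(p, extra) => (p, Some(extra)),
        }
    }

    /// Treat this operand as a pointer to a place with the given alignment.
    fn deref(self, align: Align) -> PlaceValue<V> {
        let (llval, llextra) = self.pointer_parts();
        PlaceValue { llval, llextra, align }
    }
}

fn main() {
    // Round-trip: pointer operand -> untyped place -> typed place -> address.
    let ptr = OperandValue::Immediate(0x1000u64);
    let place = ptr.deref(Align(8));
    let typed = place.with_type(Layout { align: Align(8) });
    assert_eq!(typed.layout.align.0, typed.val.align.0);
    assert!(matches!(typed.val.address(), OperandValue::Immediate(0x1000)));
    println!("{typed:?}");
}

The design point that drives the rest of the diff: `PlaceValue` is the untyped part of a place (pointer, optional metadata, alignment), a `PlaceRef` is just a `PlaceValue` plus a layout, so helpers that only move bytes can take `PlaceValue`s and one shared layout instead of two typed `PlaceRef`s.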

View file

@@ -283,7 +283,7 @@ pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
                 }
 
                 if src_f.layout.ty == dst_f.layout.ty {
-                    bx.typed_place_copy(dst_f, src_f);
+                    bx.typed_place_copy(dst_f.val, src_f.val, src_f.layout);
                 } else {
                     coerce_unsized_into(bx, src_f, dst_f);
                 }

View file

@@ -1454,9 +1454,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         Some(pointee_align) => cmp::max(pointee_align, arg.layout.align.abi),
                         None => arg.layout.align.abi,
                     };
-                    let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align);
-                    op.val.store(bx, scratch);
-                    (scratch.val.llval, scratch.val.align, true)
+                    let scratch = PlaceValue::alloca(bx, arg.layout.size, required_align);
+                    op.val.store(bx, scratch.with_type(arg.layout));
+                    (scratch.llval, scratch.align, true)
                 }
                 PassMode::Cast { .. } => {
                     let scratch = PlaceRef::alloca(bx, arg.layout);
@@ -1475,10 +1475,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         // For `foo(packed.large_field)`, and types with <4 byte alignment on x86,
                         // alignment requirements may be higher than the type's alignment, so copy
                         // to a higher-aligned alloca.
-                        let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align);
-                        let op_place = PlaceRef { val: op_place_val, layout: op.layout };
-                        bx.typed_place_copy(scratch, op_place);
-                        (scratch.val.llval, scratch.val.align, true)
+                        let scratch = PlaceValue::alloca(bx, arg.layout.size, required_align);
+                        bx.typed_place_copy(scratch, op_place_val, op.layout);
+                        (scratch.llval, scratch.align, true)
                     } else {
                         (op_place_val.llval, op_place_val.align, true)
                     }
@@ -1567,7 +1566,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             if place_val.llextra.is_some() {
                 bug!("closure arguments must be sized");
             }
-            let tuple_ptr = PlaceRef { val: place_val, layout: tuple.layout };
+            let tuple_ptr = place_val.with_type(tuple.layout);
             for i in 0..tuple.layout.fields.count() {
                 let field_ptr = tuple_ptr.project_field(bx, i);
                 let field = bx.load_operand(field_ptr);

View file

@@ -1,4 +1,4 @@
-use super::operand::{OperandRef, OperandValue};
+use super::operand::OperandRef;
 use super::place::PlaceRef;
 use super::FunctionCx;
 use crate::errors;
@@ -93,9 +93,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     // into the (unoptimized) direct swapping implementation, so we disable it.
                     || bx.sess().target.arch == "spirv"
                 {
-                    let x_place = PlaceRef::new_sized(args[0].immediate(), pointee_layout);
-                    let y_place = PlaceRef::new_sized(args[1].immediate(), pointee_layout);
-                    bx.typed_place_swap(x_place, y_place);
+                    let align = pointee_layout.align.abi;
+                    let x_place = args[0].val.deref(align);
+                    let y_place = args[1].val.deref(align);
+                    bx.typed_place_swap(x_place, y_place, pointee_layout);
                     return Ok(());
                 }
             }
@@ -113,15 +114,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             sym::va_end => bx.va_end(args[0].immediate()),
             sym::size_of_val => {
                 let tp_ty = fn_args.type_at(0);
-                let meta =
-                    if let OperandValue::Pair(_, meta) = args[0].val { Some(meta) } else { None };
+                let (_, meta) = args[0].val.pointer_parts();
                 let (llsize, _) = size_of_val::size_and_align_of_dst(bx, tp_ty, meta);
                 llsize
             }
             sym::min_align_of_val => {
                 let tp_ty = fn_args.type_at(0);
-                let meta =
-                    if let OperandValue::Pair(_, meta) = args[0].val { Some(meta) } else { None };
+                let (_, meta) = args[0].val.pointer_parts();
                 let (_, llalign) = size_of_val::size_and_align_of_dst(bx, tp_ty, meta);
                 llalign
             }

View file

@@ -61,7 +61,7 @@ pub enum OperandValue<V> {
     ZeroSized,
 }
 
-impl<V> OperandValue<V> {
+impl<V: CodegenObject> OperandValue<V> {
     /// If this is ZeroSized/Immediate/Pair, return an array of the 0/1/2 values.
     /// If this is Ref, return the place.
     #[inline]
@@ -86,6 +86,30 @@ impl<V> OperandValue<V> {
         };
         OperandValue::Pair(a, b)
     }
+
+    /// Treat this value as a pointer and return the data pointer and
+    /// optional metadata as backend values.
+    ///
+    /// If you're making a place, use [`Self::deref`] instead.
+    pub fn pointer_parts(self) -> (V, Option<V>) {
+        match self {
+            OperandValue::Immediate(llptr) => (llptr, None),
+            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
+            _ => bug!("OperandValue cannot be a pointer: {self:?}"),
+        }
+    }
+
+    /// Treat this value as a pointer and return the place to which it points.
+    ///
+    /// The pointer immediate doesn't inherently know its alignment,
+    /// so you need to pass it in. If you want to get it from a type's ABI
+    /// alignment, then maybe you want [`OperandRef::deref`] instead.
+    ///
+    /// This is the inverse of [`PlaceValue::address`].
+    pub fn deref(self, align: Align) -> PlaceValue<V> {
+        let (llval, llextra) = self.pointer_parts();
+        PlaceValue { llval, llextra, align }
+    }
 }
 
 /// An `OperandRef` is an "SSA" reference to a Rust value, along with
@@ -235,6 +259,15 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         }
     }
 
+    /// Asserts that this operand is a pointer (or reference) and returns
+    /// the place to which it points. (This requires no code to be emitted
+    /// as we represent places using the pointer to the place.)
+    ///
+    /// This uses [`Ty::builtin_deref`] to include the type of the place and
+    /// assumes the place is aligned to the pointee's usual ABI alignment.
+    ///
+    /// If you don't need the type, see [`OperandValue::pointer_parts`]
+    /// or [`OperandValue::deref`].
     pub fn deref<Cx: LayoutTypeMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
         if self.layout.ty.is_box() {
             // Derefer should have removed all Box derefs
@@ -247,15 +280,8 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
             .builtin_deref(true)
             .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self));
 
-        let (llptr, llextra) = match self.val {
-            OperandValue::Immediate(llptr) => (llptr, None),
-            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
-            OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self),
-            OperandValue::ZeroSized => bug!("Deref of ZST operand {:?}", self),
-        };
         let layout = cx.layout_of(projected_ty);
-        let val = PlaceValue { llval: llptr, llextra, align: layout.align.abi };
-        PlaceRef { val, layout }
+        self.val.deref(layout.align.abi).with_type(layout)
     }
 
     /// If this operand is a `Pair`, we return an aggregate with the two values.
@@ -448,8 +474,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
                 if val.llextra.is_some() {
                     bug!("cannot directly store unsized values");
                 }
-                let source_place = PlaceRef { val, layout: dest.layout };
-                bx.typed_place_copy_with_flags(dest, source_place, flags);
+                bx.typed_place_copy_with_flags(dest.val, val, dest.layout, flags);
             }
             OperandValue::Immediate(s) => {
                 let val = bx.from_immediate(s);

View file

@@ -10,12 +10,15 @@ use rustc_middle::mir;
 use rustc_middle::mir::tcx::PlaceTy;
 use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout};
 use rustc_middle::ty::{self, Ty};
-use rustc_target::abi::{Align, FieldsShape, Int, Pointer, TagEncoding};
+use rustc_target::abi::{Align, FieldsShape, Int, Pointer, Size, TagEncoding};
 use rustc_target::abi::{VariantIdx, Variants};
 
 /// The location and extra runtime properties of the place.
 ///
 /// Typically found in a [`PlaceRef`] or an [`OperandValue::Ref`].
+///
+/// As a location in memory, this has no specific type. If you want to
+/// load or store it using a typed operation, use [`Self::with_type`].
 #[derive(Copy, Clone, Debug)]
 pub struct PlaceValue<V> {
     /// A pointer to the contents of the place.
@@ -35,6 +38,41 @@ impl<V: CodegenObject> PlaceValue<V> {
     pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
         PlaceValue { llval, llextra: None, align }
     }
+
+    /// Allocates a stack slot in the function for a value
+    /// of the specified size and alignment.
+    ///
+    /// The allocation itself is untyped.
+    pub fn alloca<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx, Value = V>>(
+        bx: &mut Bx,
+        size: Size,
+        align: Align,
+    ) -> PlaceValue<V> {
+        let llval = bx.alloca(size, align);
+        PlaceValue::new_sized(llval, align)
+    }
+
+    /// Creates a `PlaceRef` to this location with the given type.
+    pub fn with_type<'tcx>(self, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
+        debug_assert!(
+            layout.is_unsized() || layout.abi.is_uninhabited() || self.llextra.is_none(),
+            "Had pointer metadata {:?} for sized type {layout:?}",
+            self.llextra,
+        );
+        PlaceRef { val: self, layout }
+    }
+
+    /// Gets the pointer to this place as an [`OperandValue::Immediate`]
+    /// or, for those needing metadata, an [`OperandValue::Pair`].
+    ///
+    /// This is the inverse of [`OperandValue::deref`].
+    pub fn address(self) -> OperandValue<V> {
+        if let Some(llextra) = self.llextra {
+            OperandValue::Pair(self.llval, llextra)
+        } else {
+            OperandValue::Immediate(self.llval)
+        }
+    }
 }
 
 #[derive(Copy, Clone, Debug)]
@@ -52,9 +90,7 @@ pub struct PlaceRef<'tcx, V> {
 
 impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
     pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
-        assert!(layout.is_sized());
-        let val = PlaceValue::new_sized(llval, layout.align.abi);
-        PlaceRef { val, layout }
+        PlaceRef::new_sized_aligned(llval, layout, layout.align.abi)
     }
 
     pub fn new_sized_aligned(
@@ -63,8 +99,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         align: Align,
     ) -> PlaceRef<'tcx, V> {
         assert!(layout.is_sized());
-        let val = PlaceValue::new_sized(llval, align);
-        PlaceRef { val, layout }
+        PlaceValue::new_sized(llval, align).with_type(layout)
     }
 
     // FIXME(eddyb) pass something else for the name so no work is done
@@ -72,18 +107,9 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
     pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
         bx: &mut Bx,
         layout: TyAndLayout<'tcx>,
-    ) -> Self {
-        Self::alloca_aligned(bx, layout, layout.align.abi)
-    }
-
-    pub fn alloca_aligned<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
-        bx: &mut Bx,
-        layout: TyAndLayout<'tcx>,
-        align: Align,
     ) -> Self {
         assert!(layout.is_sized(), "tried to statically allocate unsized place");
-        let tmp = bx.alloca(layout.size, align);
-        Self::new_sized_aligned(tmp, layout, align)
+        PlaceValue::alloca(bx, layout.size, layout.align.abi).with_type(layout)
     }
 
     /// Returns a place for an indirect reference to an unsized place.
@@ -132,18 +158,12 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
             } else {
                 bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
             };
-            PlaceRef {
-                val: PlaceValue {
-                    llval,
-                    llextra: if bx.cx().type_has_metadata(field.ty) {
-                        self.val.llextra
-                    } else {
-                        None
-                    },
-                    align: effective_field_align,
-                },
-                layout: field,
-            }
+            let val = PlaceValue {
+                llval,
+                llextra: if bx.cx().type_has_metadata(field.ty) { self.val.llextra } else { None },
+                align: effective_field_align,
+            };
+            val.with_type(field)
         };
 
         // Simple cases, which don't need DST adjustment:
@@ -198,7 +218,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         let ptr = bx.inbounds_ptradd(self.val.llval, offset);
         let val =
             PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
-        PlaceRef { val, layout: field }
+        val.with_type(field)
     }
 
     /// Obtain the actual discriminant of a value.
@@ -387,18 +407,13 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
             layout.size
         };
 
-        PlaceRef {
-            val: PlaceValue {
-                llval: bx.inbounds_gep(
-                    bx.cx().backend_type(self.layout),
-                    self.val.llval,
-                    &[bx.cx().const_usize(0), llindex],
-                ),
-                llextra: None,
-                align: self.val.align.restrict_for_offset(offset),
-            },
-            layout,
-        }
+        let llval = bx.inbounds_gep(
+            bx.cx().backend_type(self.layout),
+            self.val.llval,
+            &[bx.cx().const_usize(0), llindex],
+        );
+        let align = self.val.align.restrict_for_offset(offset);
+        PlaceValue::new_sized(llval, align).with_type(layout)
     }
 
     pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>(

View file

@@ -74,8 +74,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     if val.llextra.is_some() {
                         bug!("unsized coercion on an unsized rvalue");
                     }
-                    let source = PlaceRef { val, layout: operand.layout };
-                    base::coerce_unsized_into(bx, source, dest);
+                    base::coerce_unsized_into(bx, val.with_type(operand.layout), dest);
                 }
                 OperandValue::ZeroSized => {
                     bug!("unsized coercion on a ZST rvalue");
@@ -184,10 +183,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             OperandValue::Immediate(..) | OperandValue::Pair(..) => {
                 // When we have immediate(s), the alignment of the source is irrelevant,
                 // so we can store them using the destination's alignment.
-                src.val.store(
-                    bx,
-                    PlaceRef::new_sized_aligned(dst.val.llval, src.layout, dst.val.align),
-                );
+                src.val.store(bx, dst.val.with_type(src.layout));
             }
         }
     }
@@ -225,8 +221,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             OperandValue::Ref(source_place_val) => {
                 debug_assert_eq!(source_place_val.llextra, None);
                 debug_assert!(matches!(operand_kind, OperandValueKind::Ref));
-                let fake_place = PlaceRef { val: source_place_val, layout: cast };
-                Some(bx.load_operand(fake_place).val)
+                Some(bx.load_operand(source_place_val.with_type(cast)).val)
             }
             OperandValue::ZeroSized => {
                 let OperandValueKind::ZeroSized = operand_kind else {
@@ -452,23 +447,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 }
                 mir::CastKind::PointerCoercion(PointerCoercion::Unsize) => {
                     assert!(bx.cx().is_backend_scalar_pair(cast));
-                    let (lldata, llextra) = match operand.val {
-                        OperandValue::Pair(lldata, llextra) => {
-                            // unsize from a fat pointer -- this is a
-                            // "trait-object-to-supertrait" coercion.
-                            (lldata, Some(llextra))
-                        }
-                        OperandValue::Immediate(lldata) => {
-                            // "standard" unsize
-                            (lldata, None)
-                        }
-                        OperandValue::Ref(..) => {
-                            bug!("by-ref operand {:?} in `codegen_rvalue_operand`", operand);
-                        }
-                        OperandValue::ZeroSized => {
-                            bug!("zero-sized operand {:?} in `codegen_rvalue_operand`", operand);
-                        }
-                    };
+                    let (lldata, llextra) = operand.val.pointer_parts();
                     let (lldata, llextra) =
                         base::unsize_ptr(bx, lldata, operand.layout.ty, cast.ty, llextra);
                     OperandValue::Pair(lldata, llextra)
@@ -489,12 +468,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     }
                 }
                 mir::CastKind::DynStar => {
-                    let (lldata, llextra) = match operand.val {
-                        OperandValue::Ref(..) => todo!(),
-                        OperandValue::Immediate(v) => (v, None),
-                        OperandValue::Pair(v, l) => (v, Some(l)),
-                        OperandValue::ZeroSized => bug!("ZST -- which is not PointerLike -- in DynStar"),
-                    };
+                    let (lldata, llextra) = operand.val.pointer_parts();
                     let (lldata, llextra) =
                         base::cast_to_dyn_star(bx, lldata, operand.layout, cast.ty, llextra);
                     OperandValue::Pair(lldata, llextra)
@@ -812,16 +786,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         mk_ptr_ty: impl FnOnce(TyCtxt<'tcx>, Ty<'tcx>) -> Ty<'tcx>,
     ) -> OperandRef<'tcx, Bx::Value> {
         let cg_place = self.codegen_place(bx, place.as_ref());
+        let val = cg_place.val.address();
+
         let ty = cg_place.layout.ty;
-
-        // Note: places are indirect, so storing the `llval` into the
-        // destination effectively creates a reference.
-        let val = if !bx.cx().type_has_metadata(ty) {
-            OperandValue::Immediate(cg_place.val.llval)
-        } else {
-            OperandValue::Pair(cg_place.val.llval, cg_place.val.llextra.unwrap())
-        };
+        debug_assert!(
+            if bx.cx().type_has_metadata(ty) {
+                matches!(val, OperandValue::Pair(..))
+            } else {
+                matches!(val, OperandValue::Immediate(..))
+            },
+            "Address of place was unexpectedly {val:?} for pointee type {ty:?}",
+        );
+
         OperandRef { val, layout: self.cx.layout_of(mk_ptr_ty(self.cx.tcx(), ty)) }
     }

View file

@@ -186,6 +186,15 @@ pub trait BuilderMethods<'a, 'tcx>:
         align: Align,
         flags: MemFlags,
     ) -> Self::Value;
+    fn store_to_place_with_flags(
+        &mut self,
+        val: Self::Value,
+        place: PlaceValue<Self::Value>,
+        flags: MemFlags,
+    ) -> Self::Value {
+        debug_assert_eq!(place.llextra, None);
+        self.store_with_flags(val, place.llval, place.align, flags)
+    }
     fn atomic_store(
         &mut self,
         val: Self::Value,
@@ -286,35 +295,36 @@ pub trait BuilderMethods<'a, 'tcx>:
     /// (For example, typed load-stores with alias metadata.)
     fn typed_place_copy(
         &mut self,
-        dst: PlaceRef<'tcx, Self::Value>,
-        src: PlaceRef<'tcx, Self::Value>,
+        dst: PlaceValue<Self::Value>,
+        src: PlaceValue<Self::Value>,
+        layout: TyAndLayout<'tcx>,
     ) {
-        self.typed_place_copy_with_flags(dst, src, MemFlags::empty());
+        self.typed_place_copy_with_flags(dst, src, layout, MemFlags::empty());
     }
 
     fn typed_place_copy_with_flags(
         &mut self,
-        dst: PlaceRef<'tcx, Self::Value>,
-        src: PlaceRef<'tcx, Self::Value>,
+        dst: PlaceValue<Self::Value>,
+        src: PlaceValue<Self::Value>,
+        layout: TyAndLayout<'tcx>,
         flags: MemFlags,
     ) {
-        debug_assert!(src.val.llextra.is_none(), "cannot directly copy from unsized values");
-        debug_assert!(dst.val.llextra.is_none(), "cannot directly copy into unsized values");
-        debug_assert_eq!(dst.layout.size, src.layout.size);
+        debug_assert!(layout.is_sized(), "cannot typed-copy an unsigned type");
+        debug_assert!(src.llextra.is_none(), "cannot directly copy from unsized values");
+        debug_assert!(dst.llextra.is_none(), "cannot directly copy into unsized values");
         if flags.contains(MemFlags::NONTEMPORAL) {
             // HACK(nox): This is inefficient but there is no nontemporal memcpy.
-            let ty = self.backend_type(dst.layout);
-            let val = self.load_from_place(ty, src.val);
-            self.store_with_flags(val, dst.val.llval, dst.val.align, flags);
-        } else if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(dst.layout)
-        {
+            let ty = self.backend_type(layout);
+            let val = self.load_from_place(ty, src);
+            self.store_to_place_with_flags(val, dst, flags);
+        } else if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(layout) {
             // If we're not optimizing, the aliasing information from `memcpy`
             // isn't useful, so just load-store the value for smaller code.
-            let temp = self.load_operand(src);
-            temp.val.store_with_flags(self, dst, flags);
-        } else if !dst.layout.is_zst() {
-            let bytes = self.const_usize(dst.layout.size.bytes());
-            self.memcpy(dst.val.llval, dst.val.align, src.val.llval, src.val.align, bytes, flags);
+            let temp = self.load_operand(src.with_type(layout));
+            temp.val.store_with_flags(self, dst.with_type(layout), flags);
+        } else if !layout.is_zst() {
+            let bytes = self.const_usize(layout.size.bytes());
+            self.memcpy(dst.llval, dst.align, src.llval, src.align, bytes, flags);
         }
     }
@@ -327,18 +337,19 @@ pub trait BuilderMethods<'a, 'tcx>:
     /// cases (in non-debug), preferring the fallback body instead.
     fn typed_place_swap(
         &mut self,
-        left: PlaceRef<'tcx, Self::Value>,
-        right: PlaceRef<'tcx, Self::Value>,
+        left: PlaceValue<Self::Value>,
+        right: PlaceValue<Self::Value>,
+        layout: TyAndLayout<'tcx>,
     ) {
-        let mut temp = self.load_operand(left);
+        let mut temp = self.load_operand(left.with_type(layout));
         if let OperandValue::Ref(..) = temp.val {
             // The SSA value isn't stand-alone, so we need to copy it elsewhere
-            let alloca = PlaceRef::alloca(self, left.layout);
-            self.typed_place_copy(alloca, left);
+            let alloca = PlaceRef::alloca(self, layout);
+            self.typed_place_copy(alloca.val, left, layout);
             temp = self.load_operand(alloca);
         }
-        self.typed_place_copy(left, right);
-        temp.val.store(self, right);
+        self.typed_place_copy(left, right, layout);
+        temp.val.store(self, right.with_type(layout));
     }
 
     fn select(