Make abi::Abi Copy and remove a *lot* of refs

fix

fix

Remove more refs and clones

fix

more

fix
Andreas Liljeqvist 2021-08-29 11:06:55 +02:00
parent 86ff6aeb82
commit 5b2f757dae
33 changed files with 139 additions and 163 deletions
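
The heart of the refactor: once abi::Abi and the small types it carries (Scalar, WrappingRange) are Copy, call sites can bind and pass them by value instead of matching through ref patterns and cloning. A minimal sketch of the pattern, using simplified stand-in types rather than the real rustc definitions:

    // Illustrative stand-ins only; the real rustc types carry far more data.
    #[derive(Clone, Copy)]
    struct Scalar {
        is_bool: bool,
    }

    #[derive(Clone, Copy)]
    enum Abi {
        Scalar(Scalar),
        ScalarPair(Scalar, Scalar),
        Aggregate,
    }

    fn first_scalar(abi: Abi) -> Option<Scalar> {
        match abi {
            // Before the change this had to be written as
            // `Abi::Scalar(ref s) => Some(s.clone())` (or `*s`).
            Abi::Scalar(s) | Abi::ScalarPair(s, _) => Some(s),
            Abi::Aggregate => None,
        }
    }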

View file

@@ -404,7 +404,7 @@ fn push_debuginfo_type_name<'tcx>(
// calculate the range of values for the dataful variant
let dataful_discriminant_range =
-&dataful_variant_layout.largest_niche.as_ref().unwrap().scalar.valid_range;
+dataful_variant_layout.largest_niche.unwrap().scalar.valid_range;
let min = dataful_discriminant_range.start;
let min = tag.value.size(&tcx).truncate(min);
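
The as_ref().unwrap() -> unwrap() change above is a direct consequence of Copy: when the niche data is Copy, the Option holding it is Copy too, so it can be unwrapped out of a shared reference without borrowing first. A hypothetical illustration (Niche here is a stand-in, not the real layout type):

    #[derive(Clone, Copy)]
    struct Niche {
        valid_range_start: u128,
    }

    fn range_start(largest_niche: &Option<Niche>) -> u128 {
        // Before: largest_niche.as_ref().unwrap().valid_range_start
        // With Copy, the Option is copied out of the reference and unwrapped directly.
        largest_niche.unwrap().valid_range_start
    }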

View file

@@ -1102,9 +1102,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// the load would just produce `OperandValue::Ref` instead
// of the `OperandValue::Immediate` we need for the call.
llval = bx.load(bx.backend_type(arg.layout), llval, align);
-if let abi::Abi::Scalar(ref scalar) = arg.layout.abi {
+if let abi::Abi::Scalar(scalar) = arg.layout.abi {
if scalar.is_bool() {
-bx.range_metadata(llval, &WrappingRange { start: 0, end: 1 });
+bx.range_metadata(llval, WrappingRange { start: 0, end: 1 });
}
}
// We store bools as `i8` so we need to truncate to `i1`.
@@ -1424,7 +1424,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let src = self.codegen_operand(bx, src);
// Special-case transmutes between scalars as simple bitcasts.
-match (&src.layout.abi, &dst.layout.abi) {
+match (src.layout.abi, dst.layout.abi) {
(abi::Abi::Scalar(src_scalar), abi::Abi::Scalar(dst_scalar)) => {
// HACK(eddyb) LLVM doesn't like `bitcast`s between pointers and non-pointers.
if (src_scalar.value == abi::Pointer) == (dst_scalar.value == abi::Pointer) {

View file

@@ -68,7 +68,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
if let Some(prim) = field.val.try_to_scalar() {
let layout = bx.layout_of(field_ty);
let scalar = match layout.abi {
-Abi::Scalar(ref x) => x,
+Abi::Scalar(x) => x,
_ => bug!("from_const: invalid ByVal layout: {:#?}", layout),
};
bx.scalar_to_backend(prim, scalar, bx.immediate_backend_type(layout))

View file

@@ -79,7 +79,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let val = match val {
ConstValue::Scalar(x) => {
let scalar = match layout.abi {
-Abi::Scalar(ref x) => x,
+Abi::Scalar(x) => x,
_ => bug!("from_const: invalid ByVal layout: {:#?}", layout),
};
let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
@@ -87,7 +87,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
}
ConstValue::Slice { data, start, end } => {
let a_scalar = match layout.abi {
-Abi::ScalarPair(ref a, _) => a,
+Abi::ScalarPair(a, _) => a,
_ => bug!("from_const: invalid ScalarPair layout: {:#?}", layout),
};
let a = Scalar::from_pointer(
@@ -162,7 +162,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
llval: V,
layout: TyAndLayout<'tcx>,
) -> Self {
-let val = if let Abi::ScalarPair(ref a, ref b) = layout.abi {
+let val = if let Abi::ScalarPair(a, b) = layout.abi {
debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);
// Deconstruct the immediate aggregate.
@@ -185,7 +185,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let field = self.layout.field(bx.cx(), i);
let offset = self.layout.fields.offset(i);
-let mut val = match (self.val, &self.layout.abi) {
+let mut val = match (self.val, self.layout.abi) {
// If the field is ZST, it has no data.
_ if field.is_zst() => {
return OperandRef::new_zst(bx, field);
@@ -200,7 +200,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
}
// Extract a scalar component from a pair.
-(OperandValue::Pair(a_llval, b_llval), &Abi::ScalarPair(ref a, ref b)) => {
+(OperandValue::Pair(a_llval, b_llval), Abi::ScalarPair(a, b)) => {
if offset.bytes() == 0 {
assert_eq!(field.size, a.value.size(bx.cx()));
OperandValue::Immediate(a_llval)
@@ -212,14 +212,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
}
// `#[repr(simd)]` types are also immediate.
-(OperandValue::Immediate(llval), &Abi::Vector { .. }) => {
+(OperandValue::Immediate(llval), Abi::Vector { .. }) => {
OperandValue::Immediate(bx.extract_element(llval, bx.cx().const_usize(i as u64)))
}
_ => bug!("OperandRef::extract_field({:?}): not applicable", self),
};
-match (&mut val, &field.abi) {
+match (&mut val, field.abi) {
(OperandValue::Immediate(llval), _) => {
// Bools in union fields needs to be truncated.
*llval = bx.to_immediate(*llval, field);
@@ -308,7 +308,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
}
OperandValue::Pair(a, b) => {
let (a_scalar, b_scalar) = match dest.layout.abi {
-Abi::ScalarPair(ref a, ref b) => (a, b),
+Abi::ScalarPair(a, b) => (a, b),
_ => bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout),
};
let ty = bx.backend_type(dest.layout);
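
A recurring shape in this file: tuple matches that previously borrowed both sides, such as (self.val, &self.layout.abi) and (&mut val, &field.abi), now copy the Abi side while still borrowing the side that actually gets mutated. A rough sketch with hypothetical simplified types:

    #[derive(Clone, Copy)]
    enum Abi {
        Scalar,
        ScalarPair,
    }

    enum OperandValue {
        Immediate(u64),
        Pair(u64, u64),
    }

    fn truncate_bools(val: &mut OperandValue, abi: Abi) {
        // abi is copied into the tuple; val stays a mutable borrow.
        match (&mut *val, abi) {
            (OperandValue::Immediate(llval), Abi::Scalar) => *llval &= 1,
            (OperandValue::Pair(a, b), Abi::ScalarPair) => {
                *a &= 1;
                *b &= 1;
            }
            _ => {}
        }
    }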

View file

@@ -99,7 +99,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
// Also handles the first field of Scalar, ScalarPair, and Vector layouts.
self.llval
}
-Abi::ScalarPair(ref a, ref b)
+Abi::ScalarPair(a, b)
if offset == a.value.size(bx.cx()).align_to(b.value.align(bx.cx()).abi) =>
{
// Offset matches second field.
@@ -222,7 +222,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
.map_or(index.as_u32() as u128, |discr| discr.val);
return bx.cx().const_uint_big(cast_to, discr_val);
}
-Variants::Multiple { ref tag, ref tag_encoding, tag_field, .. } => {
+Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
(tag, tag_encoding, tag_field)
}
};
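
Note the mixed bindings kept in the Variants::Multiple arm above: tag (a Scalar, now Copy) is bound by value, while tag_encoding keeps its ref binding because TagEncoding is not Copy. Both binding modes can coexist in one struct pattern, roughly like this (types are illustrative stand-ins):

    #[derive(Clone, Copy)]
    struct Tag {
        size: u64,
    }

    enum TagEncoding {
        Direct,
        Niche,
    }

    struct Multiple {
        tag: Tag,
        tag_encoding: TagEncoding,
        tag_field: usize,
    }

    fn split(v: &Multiple) -> (Tag, &TagEncoding, usize) {
        // tag and tag_field are Copy and bound by value;
        // tag_encoding is borrowed with `ref` since it cannot be copied out of *v.
        let Multiple { tag, ref tag_encoding, tag_field } = *v;
        (tag, tag_encoding, tag_field)
    }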

View file

@@ -124,13 +124,13 @@ pub trait BuilderMethods<'a, 'tcx>:
fn from_immediate(&mut self, val: Self::Value) -> Self::Value;
fn to_immediate(&mut self, val: Self::Value, layout: TyAndLayout<'_>) -> Self::Value {
-if let Abi::Scalar(ref scalar) = layout.abi {
+if let Abi::Scalar(scalar) = layout.abi {
self.to_immediate_scalar(val, scalar)
} else {
val
}
}
-fn to_immediate_scalar(&mut self, val: Self::Value, scalar: &Scalar) -> Self::Value;
+fn to_immediate_scalar(&mut self, val: Self::Value, scalar: Scalar) -> Self::Value;
fn alloca(&mut self, ty: Self::Type, align: Align) -> Self::Value;
fn dynamic_alloca(&mut self, ty: Self::Type, align: Align) -> Self::Value;
@@ -156,7 +156,7 @@ pub trait BuilderMethods<'a, 'tcx>:
dest: PlaceRef<'tcx, Self::Value>,
) -> Self;
-fn range_metadata(&mut self, load: Self::Value, range: &WrappingRange);
+fn range_metadata(&mut self, load: Self::Value, range: WrappingRange);
fn nonnull_metadata(&mut self, load: Self::Value);
fn store(&mut self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value;
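
The trait-level changes above move small Copy arguments from &T to T in to_immediate_scalar and range_metadata, which every backend implementation then has to mirror. A rough before/after on a made-up trait (Builder and MyBuilder are placeholder names, not the real rustc traits):

    #[derive(Clone, Copy)]
    struct WrappingRange {
        start: u128,
        end: u128,
    }

    trait Builder {
        // Before: fn range_metadata(&mut self, load: u64, range: &WrappingRange);
        fn range_metadata(&mut self, load: u64, range: WrappingRange);
    }

    struct MyBuilder;

    impl Builder for MyBuilder {
        fn range_metadata(&mut self, _load: u64, range: WrappingRange) {
            // Two u128s are cheap to pass by value, and no lifetime has to be threaded through.
            let _ = (range.start, range.end);
        }
    }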

View file

@@ -28,7 +28,7 @@ pub trait ConstMethods<'tcx>: BackendTypes {
fn const_data_from_alloc(&self, alloc: &Allocation) -> Self::Value;
-fn scalar_to_backend(&self, cv: Scalar, layout: &abi::Scalar, llty: Self::Type) -> Self::Value;
+fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: Self::Type) -> Self::Value;
fn from_const_alloc(
&self,
layout: TyAndLayout<'tcx>,