compiler: `rustc_abi::Abi` => `BackendRepr`
The initial naming of "Abi" was an awful mistake, conveying wrong ideas about how psABIs work and even more so about what the enum meant. It was only ever meant to represent the way the value would be described to a codegen backend as it was lowered to that intermediate representation. It was never meant to say anything about the actual psABI handling! The conflation exists because LLVM will typically associate a certain form with a certain ABI, but even that association does not hold once the special cases that actually exist arise, plus the IR annotations that modify the ABI.

Reframe `rustc_abi::Abi` as the `BackendRepr` of the type, and rename `BackendRepr::Aggregate` to `BackendRepr::Memory`. Unfortunately, due to the persistent misunderstandings, this too is now incorrect:

- Scattered ABI-relevant code is entangled with `BackendRepr`.
- We do not always pre-compute a correct `BackendRepr` that reflects how we "actually" want this value to be handled, so we leave the backend interface to also inject various special cases here.
- In some cases `BackendRepr::Memory` is a "real" aggregate, in others it merely means the value is passed via memory, and in some cases it is actually a scalar!

Our rustc-to-backend lowering code handles this sort of thing right now. That will eventually be addressed by lifting the duplicated lowering code into either `rustc_codegen_ssa` or `rustc_target`, as appropriate.
parent 2dece5bb62
commit 7086dd83cc
51 changed files with 517 additions and 428 deletions
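For orientation before reading the hunks: the variants exercised in this diff imply the following overall shape for the renamed enum. This is a sketch reconstructed from the usages below (here `Scalar` stands for `rustc_abi::Scalar`), not the authoritative definition in `rustc_abi`, which carries more detail.

```rust
// Rough shape of the renamed enum, as implied by the matches in this diff.
// Illustrative only; the real definition lives in compiler/rustc_abi.
pub enum BackendRepr {
    /// A single scalar value (integer, bool, pointer): one backend immediate.
    Scalar(Scalar),
    /// Two scalars (e.g. a slice's pointer + length): a pair of immediates
    /// rather than a value passed through memory.
    ScalarPair(Scalar, Scalar),
    /// A `#[repr(simd)]` vector: a backend vector immediate.
    Vector { element: Scalar, count: u64 },
    /// Everything else, lowered via memory; formerly `Abi::Aggregate`.
    Memory { sized: bool },
    /// A type with no valid values.
    Uninhabited,
}
```

With that shape in mind, the change is largely mechanical: `layout.abi` becomes `layout.backend_repr`, `Abi::*` becomes `BackendRepr::*`, and `Abi::Aggregate { sized }` becomes `BackendRepr::Memory { sized }`.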
@@ -1532,7 +1532,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 // the load would just produce `OperandValue::Ref` instead
                 // of the `OperandValue::Immediate` we need for the call.
                 llval = bx.load(bx.backend_type(arg.layout), llval, align);
-                if let abi::Abi::Scalar(scalar) = arg.layout.abi {
+                if let abi::BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
                     if scalar.is_bool() {
                         bx.range_metadata(llval, WrappingRange { start: 0, end: 1 });
                     }
@@ -1,8 +1,8 @@
+use rustc_abi::BackendRepr;
 use rustc_middle::mir::interpret::ErrorHandled;
 use rustc_middle::ty::layout::HasTyCtxt;
 use rustc_middle::ty::{self, Ty};
 use rustc_middle::{bug, mir, span_bug};
-use rustc_target::abi::Abi;
 
 use super::FunctionCx;
 use crate::errors;
@@ -86,7 +86,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             .map(|field| {
                 if let Some(prim) = field.try_to_scalar() {
                     let layout = bx.layout_of(field_ty);
-                    let Abi::Scalar(scalar) = layout.abi else {
+                    let BackendRepr::Scalar(scalar) = layout.backend_repr else {
                         bug!("from_const: invalid ByVal layout: {:#?}", layout);
                     };
                     bx.scalar_to_backend(prim, scalar, bx.immediate_backend_type(layout))
@@ -2,6 +2,7 @@ use std::collections::hash_map::Entry;
 use std::marker::PhantomData;
 use std::ops::Range;
 
+use rustc_abi::{BackendRepr, FieldIdx, FieldsShape, Size, VariantIdx};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_index::IndexVec;
 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
@@ -11,7 +12,6 @@ use rustc_middle::{bug, mir, ty};
 use rustc_session::config::DebugInfo;
 use rustc_span::symbol::{Symbol, kw};
 use rustc_span::{BytePos, Span, hygiene};
-use rustc_target::abi::{Abi, FieldIdx, FieldsShape, Size, VariantIdx};
 
 use super::operand::{OperandRef, OperandValue};
 use super::place::{PlaceRef, PlaceValue};
@@ -510,7 +510,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 // be marked as a `LocalVariable` for MSVC debuggers to visualize
                 // their data correctly. (See #81894 & #88625)
                 let var_ty_layout = self.cx.layout_of(var_ty);
-                if let Abi::ScalarPair(_, _) = var_ty_layout.abi {
+                if let BackendRepr::ScalarPair(_, _) = var_ty_layout.backend_repr {
                     VariableKind::LocalVariable
                 } else {
                     VariableKind::ArgumentVariable(arg_index)
@@ -4,7 +4,7 @@ use std::fmt;
 use arrayvec::ArrayVec;
 use either::Either;
 use rustc_abi as abi;
-use rustc_abi::{Abi, Align, Size};
+use rustc_abi::{Align, BackendRepr, Size};
 use rustc_middle::bug;
 use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
 use rustc_middle::mir::{self, ConstValue};
@@ -163,7 +163,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
 
         let val = match val {
             ConstValue::Scalar(x) => {
-                let Abi::Scalar(scalar) = layout.abi else {
+                let BackendRepr::Scalar(scalar) = layout.backend_repr else {
                     bug!("from_const: invalid ByVal layout: {:#?}", layout);
                 };
                 let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
@@ -171,7 +171,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
             }
             ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
             ConstValue::Slice { data, meta } => {
-                let Abi::ScalarPair(a_scalar, _) = layout.abi else {
+                let BackendRepr::ScalarPair(a_scalar, _) = layout.backend_repr else {
                     bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                 };
                 let a = Scalar::from_pointer(
@@ -221,14 +221,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         // case where some of the bytes are initialized and others are not. So, we need an extra
         // check that walks over the type of `mplace` to make sure it is truly correct to treat this
         // like a `Scalar` (or `ScalarPair`).
-        match layout.abi {
-            Abi::Scalar(s @ abi::Scalar::Initialized { .. }) => {
+        match layout.backend_repr {
+            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                 let size = s.size(bx);
                 assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
                 let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
                 OperandRef { val: OperandValue::Immediate(val), layout }
             }
-            Abi::ScalarPair(
+            BackendRepr::ScalarPair(
                 a @ abi::Scalar::Initialized { .. },
                 b @ abi::Scalar::Initialized { .. },
             ) => {
@@ -322,7 +322,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         llval: V,
         layout: TyAndLayout<'tcx>,
     ) -> Self {
-        let val = if let Abi::ScalarPair(..) = layout.abi {
+        let val = if let BackendRepr::ScalarPair(..) = layout.backend_repr {
             debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);
 
             // Deconstruct the immediate aggregate.
@@ -343,7 +343,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         let field = self.layout.field(bx.cx(), i);
         let offset = self.layout.fields.offset(i);
 
-        let mut val = match (self.val, self.layout.abi) {
+        let mut val = match (self.val, self.layout.backend_repr) {
             // If the field is ZST, it has no data.
             _ if field.is_zst() => OperandValue::ZeroSized,
 
@@ -356,7 +356,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
             }
 
             // Extract a scalar component from a pair.
-            (OperandValue::Pair(a_llval, b_llval), Abi::ScalarPair(a, b)) => {
+            (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
                 if offset.bytes() == 0 {
                     assert_eq!(field.size, a.size(bx.cx()));
                     OperandValue::Immediate(a_llval)
@@ -368,30 +368,30 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
             }
 
             // `#[repr(simd)]` types are also immediate.
-            (OperandValue::Immediate(llval), Abi::Vector { .. }) => {
+            (OperandValue::Immediate(llval), BackendRepr::Vector { .. }) => {
                 OperandValue::Immediate(bx.extract_element(llval, bx.cx().const_usize(i as u64)))
             }
 
             _ => bug!("OperandRef::extract_field({:?}): not applicable", self),
         };
 
-        match (&mut val, field.abi) {
+        match (&mut val, field.backend_repr) {
             (OperandValue::ZeroSized, _) => {}
             (
                 OperandValue::Immediate(llval),
-                Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. },
+                BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) | BackendRepr::Vector { .. },
             ) => {
                 // Bools in union fields needs to be truncated.
                 *llval = bx.to_immediate(*llval, field);
             }
-            (OperandValue::Pair(a, b), Abi::ScalarPair(a_abi, b_abi)) => {
+            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(a_abi, b_abi)) => {
                 // Bools in union fields needs to be truncated.
                 *a = bx.to_immediate_scalar(*a, a_abi);
                 *b = bx.to_immediate_scalar(*b, b_abi);
             }
             // Newtype vector of array, e.g. #[repr(simd)] struct S([i32; 4]);
-            (OperandValue::Immediate(llval), Abi::Aggregate { sized: true }) => {
-                assert_matches!(self.layout.abi, Abi::Vector { .. });
+            (OperandValue::Immediate(llval), BackendRepr::Memory { sized: true }) => {
+                assert_matches!(self.layout.backend_repr, BackendRepr::Vector { .. });
 
                 let llfield_ty = bx.cx().backend_type(field);
 
@@ -400,7 +400,10 @@
                 bx.store(*llval, llptr, field.align.abi);
                 *llval = bx.load(llfield_ty, llptr, field.align.abi);
             }
-            (OperandValue::Immediate(_), Abi::Uninhabited | Abi::Aggregate { sized: false }) => {
+            (
+                OperandValue::Immediate(_),
+                BackendRepr::Uninhabited | BackendRepr::Memory { sized: false },
+            ) => {
                 bug!()
             }
             (OperandValue::Pair(..), _) => bug!(),
@@ -494,7 +497,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
                 bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
             }
             OperandValue::Pair(a, b) => {
-                let Abi::ScalarPair(a_scalar, b_scalar) = dest.layout.abi else {
+                let BackendRepr::ScalarPair(a_scalar, b_scalar) = dest.layout.backend_repr else {
                     bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
                 };
                 let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);
@@ -645,7 +648,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 // However, some SIMD types do not actually use the vector ABI
                 // (in particular, packed SIMD types do not). Ensure we exclude those.
                 let layout = bx.layout_of(constant_ty);
-                if let Abi::Vector { .. } = layout.abi {
+                if let BackendRepr::Vector { .. } = layout.backend_repr {
                     let (llval, ty) = self.immediate_const_vector(bx, constant);
                     return OperandRef {
                         val: OperandValue::Immediate(llval),
@@ -1136,17 +1136,17 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             OperandValueKind::ZeroSized
         } else if self.cx.is_backend_immediate(layout) {
            assert!(!self.cx.is_backend_scalar_pair(layout));
-            OperandValueKind::Immediate(match layout.abi {
-                abi::Abi::Scalar(s) => s,
-                abi::Abi::Vector { element, .. } => element,
+            OperandValueKind::Immediate(match layout.backend_repr {
+                abi::BackendRepr::Scalar(s) => s,
+                abi::BackendRepr::Vector { element, .. } => element,
                 x => span_bug!(self.mir.span, "Couldn't translate {x:?} as backend immediate"),
             })
         } else if self.cx.is_backend_scalar_pair(layout) {
-            let abi::Abi::ScalarPair(s1, s2) = layout.abi else {
+            let abi::BackendRepr::ScalarPair(s1, s2) = layout.backend_repr else {
                 span_bug!(
                     self.mir.span,
                     "Couldn't translate {:?} as backend scalar pair",
-                    layout.abi,
+                    layout.backend_repr,
                 );
             };
             OperandValueKind::Pair(s1, s2)
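The hunk above is where rustc_codegen_ssa decides which operand kind a given layout lowers to. A minimal standalone sketch of that classification under the new names follows; the `classify` function, the `ValueShape` enum, and the grouping of `Uninhabited` with memory are illustrative assumptions for this page, not rustc API.

```rust
use rustc_abi::{BackendRepr, Scalar};

/// Illustrative "shape" of a value as seen by a backend. Not part of rustc.
enum ValueShape {
    ZeroSized,
    Immediate(Scalar),
    Pair(Scalar, Scalar),
    InMemory,
}

/// Mirrors the default mapping in the hunk above.
fn classify(repr: BackendRepr, is_zst: bool) -> ValueShape {
    if is_zst {
        return ValueShape::ZeroSized;
    }
    match repr {
        // A lone scalar or a SIMD vector becomes a single backend immediate.
        BackendRepr::Scalar(s) => ValueShape::Immediate(s),
        BackendRepr::Vector { element, .. } => ValueShape::Immediate(element),
        // Two scalars travel as a pair of immediates.
        BackendRepr::ScalarPair(a, b) => ValueShape::Pair(a, b),
        // Everything else (formerly `Abi::Aggregate`) goes through memory.
        BackendRepr::Memory { .. } | BackendRepr::Uninhabited => ValueShape::InMemory,
    }
}
```

The real code additionally consults `is_backend_immediate` / `is_backend_scalar_pair`, which a backend can override; the sketch only mirrors the default mapping shown in the hunk.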
@@ -1,13 +1,13 @@
 use std::assert_matches::assert_matches;
 use std::ops::Deref;
 
+use rustc_abi::{Align, BackendRepr, Scalar, Size, WrappingRange};
 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
 use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
 use rustc_middle::ty::{Instance, Ty};
 use rustc_session::config::OptLevel;
 use rustc_span::Span;
 use rustc_target::abi::call::FnAbi;
-use rustc_target::abi::{Abi, Align, Scalar, Size, WrappingRange};
 
 use super::abi::AbiBuilderMethods;
 use super::asm::AsmBuilderMethods;
@@ -162,7 +162,7 @@ pub trait BuilderMethods<'a, 'tcx>:
 
     fn from_immediate(&mut self, val: Self::Value) -> Self::Value;
     fn to_immediate(&mut self, val: Self::Value, layout: TyAndLayout<'_>) -> Self::Value {
-        if let Abi::Scalar(scalar) = layout.abi {
+        if let BackendRepr::Scalar(scalar) = layout.backend_repr {
             self.to_immediate_scalar(val, scalar)
         } else {
             val