compiler: rustc_abi::Abi => BackendRepr

The initial naming of "Abi" was an awful mistake, conveying wrong ideas
about how psABIs worked and even more about what the enum meant.
It was only meant to represent the way the value would be described to
a codegen backend as it was lowered to that intermediate representation.
It was never meant to mean anything about the actual psABI handling!
The conflation happened because LLVM typically associates a certain
form with a certain ABI, but even that correspondence does not hold
once the special cases that actually exist come into play, and IR
annotations can further modify the ABI.
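
For orientation, an abridged sketch of the enum after this rename
(payload details are approximate and recalled from memory; see
`rustc_abi` for the authoritative definition):

    pub enum BackendRepr {
        Uninhabited,
        // A single primitive value, e.g. an integer or a pointer.
        Scalar(Scalar),
        // Two primitives, e.g. a wide pointer's data and metadata.
        ScalarPair(Scalar, Scalar),
        // A SIMD vector of `count` elements.
        Vector { element: Scalar, count: u64 },
        // Formerly `Abi::Aggregate`: "describe this value to the
        // backend by its memory layout", which is not the same as
        // "pass it in memory" per the psABI.
        Memory { sized: bool },
    }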

Reframe `rustc_abi::Abi` as the `BackendRepr` of the type, and rename
`BackendRepr::Aggregate` as `BackendRepr::Memory`. Unfortunately, due to
the persistent misunderstandings, this too is now incorrect:
- Scattered ABI-relevant code is entangled with BackendRepr
- We do not always pre-compute a correct BackendRepr that reflects how
  we "actually" want this value to be handled, so we leave the backend
  interface to also inject various special-cases here
- In some cases `BackendRepr::Memory` is a "real" aggregate, but in
  others it merely means the value is handled via memory, and in some
  cases it is actually a scalar! (A sketch follows this list.)
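
To illustrate that last point, a hypothetical sketch (not actual rustc
code; `ArgAbi` and `PassMode` are the existing `rustc_target` types,
module paths approximate for this period, and the helper itself is
made up): the repr a value presents to the backend and the pass mode
chosen for the call ABI are decided separately, so a `Memory` repr does
not by itself tell you the value travels through memory.

    use rustc_abi::BackendRepr;
    use rustc_target::abi::call::{ArgAbi, PassMode};

    // Hypothetical helper: what `Memory` does and does not imply.
    fn memory_is_not_always_memory<'a, Ty>(arg: &ArgAbi<'a, Ty>) {
        match (arg.layout.backend_repr, &arg.mode) {
            // Repr says "memory", yet the call ABI may still pass the
            // value in registers by casting it to register-sized pieces.
            (BackendRepr::Memory { .. }, PassMode::Cast { .. }) => {}
            // Here it really is passed indirectly, through memory.
            (BackendRepr::Memory { .. }, PassMode::Indirect { .. }) => {}
            // In every other combination the two are likewise
            // independent decisions.
            _ => {}
        }
    }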

Our rustc-to-backend lowering code handles this sort of thing right now.
That will eventually be addressed by lifting duplicated lowering code
to either rustc_codegen_ssa or rustc_target as appropriate.
Jubilee Young 2024-10-29 13:37:26 -07:00
parent 2dece5bb62
commit 7086dd83cc
51 changed files with 517 additions and 428 deletions


@@ -258,8 +258,8 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                 self.call_intrinsic("llvm.va_copy", &[args[0].immediate(), args[1].immediate()])
             }
             sym::va_arg => {
-                match fn_abi.ret.layout.abi {
-                    abi::Abi::Scalar(scalar) => {
+                match fn_abi.ret.layout.backend_repr {
+                    abi::BackendRepr::Scalar(scalar) => {
                         match scalar.primitive() {
                             Primitive::Int(..) => {
                                 if self.cx().size_of(ret_ty).bytes() < 4 {
@@ -436,13 +436,13 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
             }
             sym::raw_eq => {
-                use abi::Abi::*;
+                use abi::BackendRepr::*;
                 let tp_ty = fn_args.type_at(0);
                 let layout = self.layout_of(tp_ty).layout;
-                let use_integer_compare = match layout.abi() {
+                let use_integer_compare = match layout.backend_repr() {
                     Scalar(_) | ScalarPair(_, _) => true,
                     Uninhabited | Vector { .. } => false,
-                    Aggregate { .. } => {
+                    Memory { .. } => {
                         // For rusty ABIs, small aggregates are actually passed
                         // as `RegKind::Integer` (see `FnAbi::adjust_for_abi`),
                         // so we re-use that same threshold here.
@@ -549,7 +549,8 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
         }
 
         let llret_ty = if ret_ty.is_simd()
-            && let abi::Abi::Aggregate { .. } = self.layout_of(ret_ty).layout.abi
+            && let abi::BackendRepr::Memory { .. } =
+                self.layout_of(ret_ty).layout.backend_repr
         {
             let (size, elem_ty) = ret_ty.simd_size_and_type(self.tcx());
             let elem_ll_ty = match elem_ty.kind() {