
compiler: rustc_abi::Abi => BackendRepr

The initial naming of "Abi" was an awful mistake, conveying wrong ideas
about how psABIs worked and even more about what the enum meant.
It was only meant to represent the way the value would be described to
a codegen backend as it was lowered to that intermediate representation.
It was never meant to mean anything about the actual psABI handling!
The conflation arose because LLVM typically associates a certain form
with a certain ABI, but even that association breaks down in the
special cases that actually arise, and IR annotations can modify the
ABI further.
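
For orientation, a sketch of the enum's shape around the time of this
change (simplified from rustc_abi; treat it as an illustration rather
than the exact definition):

    // Before: the name suggested psABI semantics the enum never had.
    pub enum Abi {
        Uninhabited,
        Scalar(Scalar),
        ScalarPair(Scalar, Scalar),
        Vector { element: Scalar, count: u64 },
        Aggregate { sized: bool },
    }

    // After: the same data, renamed to say what it actually describes --
    // the form a value takes when handed to a codegen backend.
    pub enum BackendRepr {
        Uninhabited,
        Scalar(Scalar),
        ScalarPair(Scalar, Scalar),
        Vector { element: Scalar, count: u64 },
        Memory { sized: bool },
    }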

Reframe `rustc_abi::Abi` as the `BackendRepr` of the type, and rename
`BackendRepr::Aggregate` as `BackendRepr::Memory`. Unfortunately, due to
the persistent misunderstandings, this too is now incorrect:
- Scattered ABI-relevant code is entangled with BackendRepr
- We do not always pre-compute a correct BackendRepr that reflects how
  we "actually" want this value to be handled, so we leave the backend
  interface to also inject various special-cases here
- In some cases `BackendRepr::Memory` is a "real" aggregate, in others
  it really is passed via memory, and in some cases it is actually a
  scalar! (a sketch below illustrates the mismatch)
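
As one concrete illustration of that mismatch (my example, not taken
from this commit): rustc's layout code gives the struct below a Memory
repr, yet the x86-64 SysV psABI classifies it as INTEGER and passes it
by value in two registers, so the repr says nothing about the calling
convention.

    // Hypothetical example: `BackendRepr::Memory` does not mean
    // "passed in memory". On x86-64 SysV this 12-byte struct is
    // passed by value in registers despite its Memory repr.
    #[repr(C)]
    struct Triple {
        a: i32,
        b: i32,
        c: i32,
    }

    extern "C" fn sum(t: Triple) -> i32 {
        t.a + t.b + t.c
    }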

Our rustc-to-backend lowering code handles this sort of thing right now.
That will eventually be addressed by lifting duplicated lowering code
to either rustc_codegen_ssa or rustc_target as appropriate.
Jubilee Young 2024-10-29 13:37:26 -07:00
parent 2dece5bb62
commit 7086dd83cc
51 changed files with 517 additions and 428 deletions

@@ -131,7 +131,7 @@ impl<'tcx> interpret::Machine<'tcx> for DummyMachine {
         interp_ok(match bin_op {
             Eq | Ne | Lt | Le | Gt | Ge => {
                 // Types can differ, e.g. fn ptrs with different `for`.
-                assert_eq!(left.layout.abi, right.layout.abi);
+                assert_eq!(left.layout.backend_repr, right.layout.backend_repr);
                 let size = ecx.pointer_size();
                 // Just compare the bits. ScalarPairs are compared lexicographically.
                 // We thus always compare pairs and simply fill scalars up with 0.

@@ -1,6 +1,7 @@
 use std::sync::atomic::Ordering::Relaxed;
 
 use either::{Left, Right};
+use rustc_abi::{self as abi, BackendRepr};
 use rustc_hir::def::DefKind;
 use rustc_middle::bug;
 use rustc_middle::mir::interpret::{AllocId, ErrorHandled, InterpErrorInfo};
@@ -12,7 +13,6 @@ use rustc_middle::ty::print::with_no_trimmed_paths;
 use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_span::def_id::LocalDefId;
 use rustc_span::{DUMMY_SP, Span};
-use rustc_target::abi::{self, Abi};
 use tracing::{debug, instrument, trace};
 
 use super::{CanAccessMutGlobal, CompileTimeInterpCx, CompileTimeMachine};
@@ -174,8 +174,8 @@ pub(super) fn op_to_const<'tcx>(
     // type (it's used throughout the compiler and having it work just on literals is not enough)
     // and we want it to be fast (i.e., don't go to an `Allocation` and reconstruct the `Scalar`
     // from its byte-serialized form).
-    let force_as_immediate = match op.layout.abi {
-        Abi::Scalar(abi::Scalar::Initialized { .. }) => true,
+    let force_as_immediate = match op.layout.backend_repr {
+        BackendRepr::Scalar(abi::Scalar::Initialized { .. }) => true,
         // We don't *force* `ConstValue::Slice` for `ScalarPair`. This has the advantage that if the
         // input `op` is a place, then turning it into a `ConstValue` and back into a `OpTy` will
         // not have to generate any duplicate allocations (we preserve the original `AllocId` in

@@ -1,10 +1,10 @@
+use rustc_abi::{BackendRepr, VariantIdx};
 use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId};
 use rustc_middle::ty::layout::{LayoutCx, LayoutOf, TyAndLayout};
 use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
 use rustc_middle::{bug, mir};
 use rustc_span::DUMMY_SP;
-use rustc_target::abi::{Abi, VariantIdx};
 use tracing::{debug, instrument, trace};
 
 use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const};
@@ -117,7 +117,7 @@ fn const_to_valtree_inner<'tcx>(
             let val = ecx.read_immediate(place).unwrap();
             // We could allow wide raw pointers where both sides are integers in the future,
             // but for now we reject them.
-            if matches!(val.layout.abi, Abi::ScalarPair(..)) {
+            if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) {
                 return Err(ValTreeCreationError::NonSupportedType(ty));
             }
             let val = val.to_scalar();
@@ -311,7 +311,7 @@ pub fn valtree_to_const_value<'tcx>(
         // Fast path to avoid some allocations.
         return mir::ConstValue::ZeroSized;
     }
-    if layout.abi.is_scalar()
+    if layout.backend_repr.is_scalar()
         && (matches!(ty.kind(), ty::Tuple(_))
             || matches!(ty.kind(), ty::Adt(def, _) if def.is_struct()))
     {
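
All of the hunks above follow the same mechanical pattern. A condensed
before/after sketch (field and variant names as renamed by this commit;
the surrounding code is hypothetical):

    // Before the rename: call sites matched on `layout.abi`.
    if matches!(val.layout.abi, Abi::ScalarPair(..)) { /* ... */ }

    // After the rename: the same check against `layout.backend_repr`.
    if matches!(val.layout.backend_repr, BackendRepr::ScalarPair(..)) { /* ... */ }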