
don't force all slice-typed ConstValue to be ConstValue::Slice

Ralf Jung 2023-09-12 15:57:40 +02:00
parent 430c386821
commit 7aa44eee99
3 changed files with 36 additions and 54 deletions
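Note: for readers unfamiliar with these types, here is a minimal toy sketch of the distinction the commit title refers to. These are not rustc's real definitions (the name `ToyConstValue` and the field types are made up for illustration); the point is that a slice-typed constant may now be represented either "by value" as `Slice` or "by reference into an allocation" as `Indirect`, so consumers can no longer assume the `Slice` form.

// Toy model only -- not the compiler's actual types.
#[allow(dead_code)]
#[derive(Debug)]
enum ToyConstValue {
    // Scalar constants are stored directly.
    Scalar(u128),
    // A slice constant stored "by value": backing bytes plus a start..end range.
    Slice { data: Vec<u8>, start: usize, end: usize },
    // A constant stored in some interned allocation, referenced by id and offset.
    Indirect { alloc_id: u64, offset: u64 },
}

fn main() {
    // After this commit, both of these are legal representations of a `&str` constant;
    // code that matches on `Slice` alone would miss the second one.
    let by_value = ToyConstValue::Slice { data: b"hello".to_vec(), start: 0, end: 5 };
    let by_ref = ToyConstValue::Indirect { alloc_id: 42, offset: 0 };
    println!("{by_value:?}\n{by_ref:?}");
}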


@@ -18,9 +18,9 @@ use super::{CanAccessStatics, CompileTimeEvalContext, CompileTimeInterpreter};
 use crate::errors;
 use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
-    intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
-    Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
-    RefTracking, StackPopCleanup,
+    intern_const_alloc_recursive, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId, Immediate,
+    InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
+    StackPopCleanup,
 };
 
 // Returns a pointer to where the result lives
@@ -105,8 +105,7 @@ pub(super) fn mk_eval_cx<'mir, 'tcx>(
     )
 }
 
-/// This function converts an interpreter value into a constant that is meant for use in the
-/// type system.
+/// This function converts an interpreter value into a MIR constant.
 #[instrument(skip(ecx), level = "debug")]
 pub(super) fn op_to_const<'tcx>(
     ecx: &CompileTimeEvalContext<'_, 'tcx>,
@@ -117,28 +116,25 @@ pub(super) fn op_to_const<'tcx>(
         return ConstValue::ZeroSized;
     }
 
-    // We do not have value optimizations for everything.
-    // Only scalars and slices, since they are very common.
-    let try_as_immediate = match op.layout.abi {
+    // All scalar types should be stored as `ConstValue::Scalar`. This is needed to make
+    // `ConstValue::try_to_scalar` efficient; we want that to work for *all* constants of scalar
+    // type (it's used throughout the compiler and having it work just on literals is not enough)
+    // and we want it to be fast (i.e., don't go to an `Allocation` and reconstruct the `Scalar`
+    // from its byte-serialized form).
+    let force_as_immediate = match op.layout.abi {
         Abi::Scalar(abi::Scalar::Initialized { .. }) => true,
-        Abi::ScalarPair(..) => match op.layout.ty.kind() {
-            ty::Ref(_, inner, _) => match *inner.kind() {
-                ty::Slice(elem) => elem == ecx.tcx.types.u8,
-                ty::Str => true,
-                _ => false,
-            },
-            _ => false,
-        },
+        // We don't *force* `ConstValue::Slice` for `ScalarPair`. This has the advantage that if the
+        // input `op` is a place, then turning it into a `ConstValue` and back into a `OpTy` will
+        // not have to generate any duplicate allocations (we preserve the original `AllocId` in
+        // `ConstValue::Indirect`). It means accessing the contents of a slice can be slow (since
+        // they can be stored as `ConstValue::Indirect`), but that's not relevant since we barely
+        // ever have to do this. (`try_get_slice_bytes_for_diagnostics` exists to provide this
+        // functionality.)
         _ => false,
     };
-    let immediate = if try_as_immediate {
+    let immediate = if force_as_immediate {
         Right(ecx.read_immediate(op).expect("normalization works on validated constants"))
     } else {
-        // It is guaranteed that any non-slice scalar pair is actually `Indirect` here.
-        // When we come back from raw const eval, we are always by-ref. The only way our op here is
-        // by-val is if we are in destructure_mir_constant, i.e., if this is (a field of) something that we
-        // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
-        // structs containing such.
         op.as_mplace_or_imm()
     };
 
@@ -151,25 +147,22 @@ pub(super) fn op_to_const<'tcx>(
             let alloc_id = alloc_id.expect("cannot have `fake` place fot non-ZST type");
             ConstValue::Indirect { alloc_id, offset }
         }
-        // see comment on `let try_as_immediate` above
+        // see comment on `let force_as_immediate` above
         Right(imm) => match *imm {
             Immediate::Scalar(x) => ConstValue::Scalar(x),
             Immediate::ScalarPair(a, b) => {
                 debug!("ScalarPair(a: {:?}, b: {:?})", a, b);
+                // FIXME: assert that this has an appropriate type.
+                // Currently we actually get here for non-[u8] slices during valtree construction!
+                let msg = "`op_to_const` on an immediate scalar pair must only be used on slice references to actually allocated memory";
                 // We know `offset` is relative to the allocation, so we can use `into_parts`.
-                let (data, start) = match a.to_pointer(ecx).unwrap().into_parts() {
-                    (Some(alloc_id), offset) => {
-                        (ecx.tcx.global_alloc(alloc_id).unwrap_memory(), offset.bytes())
-                    }
-                    (None, _offset) => (
-                        ecx.tcx.mk_const_alloc(Allocation::from_bytes_byte_aligned_immutable(
-                            b"" as &[u8],
-                        )),
-                        0,
-                    ),
-                };
-                let len = b.to_target_usize(ecx).unwrap();
-                let start = start.try_into().unwrap();
+                // We use `ConstValue::Slice` so that we don't have to generate an allocation for
+                // `ConstValue::Indirect` here.
+                let (alloc_id, offset) = a.to_pointer(ecx).expect(msg).into_parts();
+                let alloc_id = alloc_id.expect(msg);
+                let data = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
+                let start = offset.bytes_usize();
+                let len = b.to_target_usize(ecx).expect(msg);
                 let len: usize = len.try_into().unwrap();
                 ConstValue::Slice { data, start, end: start + len }
             }
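Note: to make the selection logic in the hunks above easier to follow, here is a small self-contained sketch of the same decision. `LayoutAbi` is a stand-in for `rustc_target::abi::Abi` invented for illustration; only the cases relevant to this diff are modeled.

// Stand-in for `rustc_target::abi::Abi`; not the real type.
#[allow(dead_code)]
enum LayoutAbi {
    ScalarInitialized,
    ScalarPair,
    Aggregate,
}

// Mirrors the new `force_as_immediate` match: only initialized scalar ABIs are forced
// into an immediate (so `ConstValue::try_to_scalar` works for every scalar-typed
// constant); scalar pairs such as `&str`/`&[u8]` references are left alone, which lets
// them stay `ConstValue::Indirect` and avoids duplicating their backing allocation.
fn force_as_immediate(abi: &LayoutAbi) -> bool {
    matches!(abi, LayoutAbi::ScalarInitialized)
}

fn main() {
    assert!(force_as_immediate(&LayoutAbi::ScalarInitialized));
    assert!(!force_as_immediate(&LayoutAbi::ScalarPair));
    assert!(!force_as_immediate(&LayoutAbi::Aggregate));
}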


@@ -149,7 +149,7 @@ pub use self::error::{
     UnsupportedOpInfo, ValidationErrorInfo, ValidationErrorKind,
 };
 
-pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
+pub use self::value::{ConstAlloc, ConstValue, Scalar};
 
 pub use self::allocation::{
     alloc_range, AllocBytes, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation,


@@ -12,7 +12,7 @@ use rustc_target::abi::{HasDataLayout, Size};
 use crate::ty::{ParamEnv, ScalarInt, Ty, TyCtxt};
 
 use super::{
-    AllocId, AllocRange, ConstAllocation, InterpResult, Pointer, PointerArithmetic, Provenance,
+    AllocId, ConstAllocation, InterpResult, Pointer, PointerArithmetic, Provenance,
     ScalarSizeMismatch,
 };
 
@@ -40,10 +40,14 @@
     /// Used for `&[u8]` and `&str`.
     ///
-    /// This is worth the optimization since Rust has literals of that type.
+    /// This is worth an optimized representation since Rust has literals of these types.
+    /// Not having to indirect those through an `AllocId` (or two, if we used `Indirect`) has shown
+    /// measurable performance improvements on stress tests.
     Slice { data: ConstAllocation<'tcx>, start: usize, end: usize },
 
     /// A value not representable by the other variants; needs to be stored in-memory.
+    ///
+    /// Must *not* be used for scalars or ZST, but having `&str` or other slices in this variant is fine.
     Indirect {
         /// The backing memory of the value. May contain more memory than needed for just the value
         /// if this points into some other larger ConstValue.
@@ -511,18 +515,3 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
         Ok(Double::from_bits(self.to_u64()?.into()))
     }
 }
-
-/// Gets the bytes of a constant slice value.
-pub fn get_slice_bytes<'tcx>(cx: &impl HasDataLayout, val: ConstValue<'tcx>) -> &'tcx [u8] {
-    if let ConstValue::Slice { data, start, end } = val {
-        let len = end - start;
-        data.inner()
-            .get_bytes_strip_provenance(
-                cx,
-                AllocRange { start: Size::from_bytes(start), size: Size::from_bytes(len) },
-            )
-            .unwrap_or_else(|err| bug!("const slice is invalid: {:?}", err))
-    } else {
-        bug!("expected const slice, but found another const value");
-    }
-}
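Note: as a rough illustration of what the removed `get_slice_bytes` computed, here is a sketch that uses a plain `&[u8]` in place of `ConstAllocation` and a fallible return in the spirit of the `try_get_slice_bytes_for_diagnostics` path mentioned in the diff. The helper name `slice_bytes` is made up; this is not the compiler's API.

// `data` stands in for the backing allocation; `start..end` is the byte range a
// `ConstValue::Slice` refers to. Returning `Option` reflects that, after this commit,
// callers cannot assume every slice-typed constant is even stored in `Slice` form.
fn slice_bytes(data: &[u8], start: usize, end: usize) -> Option<&[u8]> {
    data.get(start..end)
}

fn main() {
    let backing = b"hello, world";
    // A `&str` constant pointing at bytes 7..12 of its backing allocation.
    assert_eq!(slice_bytes(backing, 7, 12), Some(&b"world"[..]));
    // An out-of-range request yields `None` instead of aborting.
    assert_eq!(slice_bytes(backing, 7, 64), None);
}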