Auto merge of #127638 - adwinwhite:cache_string, r=oli-obk
Add cache for `allocate_str`: a best-effort cache for string allocation in const eval. Fixes [rust-lang/miri#3470](https://github.com/rust-lang/miri/issues/3470).
Commit 5c84886056
4 changed files with 28 additions and 10 deletions
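
The change keys immutable string allocations on their byte contents: when const eval allocates the same string again, `allocate_str` reuses the existing global allocation instead of reserving a fresh `AllocId`. The sketch below is only an illustrative, self-contained model of that content-addressed caching idea; the names `DedupAllocs` and the `AllocId(u64)` newtype are invented for the example, while the real cache is the existing `dedup` map in `AllocMap`, keyed by `GlobalAlloc`.

```rust
use std::collections::HashMap;

/// Identifier handed out for an interned (deduplicated) allocation.
/// Hypothetical stand-in for the compiler's `AllocId`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct AllocId(u64);

/// Minimal content-addressed cache: identical byte contents share one id.
#[derive(Default)]
struct DedupAllocs {
    next_id: u64,
    by_contents: HashMap<Vec<u8>, AllocId>,
}

impl DedupAllocs {
    /// Return the cached id for `bytes`, reserving a fresh one on first use.
    fn allocate_bytes_dedup(&mut self, bytes: &[u8]) -> AllocId {
        if let Some(&id) = self.by_contents.get(bytes) {
            return id; // cache hit: same contents, same allocation
        }
        let id = AllocId(self.next_id);
        self.next_id += 1;
        self.by_contents.insert(bytes.to_vec(), id);
        id
    }
}

fn main() {
    let mut allocs = DedupAllocs::default();
    let a = allocs.allocate_bytes_dedup(b"hello");
    let b = allocs.allocate_bytes_dedup(b"hello");
    let c = allocs.allocate_bytes_dedup(b"world");
    assert_eq!(a, b); // a duplicate string reuses the allocation
    assert_ne!(a, c);
    println!("{a:?} {b:?} {c:?}");
}
```

Rather than adding a separate table, the patch stores the deduplicated byte allocations in the same `dedup` map, which is why `reserve_and_set_dedup` now checks mutability instead of rejecting `GlobalAlloc::Memory` outright.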
@@ -995,13 +995,25 @@ where
     }
 
     /// Returns a wide MPlace of type `str` to a new 1-aligned allocation.
+    /// Immutable strings are deduplicated and stored in global memory.
     pub fn allocate_str(
         &mut self,
         str: &str,
         kind: MemoryKind<M::MemoryKind>,
         mutbl: Mutability,
     ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
-        let ptr = self.allocate_bytes_ptr(str.as_bytes(), Align::ONE, kind, mutbl)?;
+        let tcx = self.tcx.tcx;
+
+        // Use cache for immutable strings.
+        let ptr = if mutbl.is_not() {
+            // Use dedup'd allocation function.
+            let id = tcx.allocate_bytes_dedup(str.as_bytes());
+
+            // Turn untagged "global" pointers (obtained via `tcx`) into the machine pointer to the allocation.
+            M::adjust_alloc_root_pointer(&self, Pointer::from(id), Some(kind))?
+        } else {
+            self.allocate_bytes_ptr(str.as_bytes(), Align::ONE, kind, mutbl)?
+        };
         let meta = Scalar::from_target_usize(u64::try_from(str.len()).unwrap(), self);
         let layout = self.layout_of(self.tcx.types.str_).unwrap();
         Ok(self.ptr_with_meta_to_mplace(ptr.into(), MemPlaceMeta::Meta(meta), layout))
@@ -393,7 +393,6 @@ pub(crate) struct AllocMap<'tcx> {
     alloc_map: FxHashMap<AllocId, GlobalAlloc<'tcx>>,
 
     /// Used to ensure that statics and functions only get one associated `AllocId`.
-    /// Should never contain a `GlobalAlloc::Memory`!
     //
     // FIXME: Should we just have two separate dedup maps for statics and functions each?
     dedup: FxHashMap<GlobalAlloc<'tcx>, AllocId>,
@@ -433,13 +432,13 @@ impl<'tcx> TyCtxt<'tcx> {
     }
 
     /// Reserves a new ID *if* this allocation has not been dedup-reserved before.
-    /// Should only be used for "symbolic" allocations (function pointers, vtables, statics), we
-    /// don't want to dedup IDs for "real" memory!
+    /// Should not be used for mutable memory.
    fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>) -> AllocId {
         let mut alloc_map = self.alloc_map.lock();
-        match alloc {
-            GlobalAlloc::Function { .. } | GlobalAlloc::Static(..) | GlobalAlloc::VTable(..) => {}
-            GlobalAlloc::Memory(..) => bug!("Trying to dedup-reserve memory with real data!"),
+        if let GlobalAlloc::Memory(mem) = alloc {
+            if mem.inner().mutability.is_mut() {
+                bug!("trying to dedup-reserve mutable memory");
+            }
         }
         if let Some(&alloc_id) = alloc_map.dedup.get(&alloc) {
             return alloc_id;
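
The `bug!` guard added above is what keeps the cache sound: only immutable memory may be dedup-reserved, because two logically distinct mutable allocations must never share backing storage. A minimal standalone sketch of the aliasing hazard the guard rules out, using `Rc<RefCell<...>>` purely as a stand-in for a shared backing allocation (nothing here is compiler API):

```rust
use std::cell::RefCell;
use std::rc::Rc;

fn main() {
    // Pretend two *mutable* string allocations were deduplicated into one
    // backing buffer. Both handles now alias the same bytes.
    let shared: Rc<RefCell<Vec<u8>>> = Rc::new(RefCell::new(b"hello".to_vec()));
    let alloc_a = Rc::clone(&shared);
    let alloc_b = Rc::clone(&shared);

    // A write through the first allocation...
    alloc_a.borrow_mut()[0] = b'J';

    // ...is observable through the second, which should have been independent.
    assert_eq!(&*alloc_b.borrow(), b"Jello");
}
```

Immutable data has no such hazard, which is why `allocate_str` only consults the cache when `mutbl.is_not()`.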
@@ -451,6 +450,12 @@ impl<'tcx> TyCtxt<'tcx> {
         id
     }
 
+    /// Generates an `AllocId` for a memory allocation. If the exact same memory has been
+    /// allocated before, this will return the same `AllocId`.
+    pub fn reserve_and_set_memory_dedup(self, mem: ConstAllocation<'tcx>) -> AllocId {
+        self.reserve_and_set_dedup(GlobalAlloc::Memory(mem))
+    }
+
     /// Generates an `AllocId` for a static or return a cached one in case this function has been
     /// called on the same static before.
     pub fn reserve_and_set_static_alloc(self, static_id: DefId) -> AllocId {
@@ -1442,11 +1442,12 @@ impl<'tcx> TyCtxt<'tcx> {
     }
 
     /// Allocates a read-only byte or string literal for `mir::interpret`.
-    pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
+    /// Returns the same `AllocId` if called again with the same bytes.
+    pub fn allocate_bytes_dedup(self, bytes: &[u8]) -> interpret::AllocId {
         // Create an allocation that just contains these bytes.
         let alloc = interpret::Allocation::from_bytes_byte_aligned_immutable(bytes);
         let alloc = self.mk_const_alloc(alloc);
-        self.reserve_and_set_memory_alloc(alloc)
+        self.reserve_and_set_memory_dedup(alloc)
     }
 
     /// Returns a range of the start/end indices specified with the
@@ -140,7 +140,7 @@ fn lit_to_mir_constant<'tcx>(
             ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() }
         }
         (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => {
-            let id = tcx.allocate_bytes(data);
+            let id = tcx.allocate_bytes_dedup(data);
             ConstValue::Scalar(Scalar::from_pointer(id.into(), &tcx))
         }
         (ast::LitKind::CStr(data, _), ty::Ref(_, inner_ty, _)) if matches!(inner_ty.kind(), ty::Adt(def, _) if tcx.is_lang_item(def.did(), LangItem::CStr)) =>