
Refactor to make the interpreter and codegen backends neutral to the vtable's internal representation.

Charles Lew 2021-06-14 18:02:53 +08:00
parent 14831568d5
commit a86d3a7e45
10 changed files with 246 additions and 177 deletions
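The diff below replaces the hard-coded "drop, size, align, then method pointers" vtable layout with a declarative list of `VtblEntry` values queried via `tcx.vtable_entries(trait_ref)`, with `COMMON_VTABLE_ENTRIES` describing the metadata-only prefix used when there is no principal trait. As a rough standalone sketch of what the names referenced in the diff suggest (not the actual rustc_middle definitions; `DefId` and `SubstsRef` below are placeholders for the compiler's types):

```rust
// Standalone sketch; the real definitions live in rustc_middle::ty.
// `DefId` and `SubstsRef` are placeholders standing in for compiler types.
#[derive(Clone, Copy, Debug)]
struct DefId(u32);
#[derive(Clone, Copy, Debug)]
struct SubstsRef;

#[derive(Clone, Copy, Debug)]
enum VtblEntry {
    // Pointer to `drop_in_place` for the concrete type.
    MetadataDropInPlace,
    // Size of the concrete type.
    MetadataSize,
    // Alignment of the concrete type.
    MetadataAlign,
    // An empty slot, e.g. for a method that cannot be called on a trait object.
    Vacant,
    // A trait method to be resolved to a function pointer.
    Method(DefId, SubstsRef),
}

// Entries shared by every vtable, in slot order, plus the indices the
// interpreter uses to read the metadata back out.
const COMMON_VTABLE_ENTRIES: &[VtblEntry] =
    &[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;

fn main() {
    assert_eq!(COMMON_VTABLE_ENTRIES.len(), 3);
    println!(
        "{:?} {:?} {:?}",
        COMMON_VTABLE_ENTRIES[COMMON_VTABLE_ENTRIES_DROPINPLACE],
        COMMON_VTABLE_ENTRIES[COMMON_VTABLE_ENTRIES_SIZE],
        COMMON_VTABLE_ENTRIES[COMMON_VTABLE_ENTRIES_ALIGN],
    );
}
```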


@@ -1,7 +1,10 @@
use std::convert::TryFrom;
use rustc_middle::mir::interpret::{InterpResult, Pointer, PointerArithmetic, Scalar};
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::ty::{
self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
COMMON_VTABLE_ENTRIES_DROPINPLACE, COMMON_VTABLE_ENTRIES_SIZE,
};
use rustc_target::abi::{Align, LayoutOf, Size};
use super::util::ensure_monomorphic_enough;
@@ -35,13 +38,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
return Ok(vtable);
}
let methods = if let Some(poly_trait_ref) = poly_trait_ref {
let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
let trait_ref = poly_trait_ref.with_self_ty(*self.tcx, ty);
let trait_ref = self.tcx.erase_regions(trait_ref);
self.tcx.vtable_methods(trait_ref)
self.tcx.vtable_entries(trait_ref)
} else {
&[]
COMMON_VTABLE_ENTRIES
};
let layout = self.layout_of(ty)?;
@@ -56,38 +59,41 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// If you touch this code, be sure to also make the corresponding changes to
// `get_vtable` in `rust_codegen_llvm/meth.rs`.
// /////////////////////////////////////////////////////////////////////////////////////////
let vtable_size = ptr_size * u64::try_from(methods.len()).unwrap().checked_add(3).unwrap();
let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
let vtable = self.memory.allocate(vtable_size, ptr_align, MemoryKind::Vtable);
let drop = Instance::resolve_drop_in_place(tcx, ty);
let drop = self.memory.create_fn_alloc(FnVal::Instance(drop));
// Prepare the fn ptrs we will write into the vtable later.
let fn_ptrs = methods
.iter()
.enumerate() // remember the original position
.filter_map(|(i, method)| {
if let Some((def_id, substs)) = method { Some((i, def_id, substs)) } else { None }
})
.map(|(i, def_id, substs)| {
let instance =
ty::Instance::resolve_for_vtable(tcx, self.param_env, *def_id, substs)
.ok_or_else(|| err_inval!(TooGeneric))?;
Ok((i, self.memory.create_fn_alloc(FnVal::Instance(instance))))
})
.collect::<InterpResult<'tcx, Vec<(usize, Pointer<M::PointerTag>)>>>()?;
// No need to do any alignment checks on the memory accesses below, because we know the
// allocation is correctly aligned as we created it above. Also we're only offsetting by
// multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
let scalars = vtable_entries
.iter()
.map(|entry| -> InterpResult<'tcx, _> {
match entry {
VtblEntry::MetadataDropInPlace => Ok(Some(drop.into())),
VtblEntry::MetadataSize => Ok(Some(Scalar::from_uint(size, ptr_size).into())),
VtblEntry::MetadataAlign => Ok(Some(Scalar::from_uint(align, ptr_size).into())),
VtblEntry::Vacant => Ok(None),
VtblEntry::Method(def_id, substs) => {
// Prepare the fn ptr we write into the vtable.
let instance =
ty::Instance::resolve_for_vtable(tcx, self.param_env, *def_id, substs)
.ok_or_else(|| err_inval!(TooGeneric))?;
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
Ok(Some(fn_ptr.into()))
}
}
})
.collect::<Result<Vec<_>, _>>()?;
let mut vtable_alloc =
self.memory.get_mut(vtable.into(), vtable_size, ptr_align)?.expect("not a ZST");
vtable_alloc.write_ptr_sized(ptr_size * 0, drop.into())?;
vtable_alloc.write_ptr_sized(ptr_size * 1, Scalar::from_uint(size, ptr_size).into())?;
vtable_alloc.write_ptr_sized(ptr_size * 2, Scalar::from_uint(align, ptr_size).into())?;
for (i, fn_ptr) in fn_ptrs.into_iter() {
vtable_alloc.write_ptr_sized(ptr_size * (3 + i as u64), fn_ptr.into())?;
for (idx, scalar) in scalars.into_iter().enumerate() {
if let Some(scalar) = scalar {
let idx: u64 = u64::try_from(idx).unwrap();
vtable_alloc.write_ptr_sized(ptr_size * idx, scalar)?;
}
}
M::after_static_mem_initialized(self, vtable, vtable_size)?;
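The rewritten build step above sizes the allocation as `ptr_size * vtable_entries.len()` and maps each entry to at most one pointer-sized value, skipping the write entirely for `Vacant` slots. A minimal standalone model of that loop, with plain `u64`s in place of interpreter scalars and allocations:

```rust
// Each entry becomes at most one pointer-sized value written at
// `ptr_size * index`; `Vacant` slots are simply not written.
#[derive(Clone, Copy)]
enum VtblEntry {
    MetadataDropInPlace,
    MetadataSize,
    MetadataAlign,
    Vacant,
    Method(u64), // stands in for a resolved fn pointer
}

fn build_vtable(entries: &[VtblEntry], drop_ptr: u64, size: u64, align: u64) -> Vec<Option<u64>> {
    // One slot per entry, so the allocation size is entries.len() * ptr_size.
    entries
        .iter()
        .map(|entry| match entry {
            VtblEntry::MetadataDropInPlace => Some(drop_ptr),
            VtblEntry::MetadataSize => Some(size),
            VtblEntry::MetadataAlign => Some(align),
            VtblEntry::Vacant => None,
            VtblEntry::Method(fn_ptr) => Some(*fn_ptr),
        })
        .collect()
}

fn main() {
    let entries = [
        VtblEntry::MetadataDropInPlace,
        VtblEntry::MetadataSize,
        VtblEntry::MetadataAlign,
        VtblEntry::Method(0x1000),
        VtblEntry::Vacant,
    ];
    let vtable = build_vtable(&entries, 0xd0, 16, 8);
    assert_eq!(vtable.len(), entries.len());
    assert_eq!(vtable[3], Some(0x1000)); // first method follows the common prefix
    assert_eq!(vtable[4], None); // vacant slot stays uninitialized
}
```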
@@ -99,16 +105,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
/// Resolves the function at the specified slot in the provided
/// vtable. An index of '0' corresponds to the first method
/// declared in the trait of the provided vtable.
/// vtable. Currently an index of '3' (`COMMON_VTABLE_ENTRIES.len()`)
/// corresponds to the first method declared in the trait of the provided vtable.
pub fn get_vtable_slot(
&self,
vtable: Scalar<M::PointerTag>,
idx: u64,
) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
let ptr_size = self.pointer_size();
// Skip over the 'drop_ptr', 'size', and 'align' fields.
let vtable_slot = vtable.ptr_offset(ptr_size * idx.checked_add(3).unwrap(), self)?;
let vtable_slot = vtable.ptr_offset(ptr_size * idx, self)?;
let vtable_slot = self
.memory
.get(vtable_slot, ptr_size, self.tcx.data_layout.pointer_align.abi)?
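The offset math in `get_vtable_slot` changes accordingly: instead of adding a hidden `+ 3` for the metadata fields, the caller now passes an index that already counts the `COMMON_VTABLE_ENTRIES` prefix, as the updated doc comment notes. A tiny illustration, assuming a 64-bit target:

```rust
// The hidden `+ 3` is gone; callers pass an index that already includes the
// COMMON_VTABLE_ENTRIES prefix.
fn vtable_slot_offset(ptr_size: u64, idx: u64) -> u64 {
    ptr_size * idx // was: ptr_size * (idx + 3)
}

fn main() {
    let ptr_size = 8; // assuming a 64-bit target
    let first_method_idx = 3; // COMMON_VTABLE_ENTRIES.len(): first method sits after the metadata
    assert_eq!(vtable_slot_offset(ptr_size, first_method_idx), 24);
}
```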
@@ -122,12 +127,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&self,
vtable: Scalar<M::PointerTag>,
) -> InterpResult<'tcx, (ty::Instance<'tcx>, Ty<'tcx>)> {
let pointer_size = self.pointer_size();
// We don't care about the pointee type; we just want a pointer.
let vtable = self
.memory
.get(vtable, self.tcx.data_layout.pointer_size, self.tcx.data_layout.pointer_align.abi)?
.get(
vtable,
pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES.len()).unwrap(),
self.tcx.data_layout.pointer_align.abi,
)?
.expect("cannot be a ZST");
let drop_fn = vtable.read_ptr_sized(Size::ZERO)?.check_init()?;
let drop_fn = vtable
.read_ptr_sized(
pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES_DROPINPLACE).unwrap(),
)?
.check_init()?;
// We *need* an instance here, no other kind of function value, to be able
// to determine the type.
let drop_instance = self.memory.get_fn(drop_fn)?.as_instance()?;
@@ -153,11 +167,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// the size, and the align (which we read below).
let vtable = self
.memory
.get(vtable, 3 * pointer_size, self.tcx.data_layout.pointer_align.abi)?
.get(
vtable,
pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES.len()).unwrap(),
self.tcx.data_layout.pointer_align.abi,
)?
.expect("cannot be a ZST");
let size = vtable.read_ptr_sized(pointer_size)?.check_init()?;
let size = vtable
.read_ptr_sized(pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES_SIZE).unwrap())?
.check_init()?;
let size = u64::try_from(self.force_bits(size, pointer_size)?).unwrap();
let align = vtable.read_ptr_sized(pointer_size * 2)?.check_init()?;
let align = vtable
.read_ptr_sized(pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES_ALIGN).unwrap())?
.check_init()?;
let align = u64::try_from(self.force_bits(align, pointer_size)?).unwrap();
let align = Align::from_bytes(align).map_err(|e| err_ub!(InvalidVtableAlignment(e)))?;
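Reads of the common metadata likewise go through the named index constants rather than hard-coded offsets 0, 1 and 2, and the interpreter only requests the common prefix of the allocation. A simplified model with the vtable as a slice of pointer-sized words (`COMMON_VTABLE_ENTRIES_LEN` here is just a stand-in for `COMMON_VTABLE_ENTRIES.len()`):

```rust
// Size and align are fetched by named index instead of hard-coded offsets,
// and only the common prefix of the vtable is inspected.
const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
const COMMON_VTABLE_ENTRIES_LEN: usize = 3; // COMMON_VTABLE_ENTRIES.len()

fn read_size_and_align(vtable: &[u64]) -> (u64, u64) {
    // The interpreter asks the allocation only for the common prefix.
    let common = &vtable[..COMMON_VTABLE_ENTRIES_LEN];
    (common[COMMON_VTABLE_ENTRIES_SIZE], common[COMMON_VTABLE_ENTRIES_ALIGN])
}

fn main() {
    // drop_in_place ptr, size, align, then method pointers.
    let vtable = [0xd0u64, 16, 8, 0x1000];
    assert_eq!(read_size_and_align(&vtable), (16, 8));
    assert_eq!(vtable[COMMON_VTABLE_ENTRIES_DROPINPLACE], 0xd0);
}
```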


@@ -196,7 +196,7 @@ use rustc_middle::mir::{self, Local, Location};
use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCast};
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
use rustc_middle::ty::{self, GenericParamDefKind, Instance, Ty, TyCtxt, TypeFoldable};
use rustc_middle::ty::{self, GenericParamDefKind, Instance, Ty, TyCtxt, TypeFoldable, VtblEntry};
use rustc_middle::{middle::codegen_fn_attrs::CodegenFnAttrFlags, mir::visit::TyContext};
use rustc_session::config::EntryFnType;
use rustc_session::lint::builtin::LARGE_ASSIGNMENTS;
@@ -1091,21 +1091,22 @@ fn create_mono_items_for_vtable_methods<'tcx>(
assert!(!poly_trait_ref.has_escaping_bound_vars());
// Walk all methods of the trait, including those of its supertraits
let methods = tcx.vtable_methods(poly_trait_ref);
let methods = methods
let entries = tcx.vtable_entries(poly_trait_ref);
let methods = entries
.iter()
.cloned()
.filter_map(|method| method)
.map(|(def_id, substs)| {
ty::Instance::resolve_for_vtable(
.filter_map(|entry| match entry {
VtblEntry::MetadataDropInPlace
| VtblEntry::MetadataSize
| VtblEntry::MetadataAlign
| VtblEntry::Vacant => None,
VtblEntry::Method(def_id, substs) => ty::Instance::resolve_for_vtable(
tcx,
ty::ParamEnv::reveal_all(),
def_id,
*def_id,
substs,
)
.unwrap()
.filter(|instance| should_codegen_locally(tcx, instance)),
})
.filter(|&instance| should_codegen_locally(tcx, &instance))
.map(|item| create_fn_mono_item(tcx, item, source));
output.extend(methods);
}
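On the collector side, the same entry list drives mono-item collection: only `Method` entries resolve to instances, while the metadata and `Vacant` slots are ignored. A self-contained sketch of that filtering, with a placeholder `DefId` and with instance resolution and the `should_codegen_locally` check elided:

```rust
// Only `Method` entries contribute items to codegen; metadata and vacant
// slots are skipped. `DefId` is a placeholder type.
#[derive(Clone, Copy, Debug, PartialEq)]
struct DefId(u32);

#[derive(Clone, Copy)]
enum VtblEntry {
    MetadataDropInPlace,
    MetadataSize,
    MetadataAlign,
    Vacant,
    Method(DefId),
}

fn vtable_methods_to_codegen(entries: &[VtblEntry]) -> Vec<DefId> {
    entries
        .iter()
        .filter_map(|entry| match entry {
            VtblEntry::MetadataDropInPlace
            | VtblEntry::MetadataSize
            | VtblEntry::MetadataAlign
            | VtblEntry::Vacant => None,
            VtblEntry::Method(def_id) => Some(*def_id),
        })
        .collect()
}

fn main() {
    let entries = [
        VtblEntry::MetadataDropInPlace,
        VtblEntry::MetadataSize,
        VtblEntry::MetadataAlign,
        VtblEntry::Method(DefId(7)),
        VtblEntry::Vacant,
        VtblEntry::Method(DefId(9)),
    ];
    assert_eq!(vtable_methods_to_codegen(&entries), vec![DefId(7), DefId(9)]);
}
```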