interpret: rename relocation → provenance
parent 332cc8fb75
commit e63a625711
14 changed files with 161 additions and 164 deletions
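Rename map, compiled from the hunks below for easier navigation (old name → new name):

    Relocations                                  → ProvenanceMap
    Allocation::relocations()                    → Allocation::provenance()
    Allocation::get_relocations()                → Allocation::range_get_provenance()
    Allocation::has_relocations()                → Allocation::range_has_provenance()
    Allocation::check_relocations()              → Allocation::check_provenance()
    Allocation::check_relocation_edges()         → Allocation::check_provenance_edges()
    Allocation::clear_relocations()              → Allocation::clear_provenance()
    Allocation::prepare_relocation_copy()        → Allocation::prepare_provenance_copy()
    Allocation::mark_relocation_range()          → Allocation::mark_provenance_range()
    AllocationRelocations { dest_relocations }   → AllocationProvenance { dest_provenance }
    AllocRef::has_relocations()                  → AllocRef::has_provenance()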
@@ -430,7 +430,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
         let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len()).to_vec();
         data_ctx.define(bytes.into_boxed_slice());
 
-        for &(offset, alloc_id) in alloc.relocations().iter() {
+        for &(offset, alloc_id) in alloc.provenance().iter() {
             let addend = {
                 let endianness = tcx.data_layout.endian;
                 let offset = offset.bytes() as usize;
@@ -127,7 +127,7 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
             //
             // We could remove this hack whenever we decide to drop macOS 10.10 support.
             if self.tcx.sess.target.options.is_like_osx {
-                // The `inspect` method is okay here because we checked relocations, and
+                // The `inspect` method is okay here because we checked for provenance, and
                 // because we are doing this access to inspect the final interpreter state
                 // (not as part of the interpreter execution).
                 //
@@ -296,17 +296,17 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
 
 pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAllocation<'tcx>) -> RValue<'gcc> {
     let alloc = alloc.inner();
-    let mut llvals = Vec::with_capacity(alloc.relocations().len() + 1);
+    let mut llvals = Vec::with_capacity(alloc.provenance().len() + 1);
     let dl = cx.data_layout();
     let pointer_size = dl.pointer_size.bytes() as usize;
 
     let mut next_offset = 0;
-    for &(offset, alloc_id) in alloc.relocations().iter() {
+    for &(offset, alloc_id) in alloc.provenance().iter() {
         let offset = offset.bytes();
         assert_eq!(offset as usize as u64, offset);
         let offset = offset as usize;
         if offset > next_offset {
-            // This `inspect` is okay since we have checked that it is not within a relocation, it
+            // This `inspect` is okay since we have checked that it is not within a pointer with provenance, it
             // is within the bounds of the allocation, and it doesn't affect interpreter execution
             // (we inspect the result after interpreter execution). Any undef byte is replaced with
             // some arbitrary byte value.
@@ -319,7 +319,7 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
                 read_target_uint( dl.endian,
                     // This `inspect` is okay since it is within the bounds of the allocation, it doesn't
                     // affect interpreter execution (we inspect the result after interpreter execution),
-                    // and we properly interpret the relocation as a relocation pointer offset.
+                    // and we properly interpret the provenance as a relocation pointer offset.
                     alloc.inspect_with_uninit_and_ptr_outside_interpreter(offset..(offset + pointer_size)),
                 )
                 .expect("const_alloc_to_llvm: could not read relocation pointer")
@@ -336,7 +336,7 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
     }
     if alloc.len() >= next_offset {
         let range = next_offset..alloc.len();
-        // This `inspect` is okay since we have check that it is after all relocations, it is
+        // This `inspect` is okay since we have check that it is after all provenance, it is
         // within the bounds of the allocation, and it doesn't affect interpreter execution (we
         // inspect the result after interpreter execution). Any undef byte is replaced with some
         // arbitrary byte value.
@@ -27,12 +27,12 @@ use tracing::debug;
 
 pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<'_>) -> &'ll Value {
     let alloc = alloc.inner();
-    let mut llvals = Vec::with_capacity(alloc.relocations().len() + 1);
+    let mut llvals = Vec::with_capacity(alloc.provenance().len() + 1);
     let dl = cx.data_layout();
     let pointer_size = dl.pointer_size.bytes() as usize;
 
-    // Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`,
-    // so `range` must be within the bounds of `alloc` and not contain or overlap a relocation.
+    // Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`, so `range`
+    // must be within the bounds of `alloc` and not contain or overlap a pointer provenance.
     fn append_chunks_of_init_and_uninit_bytes<'ll, 'a, 'b>(
         llvals: &mut Vec<&'ll Value>,
         cx: &'a CodegenCx<'ll, 'b>,
@@ -79,12 +79,12 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
     }
 
     let mut next_offset = 0;
-    for &(offset, alloc_id) in alloc.relocations().iter() {
+    for &(offset, alloc_id) in alloc.provenance().iter() {
         let offset = offset.bytes();
         assert_eq!(offset as usize as u64, offset);
         let offset = offset as usize;
         if offset > next_offset {
-            // This `inspect` is okay since we have checked that it is not within a relocation, it
+            // This `inspect` is okay since we have checked that there is no provenance, it
             // is within the bounds of the allocation, and it doesn't affect interpreter execution
             // (we inspect the result after interpreter execution).
             append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, next_offset..offset);
@@ -93,7 +93,7 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
                 dl.endian,
                 // This `inspect` is okay since it is within the bounds of the allocation, it doesn't
                 // affect interpreter execution (we inspect the result after interpreter execution),
-                // and we properly interpret the relocation as a relocation pointer offset.
+                // and we properly interpret the provenance as a relocation pointer offset.
                 alloc.inspect_with_uninit_and_ptr_outside_interpreter(offset..(offset + pointer_size)),
             )
             .expect("const_alloc_to_llvm: could not read relocation pointer")
@@ -121,7 +121,7 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
     }
     if alloc.len() >= next_offset {
         let range = next_offset..alloc.len();
-        // This `inspect` is okay since we have check that it is after all relocations, it is
+        // This `inspect` is okay since we have check that it is after all provenance, it is
         // within the bounds of the allocation, and it doesn't affect interpreter execution (we
         // inspect the result after interpreter execution).
         append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range);
@@ -479,7 +479,7 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
             //
             // We could remove this hack whenever we decide to drop macOS 10.10 support.
             if self.tcx.sess.target.is_like_osx {
-                // The `inspect` method is okay here because we checked relocations, and
+                // The `inspect` method is okay here because we checked for provenance, and
                 // because we are doing this access to inspect the final interpreter state
                 // (not as part of the interpreter execution).
                 //
@@ -487,7 +487,7 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
                 // happens to be zero. Instead, we should only check the value of defined bytes
                 // and set all undefined bytes to zero if this allocation is headed for the
                 // BSS.
-                let all_bytes_are_zero = alloc.relocations().is_empty()
+                let all_bytes_are_zero = alloc.provenance().is_empty()
                     && alloc
                         .inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len())
                         .iter()
@@ -511,9 +511,9 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
                         section.as_str().as_ptr().cast(),
                         section.as_str().len() as c_uint,
                     );
-                    assert!(alloc.relocations().is_empty());
+                    assert!(alloc.provenance().is_empty());
 
-                    // The `inspect` method is okay here because we checked relocations, and
+                    // The `inspect` method is okay here because we checked for provenance, and
                     // because we are doing this access to inspect the final interpreter state (not
                     // as part of the interpreter execution).
                     let bytes =
@@ -134,7 +134,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
         alloc.mutability = Mutability::Not;
     };
     // link the alloc id to the actual allocation
-    leftover_allocations.extend(alloc.relocations().iter().map(|&(_, alloc_id)| alloc_id));
+    leftover_allocations.extend(alloc.provenance().iter().map(|&(_, alloc_id)| alloc_id));
     let alloc = tcx.intern_const_alloc(alloc);
     tcx.set_alloc_id_memory(alloc_id, alloc);
     None
@@ -191,10 +191,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
             return Ok(true);
         };
 
-        // If there are no relocations in this allocation, it does not contain references
+        // If there is no provenance in this allocation, it does not contain references
         // that point to another allocation, and we can avoid the interning walk.
         if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
-            if !alloc.has_relocations() {
+            if !alloc.has_provenance() {
                 return Ok(false);
             }
         } else {
@@ -233,8 +233,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
     }
 
     fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
-        // Handle Reference types, as these are the only relocations supported by const eval.
-        // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
+        // Handle Reference types, as these are the only types with provenance supported by const eval.
+        // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
         let tcx = self.ecx.tcx;
         let ty = mplace.layout.ty;
         if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
@@ -410,7 +410,7 @@ pub fn intern_const_alloc_recursive<
         // references and a `leftover_allocations` set (where we only have a todo-list here).
         // So we hand-roll the interning logic here again.
         match intern_kind {
-            // Statics may contain mutable allocations even behind relocations.
+            // Statics may point to mutable allocations.
             // Even for immutable statics it would be ok to have mutable allocations behind
             // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
             InternKind::Static(_) => {}
@@ -441,7 +441,7 @@ pub fn intern_const_alloc_recursive<
         }
         let alloc = tcx.intern_const_alloc(alloc);
         tcx.set_alloc_id_memory(alloc_id, alloc);
-        for &(_, alloc_id) in alloc.inner().relocations().iter() {
+        for &(_, alloc_id) in alloc.inner().provenance().iter() {
            if leftover_allocations.insert(alloc_id) {
                todo.push(alloc_id);
            }
@@ -326,7 +326,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
     /// cache the result. (This relies on `AllocMap::get_or` being able to add the
     /// owned allocation to the map even when the map is shared.)
     ///
-    /// This must only fail if `alloc` contains relocations.
+    /// This must only fail if `alloc` contains provenance.
     fn adjust_allocation<'b>(
         ecx: &InterpCx<'mir, 'tcx, Self>,
         id: AllocId,
@@ -214,7 +214,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         self.allocate_raw_ptr(alloc, kind).unwrap()
     }
 
-    /// This can fail only of `alloc` contains relocations.
+    /// This can fail only of `alloc` contains provenance.
     pub fn allocate_raw_ptr(
         &mut self,
         alloc: Allocation,
@@ -794,10 +794,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         todo.extend(static_roots);
         while let Some(id) = todo.pop() {
             if reachable.insert(id) {
-                // This is a new allocation, add its relocations to `todo`.
+                // This is a new allocation, add the allocation it points to to `todo`.
                 if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                     todo.extend(
-                        alloc.relocations().values().filter_map(|prov| prov.get_alloc_id()),
+                        alloc.provenance().values().filter_map(|prov| prov.get_alloc_id()),
                     );
                 }
             }
@@ -833,7 +833,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> std::fmt::Debug for DumpAllocs<'a,
             allocs_to_print: &mut VecDeque<AllocId>,
             alloc: &Allocation<Prov, Extra>,
         ) -> std::fmt::Result {
-            for alloc_id in alloc.relocations().values().filter_map(|prov| prov.get_alloc_id()) {
+            for alloc_id in alloc.provenance().values().filter_map(|prov| prov.get_alloc_id()) {
                 allocs_to_print.push_back(alloc_id);
             }
             write!(fmt, "{}", display_allocation(tcx, alloc))
@@ -960,9 +960,9 @@ impl<'tcx, 'a, Prov: Provenance, Extra> AllocRef<'a, 'tcx, Prov, Extra> {
             .map_err(|e| e.to_interp_error(self.alloc_id))?)
     }
 
-    /// Returns whether the allocation has relocations for the entire range of the `AllocRef`.
-    pub(crate) fn has_relocations(&self) -> bool {
-        self.alloc.has_relocations(&self.tcx, self.range)
+    /// Returns whether the allocation has provenance anywhere in the range of the `AllocRef`.
+    pub(crate) fn has_provenance(&self) -> bool {
+        self.alloc.range_has_provenance(&self.tcx, self.range)
     }
 }
 
@@ -1078,17 +1078,17 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             return Ok(());
         };
 
-        // This checks relocation edges on the src, which needs to happen before
-        // `prepare_relocation_copy`.
+        // This checks provenance edges on the src, which needs to happen before
+        // `prepare_provenance_copy`.
         let src_bytes = src_alloc
             .get_bytes_with_uninit_and_ptr(&tcx, src_range)
             .map_err(|e| e.to_interp_error(src_alloc_id))?
             .as_ptr(); // raw ptr, so we can also get a ptr to the destination allocation
-        // first copy the relocations to a temporary buffer, because
-        // `get_bytes_mut` will clear the relocations, which is correct,
-        // since we don't want to keep any relocations at the target.
-        let relocations =
-            src_alloc.prepare_relocation_copy(self, src_range, dest_offset, num_copies);
+        // first copy the provenance to a temporary buffer, because
+        // `get_bytes_mut` will clear the provenance, which is correct,
+        // since we don't want to keep any provenance at the target.
+        let provenance =
+            src_alloc.prepare_provenance_copy(self, src_range, dest_offset, num_copies);
         // Prepare a copy of the initialization mask.
         let compressed = src_alloc.compress_uninit_range(src_range);
 
@@ -1117,7 +1117,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
-            // We can forget about the relocations, this is all not initialized anyway.
+            // We can forget about the provenance, this is all not initialized anyway.
            return Ok(());
        }
 
@@ -1161,8 +1161,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             alloc_range(dest_offset, size), // just a single copy (i.e., not full `dest_range`)
             num_copies,
         );
-        // copy the relocations to the destination
-        dest_alloc.mark_relocation_range(relocations);
+        // copy the provenance to the destination
+        dest_alloc.mark_provenance_range(provenance);
 
         Ok(())
     }
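The two hunks above show the copy path split into a prepare step (`prepare_provenance_copy`, which builds a destination-relative provenance list for all `num_copies` repetitions) and a mark step (`mark_provenance_range`, which bulk-inserts it). A minimal, self-contained sketch of the offset arithmetic involved — plain `u64` instead of `Size`, and assuming each entry is re-based as `dest_offset + (offset - src_start)`, which is what the visible `let dest_offset = dest + size * i;` line feeds into (the rest of that expression is elided in this diff):

    /// Toy version of the provenance re-basing done in `prepare_provenance_copy`.
    /// `entries` are offsets of provenance entries inside the source range
    /// `src_start..src_start + size`.
    fn dest_offsets(src_start: u64, entries: &[u64], dest: u64, size: u64, count: u64) -> Vec<u64> {
        let mut out = Vec::with_capacity(entries.len() * count as usize);
        for i in 0..count {
            // Copy `i` of the source range begins here in the destination
            // (the real code does this with `Size` operations).
            let dest_offset = dest + size * i;
            out.extend(entries.iter().map(|&offset| dest_offset + (offset - src_start)));
        }
        out
    }

    fn main() {
        // One provenance entry at offset 4 of an 8-byte range starting at 0,
        // copied twice to destination offset 32: entries land at 36 and 44.
        assert_eq!(dest_offsets(0, &[4], 32, 8, 2), vec![36, 44]);
    }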
@@ -34,11 +34,11 @@ pub struct Allocation<Prov = AllocId, Extra = ()> {
     /// The actual bytes of the allocation.
     /// Note that the bytes of a pointer represent the offset of the pointer.
     bytes: Box<[u8]>,
-    /// Maps from byte addresses to extra data for each pointer.
+    /// Maps from byte addresses to extra provenance data for each pointer.
     /// Only the first byte of a pointer is inserted into the map; i.e.,
     /// every entry in this map applies to `pointer_size` consecutive bytes starting
     /// at the given offset.
-    relocations: Relocations<Prov>,
+    provenance: ProvenanceMap<Prov>,
     /// Denotes which part of this allocation is initialized.
     init_mask: InitMask,
     /// The alignment of the allocation to detect unaligned reads.
@@ -84,7 +84,7 @@ impl hash::Hash for Allocation {
         }
 
         // Hash the other fields as usual.
-        self.relocations.hash(state);
+        self.provenance.hash(state);
         self.init_mask.hash(state);
         self.align.hash(state);
         self.mutability.hash(state);
@@ -211,7 +211,7 @@ impl<Prov> Allocation<Prov> {
         let size = Size::from_bytes(bytes.len());
         Self {
             bytes,
-            relocations: Relocations::new(),
+            provenance: ProvenanceMap::new(),
             init_mask: InitMask::new(size, true),
             align,
             mutability,
@@ -246,7 +246,7 @@ impl<Prov> Allocation<Prov> {
         let bytes = unsafe { bytes.assume_init() };
         Ok(Allocation {
             bytes,
-            relocations: Relocations::new(),
+            provenance: ProvenanceMap::new(),
             init_mask: InitMask::new(size, false),
             align,
             mutability: Mutability::Mut,
@@ -266,22 +266,22 @@ impl Allocation {
     ) -> Result<Allocation<Prov, Extra>, Err> {
         // Compute new pointer provenance, which also adjusts the bytes.
         let mut bytes = self.bytes;
-        let mut new_relocations = Vec::with_capacity(self.relocations.0.len());
+        let mut new_provenance = Vec::with_capacity(self.provenance.0.len());
         let ptr_size = cx.data_layout().pointer_size.bytes_usize();
         let endian = cx.data_layout().endian;
-        for &(offset, alloc_id) in self.relocations.iter() {
+        for &(offset, alloc_id) in self.provenance.iter() {
             let idx = offset.bytes_usize();
             let ptr_bytes = &mut bytes[idx..idx + ptr_size];
             let bits = read_target_uint(endian, ptr_bytes).unwrap();
             let (ptr_prov, ptr_offset) =
                 adjust_ptr(Pointer::new(alloc_id, Size::from_bytes(bits)))?.into_parts();
             write_target_uint(endian, ptr_bytes, ptr_offset.bytes().into()).unwrap();
-            new_relocations.push((offset, ptr_prov));
+            new_provenance.push((offset, ptr_prov));
         }
         // Create allocation.
         Ok(Allocation {
             bytes,
-            relocations: Relocations::from_presorted(new_relocations),
+            provenance: ProvenanceMap::from_presorted(new_provenance),
             init_mask: self.init_mask,
             align: self.align,
             mutability: self.mutability,
@@ -300,8 +300,8 @@ impl<Prov, Extra> Allocation<Prov, Extra> {
         Size::from_bytes(self.len())
     }
 
-    /// Looks at a slice which may describe uninitialized bytes or describe a relocation. This differs
-    /// from `get_bytes_with_uninit_and_ptr` in that it does no relocation checks (even on the
+    /// Looks at a slice which may contain uninitialized bytes or provenance. This differs
+    /// from `get_bytes_with_uninit_and_ptr` in that it does no provenance checks (even on the
     /// edges) at all.
     /// This must not be used for reads affecting the interpreter execution.
     pub fn inspect_with_uninit_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
@@ -313,23 +313,23 @@ impl<Prov, Extra> Allocation<Prov, Extra> {
         &self.init_mask
     }
 
-    /// Returns the relocation list.
-    pub fn relocations(&self) -> &Relocations<Prov> {
-        &self.relocations
+    /// Returns the provenance map.
+    pub fn provenance(&self) -> &ProvenanceMap<Prov> {
+        &self.provenance
     }
 }
 
 /// Byte accessors.
 impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
     /// This is the entirely abstraction-violating way to just grab the raw bytes without
-    /// caring about relocations. It just deduplicates some code between `read_scalar`
+    /// caring about provenance. It just deduplicates some code between `read_scalar`
     /// and `get_bytes_internal`.
     fn get_bytes_even_more_internal(&self, range: AllocRange) -> &[u8] {
         &self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
     }
 
     /// The last argument controls whether we error out when there are uninitialized or pointer
-    /// bytes. However, we *always* error when there are relocations overlapping the edges of the
+    /// bytes. However, we *always* error when there is provenance overlapping the edges of the
     /// range.
     ///
     /// You should never call this, call `get_bytes` or `get_bytes_with_uninit_and_ptr` instead,
@@ -347,10 +347,10 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
     ) -> AllocResult<&[u8]> {
         if check_init_and_ptr {
             self.check_init(range)?;
-            self.check_relocations(cx, range)?;
+            self.check_provenance(cx, range)?;
         } else {
-            // We still don't want relocations on the *edges*.
-            self.check_relocation_edges(cx, range)?;
+            // We still don't want provenance on the *edges*.
+            self.check_provenance_edges(cx, range)?;
         }
 
         Ok(self.get_bytes_even_more_internal(range))
@@ -368,7 +368,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
     }
 
     /// It is the caller's responsibility to handle uninitialized and pointer bytes.
-    /// However, this still checks that there are no relocations on the *edges*.
+    /// However, this still checks that there is no provenance on the *edges*.
     ///
     /// It is the caller's responsibility to check bounds and alignment beforehand.
     #[inline]
@@ -380,7 +380,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         self.get_bytes_internal(cx, range, false)
     }
 
-    /// Just calling this already marks everything as defined and removes relocations,
+    /// Just calling this already marks everything as defined and removes provenance,
     /// so be sure to actually put data there!
     ///
     /// It is the caller's responsibility to check bounds and alignment beforehand.
@@ -392,7 +392,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         range: AllocRange,
     ) -> AllocResult<&mut [u8]> {
         self.mark_init(range, true);
-        self.clear_relocations(cx, range)?;
+        self.clear_provenance(cx, range)?;
 
         Ok(&mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()])
     }
@@ -404,7 +404,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         range: AllocRange,
     ) -> AllocResult<*mut [u8]> {
         self.mark_init(range, true);
-        self.clear_relocations(cx, range)?;
+        self.clear_provenance(cx, range)?;
 
         assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
         let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize());
@@ -415,7 +415,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
 
 /// Reading and writing.
 impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
-    /// Validates that this memory range is initiailized and contains no relocations.
+    /// Validates that this memory range is initiailized and contains no provenance.
     pub fn check_bytes(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
         // This implicitly does all the checking we are asking for.
         self.get_bytes(cx, range)?;
@@ -447,17 +447,17 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
             return Err(AllocError::InvalidUninitBytes(None));
         }
 
-        // If we are doing a pointer read, and there is a relocation exactly where we
-        // are reading, then we can put data and relocation back together and return that.
-        if read_provenance && let Some(&prov) = self.relocations.get(&range.start) {
-            // We already checked init and relocations, so we can use this function.
+        // If we are doing a pointer read, and there is provenance exactly where we
+        // are reading, then we can put data and provenance back together and return that.
+        if read_provenance && let Some(&prov) = self.provenance.get(&range.start) {
+            // We already checked init and provenance, so we can use this function.
             let bytes = self.get_bytes_even_more_internal(range);
             let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();
             let ptr = Pointer::new(prov, Size::from_bytes(bits));
             return Ok(Scalar::from_pointer(ptr, cx));
         }
 
-        // If we are *not* reading a pointer, and we can just ignore relocations,
+        // If we are *not* reading a pointer, and we can just ignore provenance,
         // then do exactly that.
         if !read_provenance && Prov::OFFSET_IS_ADDR {
             // We just strip provenance.
@@ -469,8 +469,8 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         // It's complicated. Better make sure there is no provenance anywhere.
         // FIXME: If !OFFSET_IS_ADDR, this is the best we can do. But if OFFSET_IS_ADDR, then
         // `read_pointer` is true and we ideally would distinguish the following two cases:
-        // - The entire `range` is covered by 2 relocations for the same provenance.
-        //   Then we should return a pointer with that provenance.
+        // - The entire `range` is covered by the same provenance, stored in two separate entries of
+        //   the provenance map. Then we should return a pointer with that provenance.
         // - The range has inhomogeneous provenance. Then we should return just the
         //   underlying bits.
         let bytes = self.get_bytes(cx, range)?;
@@ -508,9 +508,9 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
         let dst = self.get_bytes_mut(cx, range)?;
         write_target_uint(endian, dst, bytes).unwrap();
 
-        // See if we have to also write a relocation.
+        // See if we have to also store some provenance.
         if let Some(provenance) = provenance {
-            self.relocations.0.insert(range.start, provenance);
+            self.provenance.0.insert(range.start, provenance);
         }
 
         Ok(())
@@ -519,64 +519,64 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
     /// Write "uninit" to the given memory range.
     pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
         self.mark_init(range, false);
-        self.clear_relocations(cx, range)?;
+        self.clear_provenance(cx, range)?;
         return Ok(());
     }
 }
 
-/// Relocations.
+/// Provenance.
 impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
-    /// Returns all relocations overlapping with the given pointer-offset pair.
-    fn get_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Prov)] {
+    /// Returns all provenance overlapping with the given pointer-offset pair.
+    fn range_get_provenance(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Prov)] {
         // We have to go back `pointer_size - 1` bytes, as that one would still overlap with
         // the beginning of this range.
         let start = range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1);
-        self.relocations.range(Size::from_bytes(start)..range.end())
+        self.provenance.range(Size::from_bytes(start)..range.end())
     }
 
-    /// Returns whether this allocation has relocations overlapping with the given range.
+    /// Returns whether this allocation has progrnance overlapping with the given range.
     ///
-    /// Note: this function exists to allow `get_relocations` to be private, in order to somewhat
-    /// limit access to relocations outside of the `Allocation` abstraction.
+    /// Note: this function exists to allow `range_get_provenance` to be private, in order to somewhat
+    /// limit access to provenance outside of the `Allocation` abstraction.
     ///
-    pub fn has_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> bool {
-        !self.get_relocations(cx, range).is_empty()
+    pub fn range_has_provenance(&self, cx: &impl HasDataLayout, range: AllocRange) -> bool {
+        !self.range_get_provenance(cx, range).is_empty()
     }
 
-    /// Checks that there are no relocations overlapping with the given range.
+    /// Checks that there is no provenance overlapping with the given range.
     #[inline(always)]
-    fn check_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
-        if self.has_relocations(cx, range) { Err(AllocError::ReadPointerAsBytes) } else { Ok(()) }
+    fn check_provenance(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
+        if self.range_has_provenance(cx, range) { Err(AllocError::ReadPointerAsBytes) } else { Ok(()) }
     }
 
-    /// Removes all relocations inside the given range.
-    /// If there are relocations overlapping with the edges, they
+    /// Removes all provenance inside the given range.
+    /// If there is provenance overlapping with the edges, it
     /// are removed as well *and* the bytes they cover are marked as
     /// uninitialized. This is a somewhat odd "spooky action at a distance",
     /// but it allows strictly more code to run than if we would just error
     /// immediately in that case.
-    fn clear_relocations(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult
+    fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult
     where
         Prov: Provenance,
     {
-        // Find the start and end of the given range and its outermost relocations.
+        // Find the start and end of the given range and its outermost provenance.
         let (first, last) = {
-            // Find all relocations overlapping the given range.
-            let relocations = self.get_relocations(cx, range);
-            if relocations.is_empty() {
+            // Find all provenance overlapping the given range.
+            let provenance = self.range_get_provenance(cx, range);
+            if provenance.is_empty() {
                 return Ok(());
             }
 
             (
-                relocations.first().unwrap().0,
-                relocations.last().unwrap().0 + cx.data_layout().pointer_size,
+                provenance.first().unwrap().0,
+                provenance.last().unwrap().0 + cx.data_layout().pointer_size,
             )
         };
         let start = range.start;
         let end = range.end();
 
-        // We need to handle clearing the relocations from parts of a pointer.
-        // FIXME: Miri should preserve partial relocations; see
+        // We need to handle clearing the provenance from parts of a pointer.
+        // FIXME: Miri should preserve partial provenance; see
         // https://github.com/rust-lang/miri/issues/2181.
         if first < start {
             if Prov::ERR_ON_PARTIAL_PTR_OVERWRITE {
@@ -599,41 +599,40 @@ impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
             self.init_mask.set_range(end, last, false);
         }
 
-        // Forget all the relocations.
-        // Since relocations do not overlap, we know that removing until `last` (exclusive) is fine,
-        // i.e., this will not remove any other relocations just after the ones we care about.
-        self.relocations.0.remove_range(first..last);
+        // Forget all the provenance.
+        // Since provenance do not overlap, we know that removing until `last` (exclusive) is fine,
+        // i.e., this will not remove any other provenance just after the ones we care about.
+        self.provenance.0.remove_range(first..last);
 
         Ok(())
     }
 
-    /// Errors if there are relocations overlapping with the edges of the
-    /// given memory range.
+    /// Errors if there is provenance overlapping with the edges of the given memory range.
     #[inline]
-    fn check_relocation_edges(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
-        self.check_relocations(cx, alloc_range(range.start, Size::ZERO))?;
-        self.check_relocations(cx, alloc_range(range.end(), Size::ZERO))?;
+    fn check_provenance_edges(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
+        self.check_provenance(cx, alloc_range(range.start, Size::ZERO))?;
+        self.check_provenance(cx, alloc_range(range.end(), Size::ZERO))?;
         Ok(())
     }
 }
 
-/// "Relocations" stores the provenance information of pointers stored in memory.
+/// Stores the provenance information of pointers stored in memory.
 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
-pub struct Relocations<Prov = AllocId>(SortedMap<Size, Prov>);
+pub struct ProvenanceMap<Prov = AllocId>(SortedMap<Size, Prov>);
 
-impl<Prov> Relocations<Prov> {
+impl<Prov> ProvenanceMap<Prov> {
     pub fn new() -> Self {
-        Relocations(SortedMap::new())
+        ProvenanceMap(SortedMap::new())
     }
 
-    // The caller must guarantee that the given relocations are already sorted
+    // The caller must guarantee that the given provenance list is already sorted
     // by address and contain no duplicates.
     pub fn from_presorted(r: Vec<(Size, Prov)>) -> Self {
-        Relocations(SortedMap::from_presorted_elements(r))
+        ProvenanceMap(SortedMap::from_presorted_elements(r))
     }
 }
 
-impl<Prov> Deref for Relocations<Prov> {
+impl<Prov> Deref for ProvenanceMap<Prov> {
     type Target = SortedMap<Size, Prov>;
 
     fn deref(&self) -> &Self::Target {
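For readers outside rustc: the overlap rule that `range_get_provenance` encodes above, reduced to a self-contained toy. A `BTreeMap` stands in for rustc's `SortedMap<Size, Prov>`, and the 8-byte pointer size is an assumption — rustc reads the real size from the target data layout:

    use std::collections::BTreeMap;

    const PTR_SIZE: u64 = 8; // assumed pointer size

    #[derive(Default)]
    struct ToyProvenanceMap(BTreeMap<u64, u32>); // offset of a pointer's first byte -> toy AllocId

    impl ToyProvenanceMap {
        // Counterpart of `range_get_provenance`: walk back `PTR_SIZE - 1` bytes,
        // since a pointer starting there still overlaps the start of the range.
        fn range_get(&self, start: u64, end: u64) -> Vec<(u64, u32)> {
            let lo = start.saturating_sub(PTR_SIZE - 1);
            self.0.range(lo..end).map(|(&o, &id)| (o, id)).collect()
        }

        // Counterpart of `range_has_provenance`.
        fn range_has(&self, start: u64, end: u64) -> bool {
            !self.range_get(start, end).is_empty()
        }
    }

    fn main() {
        let mut map = ToyProvenanceMap::default();
        map.0.insert(16, 42); // a pointer to toy allocation 42, stored at offsets 16..24
        assert!(map.range_has(20, 21));  // a middle byte of that pointer overlaps the range
        assert!(!map.range_has(0, 16)); // bytes before the pointer carry no provenance
    }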
@@ -641,36 +640,36 @@ impl<Prov> Deref for Relocations<Prov> {
     }
 }
 
-/// A partial, owned list of relocations to transfer into another allocation.
+/// A partial, owned list of provenance to transfer into another allocation.
 ///
 /// Offsets are already adjusted to the destination allocation.
-pub struct AllocationRelocations<Prov> {
-    dest_relocations: Vec<(Size, Prov)>,
+pub struct AllocationProvenance<Prov> {
+    dest_provenance: Vec<(Size, Prov)>,
 }
 
 impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
-    pub fn prepare_relocation_copy(
+    pub fn prepare_provenance_copy(
         &self,
         cx: &impl HasDataLayout,
         src: AllocRange,
         dest: Size,
         count: u64,
-    ) -> AllocationRelocations<Prov> {
-        let relocations = self.get_relocations(cx, src);
-        if relocations.is_empty() {
-            return AllocationRelocations { dest_relocations: Vec::new() };
+    ) -> AllocationProvenance<Prov> {
+        let provenance = self.range_get_provenance(cx, src);
+        if provenance.is_empty() {
+            return AllocationProvenance { dest_provenance: Vec::new() };
         }
 
         let size = src.size;
-        let mut new_relocations = Vec::with_capacity(relocations.len() * (count as usize));
+        let mut new_provenance = Vec::with_capacity(provenance.len() * (count as usize));
 
         // If `count` is large, this is rather wasteful -- we are allocating a big array here, which
         // is mostly filled with redundant information since it's just N copies of the same `Prov`s
-        // at slightly adjusted offsets. The reason we do this is so that in `mark_relocation_range`
+        // at slightly adjusted offsets. The reason we do this is so that in `mark_provenance_range`
         // we can use `insert_presorted`. That wouldn't work with an `Iterator` that just produces
-        // the right sequence of relocations for all N copies.
+        // the right sequence of provenance for all N copies.
         for i in 0..count {
-            new_relocations.extend(relocations.iter().map(|&(offset, reloc)| {
+            new_provenance.extend(provenance.iter().map(|&(offset, reloc)| {
                 // compute offset for current repetition
                 let dest_offset = dest + size * i; // `Size` operations
                 (
@@ -681,17 +680,17 @@ impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
             }));
         }
 
-        AllocationRelocations { dest_relocations: new_relocations }
+        AllocationProvenance { dest_provenance: new_provenance }
     }
 
-    /// Applies a relocation copy.
-    /// The affected range, as defined in the parameters to `prepare_relocation_copy` is expected
-    /// to be clear of relocations.
+    /// Applies a provenance copy.
+    /// The affected range, as defined in the parameters to `prepare_provenance_copy` is expected
+    /// to be clear of provenance.
     ///
     /// This is dangerous to use as it can violate internal `Allocation` invariants!
     /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
-    pub fn mark_relocation_range(&mut self, relocations: AllocationRelocations<Prov>) {
-        self.relocations.0.insert_presorted(relocations.dest_relocations);
+    pub fn mark_provenance_range(&mut self, provenance: AllocationProvenance<Prov>) {
+        self.provenance.0.insert_presorted(provenance.dest_provenance);
     }
 }
 
@@ -128,7 +128,7 @@ pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
 
 pub use self::allocation::{
     alloc_range, AllocRange, Allocation, ConstAllocation, InitChunk, InitChunkIter, InitMask,
-    Relocations,
+    ProvenanceMap,
 };
 
 pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
@@ -130,9 +130,7 @@ pub enum Scalar<Prov = AllocId> {
     /// The raw bytes of a simple value.
     Int(ScalarInt),
 
-    /// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
-    /// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
-    /// relocation and its associated offset together as a `Pointer` here.
+    /// A pointer.
     ///
     /// We also store the size of the pointer, such that a `Scalar` always knows how big it is.
     /// The size is always the pointer size of the current target, but this is not information
@@ -2692,8 +2692,8 @@ fn pretty_print_const_value<'tcx>(
                 match inner.kind() {
                     ty::Slice(t) => {
                         if *t == u8_type {
-                            // The `inspect` here is okay since we checked the bounds, and there are
-                            // no relocations (we have an active slice reference here). We don't use
+                            // The `inspect` here is okay since we checked the bounds, and `u8` carries
+                            // no provenance (we have an active slice reference here). We don't use
                             // this result to affect interpreter execution.
                             let byte_str = data
                                 .inner()
@@ -2703,8 +2703,8 @@ fn pretty_print_const_value<'tcx>(
                         }
                     }
                     ty::Str => {
-                        // The `inspect` here is okay since we checked the bounds, and there are no
-                        // relocations (we have an active `str` reference here). We don't use this
+                        // The `inspect` here is okay since we checked the bounds, and `str` carries
+                        // no provenance (we have an active `str` reference here). We don't use this
                         // result to affect interpreter execution.
                         let slice = data
                             .inner()
@@ -676,7 +676,7 @@ pub fn write_allocations<'tcx>(
     fn alloc_ids_from_alloc(
         alloc: ConstAllocation<'_>,
     ) -> impl DoubleEndedIterator<Item = AllocId> + '_ {
-        alloc.inner().relocations().values().map(|id| *id)
+        alloc.inner().provenance().values().map(|id| *id)
     }
 
     fn alloc_ids_from_const_val(val: ConstValue<'_>) -> impl Iterator<Item = AllocId> + '_ {
@@ -778,7 +778,7 @@ pub fn write_allocations<'tcx>(
 /// If the allocation is small enough to fit into a single line, no start address is given.
 /// After the hex dump, an ascii dump follows, replacing all unprintable characters (control
 /// characters or characters whose value is larger than 127) with a `.`
-/// This also prints relocations adequately.
+/// This also prints provenance adequately.
 pub fn display_allocation<'a, 'tcx, Prov, Extra>(
     tcx: TyCtxt<'tcx>,
     alloc: &'a Allocation<Prov, Extra>,
@@ -873,34 +873,34 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
         if i != line_start {
             write!(w, " ")?;
         }
-        if let Some(&prov) = alloc.relocations().get(&i) {
-            // Memory with a relocation must be defined
+        if let Some(&prov) = alloc.provenance().get(&i) {
+            // Memory with provenance must be defined
             assert!(alloc.init_mask().is_range_initialized(i, i + ptr_size).is_ok());
             let j = i.bytes_usize();
             let offset = alloc
                 .inspect_with_uninit_and_ptr_outside_interpreter(j..j + ptr_size.bytes_usize());
             let offset = read_target_uint(tcx.data_layout.endian, offset).unwrap();
             let offset = Size::from_bytes(offset);
-            let relocation_width = |bytes| bytes * 3;
+            let provenance_width = |bytes| bytes * 3;
             let ptr = Pointer::new(prov, offset);
             let mut target = format!("{:?}", ptr);
-            if target.len() > relocation_width(ptr_size.bytes_usize() - 1) {
+            if target.len() > provenance_width(ptr_size.bytes_usize() - 1) {
                 // This is too long, try to save some space.
                 target = format!("{:#?}", ptr);
             }
             if ((i - line_start) + ptr_size).bytes_usize() > BYTES_PER_LINE {
-                // This branch handles the situation where a relocation starts in the current line
+                // This branch handles the situation where a provenance starts in the current line
                 // but ends in the next one.
                 let remainder = Size::from_bytes(BYTES_PER_LINE) - (i - line_start);
                 let overflow = ptr_size - remainder;
-                let remainder_width = relocation_width(remainder.bytes_usize()) - 2;
-                let overflow_width = relocation_width(overflow.bytes_usize() - 1) + 1;
+                let remainder_width = provenance_width(remainder.bytes_usize()) - 2;
+                let overflow_width = provenance_width(overflow.bytes_usize() - 1) + 1;
                 ascii.push('╾');
                 for _ in 0..remainder.bytes() - 1 {
                     ascii.push('─');
                 }
                 if overflow_width > remainder_width && overflow_width >= target.len() {
-                    // The case where the relocation fits into the part in the next line
+                    // The case where the provenance fits into the part in the next line
                     write!(w, "╾{0:─^1$}", "", remainder_width)?;
                     line_start =
                         write_allocation_newline(w, line_start, &ascii, pos_width, prefix)?;

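The closure renamed here encodes the dump's geometry: each byte takes three columns (two hex digits plus a separating space), so a pointer covering `ptr_size` bytes spans `3 * ptr_size - 1` columns, and dropping the `╾` and `╼` delimiters leaves `3 * (ptr_size - 1)` columns for the label, which is exactly the `provenance_width(ptr_size.bytes_usize() - 1)` bound tested above. A self-contained check of that arithmetic (the 64-bit pointer size is an assumed example):

fn provenance_width(bytes: usize) -> usize {
    bytes * 3
}

fn main() {
    let ptr_size: usize = 8; // assumed: 64-bit target
    // 8 bytes span 3 * 8 - 1 = 23 columns (no space after the last byte);
    // minus the two delimiters, 21 columns remain for the target label.
    let label_space = provenance_width(ptr_size - 1);
    assert_eq!(label_space, 21);
    let target = String::from("alloc4+0x1b"); // hypothetical pointer rendering
    if target.len() > label_space {
        // Here rustc falls back to the alternate `{:#?}` pointer rendering.
    }
}
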
@@ -921,11 +921,11 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
                 i += ptr_size;
                 continue;
             } else {
-                // This branch handles a relocation that starts and ends in the current line.
-                let relocation_width = relocation_width(ptr_size.bytes_usize() - 1);
-                oversized_ptr(&mut target, relocation_width);
+                // This branch handles a provenance that starts and ends in the current line.
+                let provenance_width = provenance_width(ptr_size.bytes_usize() - 1);
+                oversized_ptr(&mut target, provenance_width);
                 ascii.push('╾');
-                write!(w, "╾{0:─^1$}╼", target, relocation_width)?;
+                write!(w, "╾{0:─^1$}╼", target, provenance_width)?;
                 for _ in 0..ptr_size.bytes() - 2 {
                     ascii.push('─');
                 }

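The `write!` above leans on Rust's runtime-width center alignment: `{0:─^1$}` centers argument 0 in a field whose width comes from argument 1, filling with `─`. A tiny demo of just that formatting (the label and 64-bit width are assumed values):

fn main() {
    let target = "alloc4+0x8"; // hypothetical pointer label
    let provenance_width = 3 * (8 - 1); // 21 label columns on a 64-bit target
    let rendered = format!("╾{0:─^1$}╼", target, provenance_width);
    println!("{rendered}"); // ╾─────alloc4+0x8──────╼
    // The span is exactly as wide as the bytes it replaces: 21 + 2 delimiters.
    assert_eq!(rendered.chars().count(), provenance_width + 2);
}
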
@@ -935,7 +935,7 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
         } else if alloc.init_mask().is_range_initialized(i, i + Size::from_bytes(1)).is_ok() {
             let j = i.bytes_usize();

-            // Checked definedness (and thus range) and relocations. This access also doesn't
+            // Checked definedness (and thus range) and provenance. This access also doesn't
             // influence interpreter execution but is only for debugging.
             let c = alloc.inspect_with_uninit_and_ptr_outside_interpreter(j..j + 1)[0];
             write!(w, "{:02x}", c)?;

@@ -113,7 +113,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::AllocId {
 }

 // `Relocations` with default type parameters is a sorted map.
-impl<'a, Prov> HashStable<StableHashingContext<'a>> for mir::interpret::Relocations<Prov>
+impl<'a, Prov> HashStable<StableHashingContext<'a>> for mir::interpret::ProvenanceMap<Prov>
 where
     Prov: HashStable<StableHashingContext<'a>>,
 {

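The "sorted map" comment carries the soundness argument: a stable hash must not depend on insertion order, and a map sorted by offset iterates deterministically. A rough analogue using std's `Hash` in place of rustc's `HashStable` machinery (hand-rolled stand-ins):

use std::collections::BTreeMap;
use std::hash::{Hash, Hasher};

struct ProvenanceMap<Prov>(BTreeMap<u64, Prov>); // offset -> provenance

impl<Prov: Hash> Hash for ProvenanceMap<Prov> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // BTreeMap iterates in key order, so equal maps hash equally
        // no matter how they were built.
        for (offset, prov) in &self.0 {
            offset.hash(state);
            prov.hash(state);
        }
    }
}

fn main() {
    use std::collections::hash_map::DefaultHasher;
    let hash = |m: &ProvenanceMap<u8>| {
        let mut h = DefaultHasher::new();
        m.hash(&mut h);
        h.finish()
    };
    let mut a = BTreeMap::new();
    a.insert(0u64, 7u8);
    a.insert(8, 3);
    let mut b = BTreeMap::new();
    b.insert(8u64, 3u8);
    b.insert(0, 7); // same entries, reversed insertion order
    assert_eq!(hash(&ProvenanceMap(a)), hash(&ProvenanceMap(b)));
}
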
@@ -461,7 +461,7 @@ fn collect_items_rec<'tcx>(
             recursion_depth_reset = None;

             if let Ok(alloc) = tcx.eval_static_initializer(def_id) {
-                for &id in alloc.inner().relocations().values() {
+                for &id in alloc.inner().provenance().values() {
                     collect_miri(tcx, id, &mut neighbors);
                 }
             }

@@ -1424,7 +1424,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIte
         }
         GlobalAlloc::Memory(alloc) => {
             trace!("collecting {:?} with {:#?}", alloc_id, alloc);
-            for &inner in alloc.inner().relocations().values() {
+            for &inner in alloc.inner().provenance().values() {
                 rustc_data_structures::stack::ensure_sufficient_stack(|| {
                     collect_miri(tcx, inner, output);
                 });

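This hunk and the previous one walk the same graph: an allocation's provenance map lists the allocations its stored pointers reach, and the collector follows those edges transitively while guarding against unbounded recursion. A toy model of that walk (hypothetical types; the real `collect_miri` also dispatches on the kind of `GlobalAlloc`):

use std::collections::{BTreeMap, HashSet};

type AllocId = u32;

fn collect(
    graph: &BTreeMap<AllocId, Vec<AllocId>>, // alloc -> ids in its provenance map
    id: AllocId,
    output: &mut HashSet<AllocId>,
) {
    if !output.insert(id) {
        return; // already visited; statics can form reference cycles
    }
    for &inner in graph.get(&id).into_iter().flatten() {
        // rustc wraps this recursion in `ensure_sufficient_stack` so that
        // deeply nested constants cannot overflow the compiler's stack.
        collect(graph, inner, output);
    }
}

fn main() {
    let mut graph = BTreeMap::new();
    graph.insert(1, vec![2, 3]);
    graph.insert(2, vec![3]);
    graph.insert(3, vec![1]); // a cycle, as statics allow
    let mut out = HashSet::new();
    collect(&graph, 1, &mut out);
    assert_eq!(out.len(), 3);
}
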
@@ -1463,7 +1463,7 @@ fn collect_const_value<'tcx>(
     match value {
         ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_miri(tcx, ptr.provenance, output),
         ConstValue::Slice { data: alloc, start: _, end: _ } | ConstValue::ByRef { alloc, .. } => {
-            for &id in alloc.inner().relocations().values() {
+            for &id in alloc.inner().provenance().values() {
                 collect_miri(tcx, id, output);
             }
         }

@@ -542,13 +542,13 @@ fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: LocalDefId) {
     // For the wasm32 target statics with `#[link_section]` are placed into custom
     // sections of the final output file, but this isn't link custom sections of
     // other executable formats. Namely we can only embed a list of bytes,
-    // nothing with pointers to anything else or relocations. If any relocation
-    // show up, reject them here.
+    // nothing with provenance (pointers to anything else). If any provenance
+    // show up, reject it here.
     // `#[link_section]` may contain arbitrary, or even undefined bytes, but it is
     // the consumer's responsibility to ensure all bytes that have been read
     // have defined values.
     if let Ok(alloc) = tcx.eval_static_initializer(id.to_def_id())
-        && alloc.inner().relocations().len() != 0
+        && alloc.inner().provenance().len() != 0
     {
         let msg = "statics with a custom `#[link_section]` must be a \
                    simple list of bytes on the wasm target with no \

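Concretely, the check separates initializers like the following two (a hedged illustration; the section name and identifiers are made up):

// Accepted on wasm32: plain bytes, so the allocation's provenance map
// is empty and the bytes can be embedded verbatim in the section.
#[link_section = ".example"]
static PLAIN_BYTES: [u8; 4] = [1, 2, 3, 4];

static TARGET: u8 = 0;

// Rejected on wasm32 if it were given a `#[link_section]`: the value
// stores a pointer, so the provenance map is non-empty and a custom
// wasm section has no relocation mechanism to patch the address.
static WITH_POINTER: &u8 = &TARGET;

fn main() {}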