
Auto merge of #115865 - RalfJung:mir-mod, r=oli-obk

move things out of mir/mod.rs

This moves a bunch of things out of `mir/mod.rs`:
- all const-related stuff to a new file `consts.rs`
- all statement/place/operand-related stuff to a new file `statement.rs`
- all pretty-printing-related stuff to `pretty.rs`

`mod.rs` started out with 3100 lines and ended up with 1600. :)

There was also some pretty-printing code in `terminator.rs`; that moved to `pretty.rs` as well, and I reordered `pretty.rs` so that things are grouped by functionality.

Only the commit "use pretty_print_const_value from MIR constant 'extra' printing" changes behavior; it resolves the issue of having both a fancy and a very crude pretty-printer for `ConstValue`.
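
Most of the diff below is the corresponding change of import paths: `ConstValue` and `ConstAlloc` now live in `rustc_middle::mir` (defined in the new `consts.rs`) rather than in `rustc_middle::mir::interpret`. A minimal before/after sketch of what that means for a user of these types (illustrative only):

```rust
// Before this PR:
use rustc_middle::mir::interpret::{ConstAlloc, ConstValue};

// After this PR, the same types are defined in mir/consts.rs and re-exported from mir:
use rustc_middle::mir::{ConstAlloc, ConstValue};
```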

r? `@oli-obk`
bors 2023-09-19 13:22:48 +00:00
commit 0692db1a90
41 changed files with 2306 additions and 2251 deletions


@ -2,7 +2,8 @@
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::interpret::{read_target_uint, AllocId, ConstValue, GlobalAlloc, Scalar};
use rustc_middle::mir::interpret::{read_target_uint, AllocId, GlobalAlloc, Scalar};
use rustc_middle::mir::ConstValue;
use cranelift_module::*;


@ -1,7 +1,7 @@
#![allow(non_camel_case_types)]
use rustc_hir::LangItem;
use rustc_middle::mir::interpret::ConstValue;
use rustc_middle::mir;
use rustc_middle::ty::{self, layout::TyAndLayout, Ty, TyCtxt};
use rustc_span::Span;
@ -194,10 +194,10 @@ pub fn shift_mask_val<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
pub fn asm_const_to_str<'tcx>(
tcx: TyCtxt<'tcx>,
sp: Span,
const_value: ConstValue<'tcx>,
const_value: mir::ConstValue<'tcx>,
ty_and_layout: TyAndLayout<'tcx>,
) -> String {
let ConstValue::Scalar(scalar) = const_value else {
let mir::ConstValue::Scalar(scalar) = const_value else {
span_bug!(sp, "expected Scalar for promoted asm const, but got {:#?}", const_value)
};
let value = scalar.assert_bits(ty_and_layout.size);


@ -2,7 +2,7 @@ use crate::errors;
use crate::mir::operand::OperandRef;
use crate::traits::*;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{ConstValue, ErrorHandled};
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_middle::ty::{self, Ty};
use rustc_target::abi::Abi;
@ -20,7 +20,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
OperandRef::from_const(bx, val, ty)
}
pub fn eval_mir_constant(&self, constant: &mir::Constant<'tcx>) -> ConstValue<'tcx> {
pub fn eval_mir_constant(&self, constant: &mir::Constant<'tcx>) -> mir::ConstValue<'tcx> {
self.monomorphize(constant.literal)
.eval(self.cx.tcx(), ty::ParamEnv::reveal_all(), Some(constant.span))
.expect("erroneous constant not captured by required_consts")


@ -6,8 +6,8 @@ use crate::glue;
use crate::traits::*;
use crate::MemFlags;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{alloc_range, ConstValue, Pointer, Scalar};
use rustc_middle::mir::interpret::{alloc_range, Pointer, Scalar};
use rustc_middle::mir::{self, ConstValue};
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::Ty;
use rustc_target::abi::{self, Abi, Align, Size};
@ -86,7 +86,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
pub fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
bx: &mut Bx,
val: ConstValue<'tcx>,
val: mir::ConstValue<'tcx>,
ty: Ty<'tcx>,
) -> Self {
let layout = bx.layout_of(ty);


@ -4,9 +4,9 @@ use crate::errors::ConstEvalError;
use either::{Left, Right};
use rustc_hir::def::DefKind;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{ErrorHandled, InterpErrorInfo};
use rustc_middle::mir::pretty::write_allocation_bytes;
use rustc_middle::mir::{self, ConstAlloc, ConstValue};
use rustc_middle::traits::Reveal;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::print::with_no_trimmed_paths;
@ -18,9 +18,8 @@ use super::{CanAccessStatics, CompileTimeEvalContext, CompileTimeInterpreter};
use crate::errors;
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId, Immediate,
InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
StackPopCleanup,
intern_const_alloc_recursive, CtfeValidationMode, GlobalId, Immediate, InternKind, InterpCx,
InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, StackPopCleanup,
};
// Returns a pointer to where the result lives


@ -1,7 +1,7 @@
// Not in interpret to make sure we do not use private implementation details
use crate::errors::MaxNumNodesInConstErr;
use crate::interpret::{intern_const_alloc_recursive, ConstValue, InternKind, InterpCx, Scalar};
use crate::interpret::{intern_const_alloc_recursive, InternKind, InterpCx, Scalar};
use rustc_middle::mir;
use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId};
use rustc_middle::ty::{self, Ty, TyCtxt};
@ -22,7 +22,7 @@ pub(crate) use valtrees::{const_to_valtree_inner, valtree_to_const_value};
pub(crate) fn const_caller_location(
tcx: TyCtxt<'_>,
(file, line, col): (Symbol, u32, u32),
) -> ConstValue<'_> {
) -> mir::ConstValue<'_> {
trace!("const_caller_location: {}:{}:{}", file, line, col);
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), CanAccessStatics::No);
@ -30,7 +30,7 @@ pub(crate) fn const_caller_location(
if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
bug!("intern_const_alloc_recursive should not error in this case")
}
ConstValue::Scalar(Scalar::from_maybe_pointer(loc_place.ptr(), &tcx))
mir::ConstValue::Scalar(Scalar::from_maybe_pointer(loc_place.ptr(), &tcx))
}
// We forbid type-level constants that contain more than `VALTREE_MAX_NODES` nodes.
@ -87,7 +87,7 @@ pub(crate) fn eval_to_valtree<'tcx>(
#[instrument(skip(tcx), level = "debug")]
pub(crate) fn try_destructure_mir_constant_for_diagnostics<'tcx>(
tcx: TyCtxt<'tcx>,
val: ConstValue<'tcx>,
val: mir::ConstValue<'tcx>,
ty: Ty<'tcx>,
) -> Option<mir::DestructuredConstant<'tcx>> {
let param_env = ty::ParamEnv::reveal_all();


@ -4,9 +4,10 @@ use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
use crate::const_eval::CanAccessStatics;
use crate::interpret::MPlaceTy;
use crate::interpret::{
intern_const_alloc_recursive, ConstValue, ImmTy, Immediate, InternKind, MemPlaceMeta,
MemoryKind, PlaceTy, Projectable, Scalar,
intern_const_alloc_recursive, ImmTy, Immediate, InternKind, MemPlaceMeta, MemoryKind, PlaceTy,
Projectable, Scalar,
};
use rustc_middle::mir;
use rustc_middle::ty::layout::{LayoutCx, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
use rustc_span::source_map::DUMMY_SP;
@ -206,7 +207,7 @@ pub fn valtree_to_const_value<'tcx>(
tcx: TyCtxt<'tcx>,
param_env_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
valtree: ty::ValTree<'tcx>,
) -> ConstValue<'tcx> {
) -> mir::ConstValue<'tcx> {
// Basic idea: We directly construct `Scalar` values from trivial `ValTree`s
// (those for constants with type bool, int, uint, float or char).
// For all other types we create an `MPlace` and fill that by walking
@ -219,10 +220,10 @@ pub fn valtree_to_const_value<'tcx>(
match ty.kind() {
ty::FnDef(..) => {
assert!(valtree.unwrap_branch().is_empty());
ConstValue::ZeroSized
mir::ConstValue::ZeroSized
}
ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => match valtree {
ty::ValTree::Leaf(scalar_int) => ConstValue::Scalar(Scalar::Int(scalar_int)),
ty::ValTree::Leaf(scalar_int) => mir::ConstValue::Scalar(Scalar::Int(scalar_int)),
ty::ValTree::Branch(_) => bug!(
"ValTrees for Bool, Int, Uint, Float or Char should have the form ValTree::Leaf"
),
@ -237,7 +238,7 @@ pub fn valtree_to_const_value<'tcx>(
let layout = tcx.layout_of(param_env_ty).unwrap();
if layout.is_zst() {
// Fast path to avoid some allocations.
return ConstValue::ZeroSized;
return mir::ConstValue::ZeroSized;
}
if layout.abi.is_scalar()
&& (matches!(ty.kind(), ty::Tuple(_))


@ -5,10 +5,8 @@
use rustc_hir::def_id::DefId;
use rustc_middle::mir::{
self,
interpret::{
Allocation, ConstAllocation, ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar,
},
BinOp, NonDivergingIntrinsic,
interpret::{Allocation, ConstAllocation, GlobalId, InterpResult, PointerArithmetic, Scalar},
BinOp, ConstValue, NonDivergingIntrinsic,
};
use rustc_middle::ty;
use rustc_middle::ty::layout::{LayoutOf as _, ValidityRequirement};


@ -13,9 +13,8 @@ use rustc_middle::{mir, ty};
use rustc_target::abi::{self, Abi, Align, HasDataLayout, Size};
use super::{
alloc_range, from_known_layout, mir_assign_valid_types, AllocId, ConstValue, Frame, InterpCx,
InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, PlaceTy, Pointer, Projectable,
Provenance, Scalar,
alloc_range, from_known_layout, mir_assign_valid_types, AllocId, Frame, InterpCx, InterpResult,
MPlaceTy, Machine, MemPlace, MemPlaceMeta, PlaceTy, Pointer, Projectable, Provenance, Scalar,
};
/// An `Immediate` represents a single immediate self-contained Rust value.
@ -702,7 +701,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub(crate) fn const_val_to_op(
&self,
val_val: ConstValue<'tcx>,
val_val: mir::ConstValue<'tcx>,
ty: Ty<'tcx>,
layout: Option<TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
@ -715,15 +714,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
};
let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?;
let op = match val_val {
ConstValue::Indirect { alloc_id, offset } => {
mir::ConstValue::Indirect { alloc_id, offset } => {
// We rely on mutability being set correctly in that allocation to prevent writes
// where none should happen.
let ptr = self.global_base_pointer(Pointer::new(alloc_id, offset))?;
Operand::Indirect(MemPlace::from_ptr(ptr.into()))
}
ConstValue::Scalar(x) => Operand::Immediate(adjust_scalar(x)?.into()),
ConstValue::ZeroSized => Operand::Immediate(Immediate::Uninit),
ConstValue::Slice { data, start, end } => {
mir::ConstValue::Scalar(x) => Operand::Immediate(adjust_scalar(x)?.into()),
mir::ConstValue::ZeroSized => Operand::Immediate(Immediate::Uninit),
mir::ConstValue::Slice { data, start, end } => {
// We rely on mutability being set correctly in `data` to prevent writes
// where none should happen.
let ptr = Pointer::new(


@ -9,16 +9,15 @@ use either::{Either, Left, Right};
use rustc_ast::Mutability;
use rustc_index::IndexSlice;
use rustc_middle::mir;
use rustc_middle::mir::interpret::PointerArithmetic;
use rustc_middle::ty;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::Ty;
use rustc_target::abi::{Abi, Align, FieldIdx, HasDataLayout, Size, FIRST_VARIANT};
use super::{
alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg,
ConstAlloc, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemoryKind, OpTy, Operand,
Pointer, Projectable, Provenance, Readable, Scalar,
alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg, ImmTy,
Immediate, InterpCx, InterpResult, Machine, MemoryKind, OpTy, Operand, Pointer,
PointerArithmetic, Projectable, Provenance, Readable, Scalar,
};
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
@ -1037,7 +1036,7 @@ where
pub fn raw_const_to_mplace(
&self,
raw: ConstAlloc<'tcx>,
raw: mir::ConstAlloc<'tcx>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
// This must be an allocation in `tcx`
let _ = self.tcx.global_alloc(raw.alloc_id);


@ -47,6 +47,8 @@ extern crate cfg_if;
#[macro_use]
extern crate rustc_macros;
use std::fmt;
pub use rustc_index::static_assert_size;
#[inline(never)]
@ -126,6 +128,23 @@ impl<F: FnOnce()> Drop for OnDrop<F> {
}
}
/// Turns a closure that takes an `&mut Formatter` into something that can be display-formatted.
pub fn make_display(f: impl Fn(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Display {
struct Printer<F> {
f: F,
}
impl<F> fmt::Display for Printer<F>
where
F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result,
{
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
(self.f)(fmt)
}
}
Printer { f }
}
// See comments in src/librustc_middle/lib.rs
#[doc(hidden)]
pub fn __noop_fix_for_27438() {}
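
The new `make_display` helper above turns a formatting closure into a value that implements `Display`, so callers can format lazily instead of building an intermediate `String`. A standalone usage sketch (the helper is reproduced here, slightly condensed, so the snippet compiles on its own; the item list and `main` are illustrative):

```rust
use std::fmt;

/// Same shape as the helper above: wrap a formatting closure in a `Display` type.
fn make_display(f: impl Fn(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Display {
    struct Printer<F> {
        f: F,
    }
    impl<F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result> fmt::Display for Printer<F> {
        fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
            (self.f)(fmt)
        }
    }
    Printer { f }
}

fn main() {
    let items = [1, 2, 3];
    // Format lazily; nothing is written until the value is actually displayed.
    let list = make_display(move |f| {
        for (i, x) in items.iter().enumerate() {
            if i > 0 {
                write!(f, ", ")?;
            }
            write!(f, "{x}")?;
        }
        Ok(())
    });
    println!("[{list}]"); // prints: [1, 2, 3]
}
```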


@ -0,0 +1,574 @@
use std::fmt::{self, Debug, Display, Formatter};
use rustc_hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{self as hir};
use rustc_span::Span;
use rustc_target::abi::{HasDataLayout, Size};
use crate::mir::interpret::{
alloc_range, AllocId, ConstAllocation, ErrorHandled, GlobalAlloc, Scalar,
};
use crate::mir::{pretty_print_const_value, Promoted};
use crate::ty::{self, print::pretty_print_const, List, Ty, TyCtxt};
use crate::ty::{GenericArgs, GenericArgsRef};
use crate::ty::{ScalarInt, UserTypeAnnotationIndex};
///////////////////////////////////////////////////////////////////////////
/// Evaluated Constants
/// Represents the result of const evaluation via the `eval_to_allocation` query.
/// Not to be confused with `ConstAllocation`, which directly refers to the underlying data!
/// Here we indirect via an `AllocId`.
#[derive(Copy, Clone, HashStable, TyEncodable, TyDecodable, Debug, Hash, Eq, PartialEq)]
pub struct ConstAlloc<'tcx> {
/// The value lives here, at offset 0, and that allocation definitely is an `AllocKind::Memory`
/// (so you can use `AllocMap::unwrap_memory`).
pub alloc_id: AllocId,
pub ty: Ty<'tcx>,
}
/// Represents a constant value in Rust. `Scalar` and `Slice` are optimizations for
/// array length computations, enum discriminants and the pattern matching logic.
#[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable, Lift)]
pub enum ConstValue<'tcx> {
/// Used for types with `layout::abi::Scalar` ABI.
///
/// Not using the enum `Value` to encode that this must not be `Uninit`.
Scalar(Scalar),
/// Only for ZSTs.
ZeroSized,
/// Used for `&[u8]` and `&str`.
///
/// This is worth an optimized representation since Rust has literals of these types.
/// Not having to indirect those through an `AllocId` (or two, if we used `Indirect`) has shown
/// measurable performance improvements on stress tests.
Slice { data: ConstAllocation<'tcx>, start: usize, end: usize },
/// A value not representable by the other variants; needs to be stored in-memory.
///
/// Must *not* be used for scalars or ZST, but having `&str` or other slices in this variant is fine.
Indirect {
/// The backing memory of the value. May contain more memory than needed for just the value
/// if this points into some other larger ConstValue.
///
/// We use an `AllocId` here instead of a `ConstAllocation<'tcx>` to make sure that when a
/// raw constant (which is basically just an `AllocId`) is turned into a `ConstValue` and
/// back, we can preserve the original `AllocId`.
alloc_id: AllocId,
/// Offset into `alloc`
offset: Size,
},
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(ConstValue<'_>, 32);
impl<'tcx> ConstValue<'tcx> {
#[inline]
pub fn try_to_scalar(&self) -> Option<Scalar<AllocId>> {
match *self {
ConstValue::Indirect { .. } | ConstValue::Slice { .. } | ConstValue::ZeroSized => None,
ConstValue::Scalar(val) => Some(val),
}
}
pub fn try_to_scalar_int(&self) -> Option<ScalarInt> {
self.try_to_scalar()?.try_to_int().ok()
}
pub fn try_to_bits(&self, size: Size) -> Option<u128> {
self.try_to_scalar_int()?.to_bits(size).ok()
}
pub fn try_to_bool(&self) -> Option<bool> {
self.try_to_scalar_int()?.try_into().ok()
}
pub fn try_to_target_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
self.try_to_scalar_int()?.try_to_target_usize(tcx).ok()
}
pub fn try_to_bits_for_ty(
&self,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
) -> Option<u128> {
let size = tcx.layout_of(param_env.with_reveal_all_normalized(tcx).and(ty)).ok()?.size;
self.try_to_bits(size)
}
pub fn from_bool(b: bool) -> Self {
ConstValue::Scalar(Scalar::from_bool(b))
}
pub fn from_u64(i: u64) -> Self {
ConstValue::Scalar(Scalar::from_u64(i))
}
pub fn from_u128(i: u128) -> Self {
ConstValue::Scalar(Scalar::from_u128(i))
}
pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self {
ConstValue::Scalar(Scalar::from_target_usize(i, cx))
}
/// Must only be called on constants of type `&str` or `&[u8]`!
pub fn try_get_slice_bytes_for_diagnostics(&self, tcx: TyCtxt<'tcx>) -> Option<&'tcx [u8]> {
let (data, start, end) = match self {
ConstValue::Scalar(_) | ConstValue::ZeroSized => {
bug!("`try_get_slice_bytes` on non-slice constant")
}
&ConstValue::Slice { data, start, end } => (data, start, end),
&ConstValue::Indirect { alloc_id, offset } => {
// The reference itself is stored behind an indirection.
// Load the reference, and then load the actual slice contents.
let a = tcx.global_alloc(alloc_id).unwrap_memory().inner();
let ptr_size = tcx.data_layout.pointer_size;
if a.size() < offset + 2 * ptr_size {
// (partially) dangling reference
return None;
}
// Read the wide pointer components.
let ptr = a
.read_scalar(
&tcx,
alloc_range(offset, ptr_size),
/* read_provenance */ true,
)
.ok()?;
let ptr = ptr.to_pointer(&tcx).ok()?;
let len = a
.read_scalar(
&tcx,
alloc_range(offset + ptr_size, ptr_size),
/* read_provenance */ false,
)
.ok()?;
let len = len.to_target_usize(&tcx).ok()?;
let len: usize = len.try_into().ok()?;
if len == 0 {
return Some(&[]);
}
// Non-empty slice, must have memory. We know this is a relative pointer.
let (inner_alloc_id, offset) = ptr.into_parts();
let data = tcx.global_alloc(inner_alloc_id?).unwrap_memory();
(data, offset.bytes_usize(), offset.bytes_usize() + len)
}
};
// This is for diagnostics only, so we are okay to use `inspect_with_uninit_and_ptr_outside_interpreter`.
Some(data.inner().inspect_with_uninit_and_ptr_outside_interpreter(start..end))
}
}
///////////////////////////////////////////////////////////////////////////
/// Constants
///
/// Two constants are equal if they are the same constant. Note that
/// this does not necessarily mean that they are `==` in Rust. In
/// particular, one must be wary of `NaN`!
#[derive(Clone, Copy, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub struct Constant<'tcx> {
pub span: Span,
/// Optional user-given type: for something like
/// `collect::<Vec<_>>`, this would be present and would
/// indicate that `Vec<_>` was explicitly specified.
///
/// Needed for NLL to impose user-given type constraints.
pub user_ty: Option<UserTypeAnnotationIndex>,
pub literal: ConstantKind<'tcx>,
}
#[derive(Clone, Copy, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable, Debug)]
#[derive(TypeFoldable, TypeVisitable)]
pub enum ConstantKind<'tcx> {
/// This constant came from the type system.
///
/// Any way of turning `ty::Const` into `ConstValue` should go through `valtree_to_const_val`;
/// this ensures that we consistently produce "clean" values without data in the padding or
/// anything like that.
Ty(ty::Const<'tcx>),
/// An unevaluated mir constant which is not part of the type system.
///
/// Note that `Ty(ty::ConstKind::Unevaluated)` and this variant are *not* identical! `Ty` will
/// always flow through a valtree, so all data not captured in the valtree is lost. This variant
/// directly uses the evaluated result of the given constant, including e.g. data stored in
/// padding.
Unevaluated(UnevaluatedConst<'tcx>, Ty<'tcx>),
/// This constant cannot go back into the type system, as it represents
/// something the type system cannot handle (e.g. pointers).
Val(ConstValue<'tcx>, Ty<'tcx>),
}
impl<'tcx> Constant<'tcx> {
pub fn check_static_ptr(&self, tcx: TyCtxt<'_>) -> Option<DefId> {
match self.literal.try_to_scalar() {
Some(Scalar::Ptr(ptr, _size)) => match tcx.global_alloc(ptr.provenance) {
GlobalAlloc::Static(def_id) => {
assert!(!tcx.is_thread_local_static(def_id));
Some(def_id)
}
_ => None,
},
_ => None,
}
}
#[inline]
pub fn ty(&self) -> Ty<'tcx> {
self.literal.ty()
}
}
impl<'tcx> ConstantKind<'tcx> {
#[inline(always)]
pub fn ty(&self) -> Ty<'tcx> {
match self {
ConstantKind::Ty(c) => c.ty(),
ConstantKind::Val(_, ty) | ConstantKind::Unevaluated(_, ty) => *ty,
}
}
#[inline]
pub fn try_to_scalar(self) -> Option<Scalar> {
match self {
ConstantKind::Ty(c) => match c.kind() {
ty::ConstKind::Value(valtree) => match valtree {
ty::ValTree::Leaf(scalar_int) => Some(Scalar::Int(scalar_int)),
ty::ValTree::Branch(_) => None,
},
_ => None,
},
ConstantKind::Val(val, _) => val.try_to_scalar(),
ConstantKind::Unevaluated(..) => None,
}
}
#[inline]
pub fn try_to_scalar_int(self) -> Option<ScalarInt> {
self.try_to_scalar()?.try_to_int().ok()
}
#[inline]
pub fn try_to_bits(self, size: Size) -> Option<u128> {
self.try_to_scalar_int()?.to_bits(size).ok()
}
#[inline]
pub fn try_to_bool(self) -> Option<bool> {
self.try_to_scalar_int()?.try_into().ok()
}
#[inline]
pub fn eval(
self,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
span: Option<Span>,
) -> Result<ConstValue<'tcx>, ErrorHandled> {
match self {
ConstantKind::Ty(c) => {
// We want to consistently have a "clean" value for type system constants (i.e., no
// data hidden in the padding), so we always go through a valtree here.
let val = c.eval(tcx, param_env, span)?;
Ok(tcx.valtree_to_const_val((self.ty(), val)))
}
ConstantKind::Unevaluated(uneval, _) => {
// FIXME: We might want to have a `try_eval`-like function on `Unevaluated`
tcx.const_eval_resolve(param_env, uneval, span)
}
ConstantKind::Val(val, _) => Ok(val),
}
}
/// Normalizes the constant to a value or an error if possible.
#[inline]
pub fn normalize(self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Self {
match self.eval(tcx, param_env, None) {
Ok(val) => Self::Val(val, self.ty()),
Err(ErrorHandled::Reported(guar, _span)) => {
Self::Ty(ty::Const::new_error(tcx, guar.into(), self.ty()))
}
Err(ErrorHandled::TooGeneric(_span)) => self,
}
}
#[inline]
pub fn try_eval_scalar(
self,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Option<Scalar> {
self.eval(tcx, param_env, None).ok()?.try_to_scalar()
}
#[inline]
pub fn try_eval_scalar_int(
self,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Option<ScalarInt> {
self.try_eval_scalar(tcx, param_env)?.try_to_int().ok()
}
#[inline]
pub fn try_eval_bits(
&self,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
) -> Option<u128> {
let int = self.try_eval_scalar_int(tcx, param_env)?;
assert_eq!(self.ty(), ty);
let size = tcx.layout_of(param_env.with_reveal_all_normalized(tcx).and(ty)).ok()?.size;
int.to_bits(size).ok()
}
/// Panics if the value cannot be evaluated or doesn't contain a valid integer of the given type.
#[inline]
pub fn eval_bits(self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, ty: Ty<'tcx>) -> u128 {
self.try_eval_bits(tcx, param_env, ty)
.unwrap_or_else(|| bug!("expected bits of {:#?}, got {:#?}", ty, self))
}
#[inline]
pub fn try_eval_target_usize(
self,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Option<u64> {
self.try_eval_scalar_int(tcx, param_env)?.try_to_target_usize(tcx).ok()
}
#[inline]
/// Panics if the value cannot be evaluated or doesn't contain a valid `usize`.
pub fn eval_target_usize(self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> u64 {
self.try_eval_target_usize(tcx, param_env)
.unwrap_or_else(|| bug!("expected usize, got {:#?}", self))
}
#[inline]
pub fn try_eval_bool(self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Option<bool> {
self.try_eval_scalar_int(tcx, param_env)?.try_into().ok()
}
#[inline]
pub fn from_value(val: ConstValue<'tcx>, ty: Ty<'tcx>) -> Self {
Self::Val(val, ty)
}
pub fn from_bits(
tcx: TyCtxt<'tcx>,
bits: u128,
param_env_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> Self {
let size = tcx
.layout_of(param_env_ty)
.unwrap_or_else(|e| {
bug!("could not compute layout for {:?}: {:?}", param_env_ty.value, e)
})
.size;
let cv = ConstValue::Scalar(Scalar::from_uint(bits, size));
Self::Val(cv, param_env_ty.value)
}
#[inline]
pub fn from_bool(tcx: TyCtxt<'tcx>, v: bool) -> Self {
let cv = ConstValue::from_bool(v);
Self::Val(cv, tcx.types.bool)
}
#[inline]
pub fn zero_sized(ty: Ty<'tcx>) -> Self {
let cv = ConstValue::ZeroSized;
Self::Val(cv, ty)
}
pub fn from_usize(tcx: TyCtxt<'tcx>, n: u64) -> Self {
let ty = tcx.types.usize;
Self::from_bits(tcx, n as u128, ty::ParamEnv::empty().and(ty))
}
#[inline]
pub fn from_scalar(_tcx: TyCtxt<'tcx>, s: Scalar, ty: Ty<'tcx>) -> Self {
let val = ConstValue::Scalar(s);
Self::Val(val, ty)
}
/// Literals are converted to `ConstantKindVal`, const generic parameters are eagerly
/// converted to a constant, everything else becomes `Unevaluated`.
#[instrument(skip(tcx), level = "debug", ret)]
pub fn from_anon_const(
tcx: TyCtxt<'tcx>,
def: LocalDefId,
param_env: ty::ParamEnv<'tcx>,
) -> Self {
let body_id = match tcx.hir().get_by_def_id(def) {
hir::Node::AnonConst(ac) => ac.body,
_ => {
span_bug!(tcx.def_span(def), "from_anon_const can only process anonymous constants")
}
};
let expr = &tcx.hir().body(body_id).value;
debug!(?expr);
// Unwrap a block, so that e.g. `{ P }` is recognised as a parameter. Const arguments
// currently have to be wrapped in curly brackets, so it's necessary to special-case.
let expr = match &expr.kind {
hir::ExprKind::Block(block, _) if block.stmts.is_empty() && block.expr.is_some() => {
block.expr.as_ref().unwrap()
}
_ => expr,
};
debug!("expr.kind: {:?}", expr.kind);
let ty = tcx.type_of(def).instantiate_identity();
debug!(?ty);
// FIXME(const_generics): We currently have to special case parameters because `min_const_generics`
// does not provide the parents generics to anonymous constants. We still allow generic const
// parameters by themselves however, e.g. `N`. These constants would cause an ICE if we were to
// ever try to substitute the generic parameters in their bodies.
//
// While this doesn't happen as these constants are always used as `ty::ConstKind::Param`, it does
// cause issues if we were to remove that special-case and try to evaluate the constant instead.
use hir::{def::DefKind::ConstParam, def::Res, ExprKind, Path, QPath};
match expr.kind {
ExprKind::Path(QPath::Resolved(_, &Path { res: Res::Def(ConstParam, def_id), .. })) => {
// Find the name and index of the const parameter by indexing the generics of
// the parent item and construct a `ParamConst`.
let item_def_id = tcx.parent(def_id);
let generics = tcx.generics_of(item_def_id);
let index = generics.param_def_id_to_index[&def_id];
let name = tcx.item_name(def_id);
let ty_const = ty::Const::new_param(tcx, ty::ParamConst::new(index, name), ty);
debug!(?ty_const);
return Self::Ty(ty_const);
}
_ => {}
}
let hir_id = tcx.hir().local_def_id_to_hir_id(def);
let parent_args = if let Some(parent_hir_id) = tcx.hir().opt_parent_id(hir_id)
&& let Some(parent_did) = parent_hir_id.as_owner()
{
GenericArgs::identity_for_item(tcx, parent_did)
} else {
List::empty()
};
debug!(?parent_args);
let did = def.to_def_id();
let child_args = GenericArgs::identity_for_item(tcx, did);
let args = tcx.mk_args_from_iter(parent_args.into_iter().chain(child_args.into_iter()));
debug!(?args);
let span = tcx.def_span(def);
let uneval = UnevaluatedConst::new(did, args);
debug!(?span, ?param_env);
match tcx.const_eval_resolve(param_env, uneval, Some(span)) {
Ok(val) => {
debug!("evaluated const value");
Self::Val(val, ty)
}
Err(_) => {
debug!("error encountered during evaluation");
// Error was handled in `const_eval_resolve`. Here we just create a
// new unevaluated const and error hard later in codegen
Self::Unevaluated(
UnevaluatedConst {
def: did,
args: GenericArgs::identity_for_item(tcx, did),
promoted: None,
},
ty,
)
}
}
}
pub fn from_ty_const(c: ty::Const<'tcx>, tcx: TyCtxt<'tcx>) -> Self {
match c.kind() {
ty::ConstKind::Value(valtree) => {
// Make sure that if `c` is normalized, then the return value is normalized.
let const_val = tcx.valtree_to_const_val((c.ty(), valtree));
Self::Val(const_val, c.ty())
}
_ => Self::Ty(c),
}
}
}
/// An unevaluated (potentially generic) constant used in MIR.
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct UnevaluatedConst<'tcx> {
pub def: DefId,
pub args: GenericArgsRef<'tcx>,
pub promoted: Option<Promoted>,
}
impl<'tcx> UnevaluatedConst<'tcx> {
#[inline]
pub fn shrink(self) -> ty::UnevaluatedConst<'tcx> {
assert_eq!(self.promoted, None);
ty::UnevaluatedConst { def: self.def, args: self.args }
}
}
impl<'tcx> UnevaluatedConst<'tcx> {
#[inline]
pub fn new(def: DefId, args: GenericArgsRef<'tcx>) -> UnevaluatedConst<'tcx> {
UnevaluatedConst { def, args, promoted: Default::default() }
}
#[inline]
pub fn from_instance(instance: ty::Instance<'tcx>) -> Self {
UnevaluatedConst::new(instance.def_id(), instance.args)
}
}
impl<'tcx> Debug for Constant<'tcx> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
write!(fmt, "{self}")
}
}
impl<'tcx> Display for Constant<'tcx> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
match self.ty().kind() {
ty::FnDef(..) => {}
_ => write!(fmt, "const ")?,
}
Display::fmt(&self.literal, fmt)
}
}
impl<'tcx> Display for ConstantKind<'tcx> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
match *self {
ConstantKind::Ty(c) => pretty_print_const(c, fmt, true),
ConstantKind::Val(val, ty) => pretty_print_const_value(val, ty, fmt),
// FIXME(valtrees): Correctly print mir constants.
ConstantKind::Unevaluated(..) => {
fmt.write_str("_")?;
Ok(())
}
}
}
}
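
To make the `ConstValue`/`ConstantKind` API in the new `consts.rs` concrete, here is a sketch of constructing a `bool` MIR constant and reading it back. This only makes sense inside the compiler (it needs a `TyCtxt`), and `make_true` is a hypothetical helper, not part of this PR:

```rust
use rustc_middle::mir::{ConstValue, ConstantKind};
use rustc_middle::ty::TyCtxt;

// Hypothetical: wrap `true` as a MIR constant and sanity-check the round trip.
fn make_true<'tcx>(tcx: TyCtxt<'tcx>) -> ConstantKind<'tcx> {
    let cv = ConstValue::from_bool(true);            // a `Scalar` const value
    let ck = ConstantKind::Val(cv, tcx.types.bool);  // pair it with its type
    debug_assert_eq!(ck.try_to_bool(), Some(true));  // goes through `try_to_scalar_int`
    ck
}
```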


@ -1,7 +1,7 @@
use super::{AllocId, AllocRange, ConstAlloc, Pointer, Scalar};
use super::{AllocId, AllocRange, Pointer, Scalar};
use crate::error;
use crate::mir::interpret::ConstValue;
use crate::mir::{ConstAlloc, ConstValue};
use crate::query::TyCtxtAt;
use crate::ty::{layout, tls, Ty, TyCtxt, ValTree};


@ -149,7 +149,7 @@ pub use self::error::{
UnsupportedOpInfo, ValidationErrorInfo, ValidationErrorKind,
};
pub use self::value::{ConstAlloc, ConstValue, Scalar};
pub use self::value::Scalar;
pub use self::allocation::{
alloc_range, AllocBytes, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation,


@ -9,163 +9,9 @@ use rustc_apfloat::{
use rustc_macros::HashStable;
use rustc_target::abi::{HasDataLayout, Size};
use crate::{
mir::interpret::alloc_range,
ty::{ParamEnv, ScalarInt, Ty, TyCtxt},
};
use crate::ty::ScalarInt;
use super::{
AllocId, ConstAllocation, InterpResult, Pointer, PointerArithmetic, Provenance,
ScalarSizeMismatch,
};
/// Represents the result of const evaluation via the `eval_to_allocation` query.
#[derive(Copy, Clone, HashStable, TyEncodable, TyDecodable, Debug, Hash, Eq, PartialEq)]
pub struct ConstAlloc<'tcx> {
/// The value lives here, at offset 0, and that allocation definitely is an `AllocKind::Memory`
/// (so you can use `AllocMap::unwrap_memory`).
pub alloc_id: AllocId,
pub ty: Ty<'tcx>,
}
/// Represents a constant value in Rust. `Scalar` and `Slice` are optimizations for
/// array length computations, enum discriminants and the pattern matching logic.
#[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable, Lift)]
pub enum ConstValue<'tcx> {
/// Used for types with `layout::abi::Scalar` ABI.
///
/// Not using the enum `Value` to encode that this must not be `Uninit`.
Scalar(Scalar),
/// Only for ZSTs.
ZeroSized,
/// Used for `&[u8]` and `&str`.
///
/// This is worth an optimized representation since Rust has literals of these types.
/// Not having to indirect those through an `AllocId` (or two, if we used `Indirect`) has shown
/// measurable performance improvements on stress tests.
Slice { data: ConstAllocation<'tcx>, start: usize, end: usize },
/// A value not representable by the other variants; needs to be stored in-memory.
///
/// Must *not* be used for scalars or ZST, but having `&str` or other slices in this variant is fine.
Indirect {
/// The backing memory of the value. May contain more memory than needed for just the value
/// if this points into some other larger ConstValue.
///
/// We use an `AllocId` here instead of a `ConstAllocation<'tcx>` to make sure that when a
/// raw constant (which is basically just an `AllocId`) is turned into a `ConstValue` and
/// back, we can preserve the original `AllocId`.
alloc_id: AllocId,
/// Offset into `alloc`
offset: Size,
},
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(ConstValue<'_>, 32);
impl<'tcx> ConstValue<'tcx> {
#[inline]
pub fn try_to_scalar(&self) -> Option<Scalar<AllocId>> {
match *self {
ConstValue::Indirect { .. } | ConstValue::Slice { .. } | ConstValue::ZeroSized => None,
ConstValue::Scalar(val) => Some(val),
}
}
pub fn try_to_scalar_int(&self) -> Option<ScalarInt> {
self.try_to_scalar()?.try_to_int().ok()
}
pub fn try_to_bits(&self, size: Size) -> Option<u128> {
self.try_to_scalar_int()?.to_bits(size).ok()
}
pub fn try_to_bool(&self) -> Option<bool> {
self.try_to_scalar_int()?.try_into().ok()
}
pub fn try_to_target_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
self.try_to_scalar_int()?.try_to_target_usize(tcx).ok()
}
pub fn try_to_bits_for_ty(
&self,
tcx: TyCtxt<'tcx>,
param_env: ParamEnv<'tcx>,
ty: Ty<'tcx>,
) -> Option<u128> {
let size = tcx.layout_of(param_env.with_reveal_all_normalized(tcx).and(ty)).ok()?.size;
self.try_to_bits(size)
}
pub fn from_bool(b: bool) -> Self {
ConstValue::Scalar(Scalar::from_bool(b))
}
pub fn from_u64(i: u64) -> Self {
ConstValue::Scalar(Scalar::from_u64(i))
}
pub fn from_u128(i: u128) -> Self {
ConstValue::Scalar(Scalar::from_u128(i))
}
pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self {
ConstValue::Scalar(Scalar::from_target_usize(i, cx))
}
/// Must only be called on constants of type `&str` or `&[u8]`!
pub fn try_get_slice_bytes_for_diagnostics(&self, tcx: TyCtxt<'tcx>) -> Option<&'tcx [u8]> {
let (data, start, end) = match self {
ConstValue::Scalar(_) | ConstValue::ZeroSized => {
bug!("`try_get_slice_bytes` on non-slice constant")
}
&ConstValue::Slice { data, start, end } => (data, start, end),
&ConstValue::Indirect { alloc_id, offset } => {
// The reference itself is stored behind an indirection.
// Load the reference, and then load the actual slice contents.
let a = tcx.global_alloc(alloc_id).unwrap_memory().inner();
let ptr_size = tcx.data_layout.pointer_size;
if a.size() < offset + 2 * ptr_size {
// (partially) dangling reference
return None;
}
// Read the wide pointer components.
let ptr = a
.read_scalar(
&tcx,
alloc_range(offset, ptr_size),
/* read_provenance */ true,
)
.ok()?;
let ptr = ptr.to_pointer(&tcx).ok()?;
let len = a
.read_scalar(
&tcx,
alloc_range(offset + ptr_size, ptr_size),
/* read_provenance */ false,
)
.ok()?;
let len = len.to_target_usize(&tcx).ok()?;
let len: usize = len.try_into().ok()?;
if len == 0 {
return Some(&[]);
}
// Non-empty slice, must have memory. We know this is a relative pointer.
let (inner_alloc_id, offset) = ptr.into_parts();
let data = tcx.global_alloc(inner_alloc_id?).unwrap_memory();
(data, offset.bytes_usize(), offset.bytes_usize() + len)
}
};
// This is for diagnostics only, so we are okay to use `inspect_with_uninit_and_ptr_outside_interpreter`.
Some(data.inner().inspect_with_uninit_and_ptr_outside_interpreter(start..end))
}
}
use super::{AllocId, InterpResult, Pointer, PointerArithmetic, Provenance, ScalarSizeMismatch};
/// A `Scalar` represents an immediate, primitive value existing outside of a
/// `memory::Allocation`. It is in many ways like a small chunk of an `Allocation`, up to 16 bytes in

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@ -1,6 +1,5 @@
//! Values computed by queries that use MIR.
use crate::mir::interpret::ConstValue;
use crate::ty::{self, OpaqueHiddenType, Ty, TyCtxt};
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::unord::UnordSet;
@ -16,7 +15,7 @@ use smallvec::SmallVec;
use std::cell::Cell;
use std::fmt::{self, Debug};
use super::SourceInfo;
use super::{ConstValue, SourceInfo};
#[derive(Copy, Clone, PartialEq, TyEncodable, TyDecodable, HashStable, Debug)]
pub enum UnsafetyViolationKind {


@ -0,0 +1,441 @@
/// Functionality for statements, operands, places, and things that appear in them.
use super::*;
///////////////////////////////////////////////////////////////////////////
// Statements
/// A statement in a basic block, including information about its source code.
#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub struct Statement<'tcx> {
pub source_info: SourceInfo,
pub kind: StatementKind<'tcx>,
}
impl Statement<'_> {
/// Changes a statement to a nop. This is both faster than deleting instructions and avoids
/// invalidating statement indices in `Location`s.
pub fn make_nop(&mut self) {
self.kind = StatementKind::Nop
}
/// Changes a statement to a nop and returns the original statement.
#[must_use = "If you don't need the statement, use `make_nop` instead"]
pub fn replace_nop(&mut self) -> Self {
Statement {
source_info: self.source_info,
kind: mem::replace(&mut self.kind, StatementKind::Nop),
}
}
}
impl<'tcx> StatementKind<'tcx> {
pub fn as_assign_mut(&mut self) -> Option<&mut (Place<'tcx>, Rvalue<'tcx>)> {
match self {
StatementKind::Assign(x) => Some(x),
_ => None,
}
}
pub fn as_assign(&self) -> Option<&(Place<'tcx>, Rvalue<'tcx>)> {
match self {
StatementKind::Assign(x) => Some(x),
_ => None,
}
}
}
///////////////////////////////////////////////////////////////////////////
// Places
impl<V, T> ProjectionElem<V, T> {
/// Returns `true` if the target of this projection may refer to a different region of memory
/// than the base.
fn is_indirect(&self) -> bool {
match self {
Self::Deref => true,
Self::Field(_, _)
| Self::Index(_)
| Self::OpaqueCast(_)
| Self::ConstantIndex { .. }
| Self::Subslice { .. }
| Self::Downcast(_, _) => false,
}
}
/// Returns `true` if the target of this projection always refers to the same memory region
/// whatever the state of the program.
pub fn is_stable_offset(&self) -> bool {
match self {
Self::Deref | Self::Index(_) => false,
Self::Field(_, _)
| Self::OpaqueCast(_)
| Self::ConstantIndex { .. }
| Self::Subslice { .. }
| Self::Downcast(_, _) => true,
}
}
/// Returns `true` if this is a `Downcast` projection with the given `VariantIdx`.
pub fn is_downcast_to(&self, v: VariantIdx) -> bool {
matches!(*self, Self::Downcast(_, x) if x == v)
}
/// Returns `true` if this is a `Field` projection with the given index.
pub fn is_field_to(&self, f: FieldIdx) -> bool {
matches!(*self, Self::Field(x, _) if x == f)
}
/// Returns `true` if this is accepted inside `VarDebugInfoContents::Place`.
pub fn can_use_in_debuginfo(&self) -> bool {
match self {
Self::ConstantIndex { from_end: false, .. }
| Self::Deref
| Self::Downcast(_, _)
| Self::Field(_, _) => true,
Self::ConstantIndex { from_end: true, .. }
| Self::Index(_)
| Self::OpaqueCast(_)
| Self::Subslice { .. } => false,
}
}
}
/// Alias for projections as they appear in `UserTypeProjection`, where we
/// need neither the `V` parameter for `Index` nor the `T` for `Field`.
pub type ProjectionKind = ProjectionElem<(), ()>;
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct PlaceRef<'tcx> {
pub local: Local,
pub projection: &'tcx [PlaceElem<'tcx>],
}
// Once we stop implementing `Ord` for `DefId`,
// this impl will be unnecessary. Until then, we'll
// leave this impl in place to prevent re-adding a
// dependency on the `Ord` impl for `DefId`
impl<'tcx> !PartialOrd for PlaceRef<'tcx> {}
impl<'tcx> Place<'tcx> {
// FIXME change this to a const fn by also making List::empty a const fn.
pub fn return_place() -> Place<'tcx> {
Place { local: RETURN_PLACE, projection: List::empty() }
}
/// Returns `true` if this `Place` contains a `Deref` projection.
///
/// If `Place::is_indirect` returns false, the caller knows that the `Place` refers to the
/// same region of memory as its base.
pub fn is_indirect(&self) -> bool {
self.projection.iter().any(|elem| elem.is_indirect())
}
/// Returns `true` if this `Place`'s first projection is `Deref`.
///
/// This is useful because for MIR phases `AnalysisPhase::PostCleanup` and later,
/// `Deref` projections can only occur as the first projection. In that case this method
/// is equivalent to `is_indirect`, but faster.
pub fn is_indirect_first_projection(&self) -> bool {
self.as_ref().is_indirect_first_projection()
}
/// Finds the innermost `Local` from this `Place`, *if* it is either a local itself or
/// a single deref of a local.
#[inline(always)]
pub fn local_or_deref_local(&self) -> Option<Local> {
self.as_ref().local_or_deref_local()
}
/// If this place represents a local variable like `_X` with no
/// projections, return `Some(_X)`.
#[inline(always)]
pub fn as_local(&self) -> Option<Local> {
self.as_ref().as_local()
}
#[inline]
pub fn as_ref(&self) -> PlaceRef<'tcx> {
PlaceRef { local: self.local, projection: &self.projection }
}
/// Iterate over the projections in evaluation order, i.e., the first element is the base with
/// its projection and then subsequently more projections are added.
/// As a concrete example, given the place a.b.c, this would yield:
/// - (a, .b)
/// - (a.b, .c)
///
/// Given a place without projections, the iterator is empty.
#[inline]
pub fn iter_projections(
self,
) -> impl Iterator<Item = (PlaceRef<'tcx>, PlaceElem<'tcx>)> + DoubleEndedIterator {
self.as_ref().iter_projections()
}
/// Generates a new place by appending `more_projections` to the existing ones
/// and interning the result.
pub fn project_deeper(self, more_projections: &[PlaceElem<'tcx>], tcx: TyCtxt<'tcx>) -> Self {
if more_projections.is_empty() {
return self;
}
self.as_ref().project_deeper(more_projections, tcx)
}
}
impl From<Local> for Place<'_> {
#[inline]
fn from(local: Local) -> Self {
Place { local, projection: List::empty() }
}
}
impl<'tcx> PlaceRef<'tcx> {
/// Finds the innermost `Local` from this `Place`, *if* it is either a local itself or
/// a single deref of a local.
pub fn local_or_deref_local(&self) -> Option<Local> {
match *self {
PlaceRef { local, projection: [] }
| PlaceRef { local, projection: [ProjectionElem::Deref] } => Some(local),
_ => None,
}
}
/// Returns `true` if this `Place` contains a `Deref` projection.
///
/// If `Place::is_indirect` returns false, the caller knows that the `Place` refers to the
/// same region of memory as its base.
pub fn is_indirect(&self) -> bool {
self.projection.iter().any(|elem| elem.is_indirect())
}
/// Returns `true` if this `Place`'s first projection is `Deref`.
///
/// This is useful because for MIR phases `AnalysisPhase::PostCleanup` and later,
/// `Deref` projections can only occur as the first projection. In that case this method
/// is equivalent to `is_indirect`, but faster.
pub fn is_indirect_first_projection(&self) -> bool {
// To make sure this is not accidentally used in wrong mir phase
debug_assert!(
self.projection.is_empty() || !self.projection[1..].contains(&PlaceElem::Deref)
);
self.projection.first() == Some(&PlaceElem::Deref)
}
/// If this place represents a local variable like `_X` with no
/// projections, return `Some(_X)`.
#[inline]
pub fn as_local(&self) -> Option<Local> {
match *self {
PlaceRef { local, projection: [] } => Some(local),
_ => None,
}
}
#[inline]
pub fn last_projection(&self) -> Option<(PlaceRef<'tcx>, PlaceElem<'tcx>)> {
if let &[ref proj_base @ .., elem] = self.projection {
Some((PlaceRef { local: self.local, projection: proj_base }, elem))
} else {
None
}
}
/// Iterate over the projections in evaluation order, i.e., the first element is the base with
/// its projection and then subsequently more projections are added.
/// As a concrete example, given the place a.b.c, this would yield:
/// - (a, .b)
/// - (a.b, .c)
///
/// Given a place without projections, the iterator is empty.
#[inline]
pub fn iter_projections(
self,
) -> impl Iterator<Item = (PlaceRef<'tcx>, PlaceElem<'tcx>)> + DoubleEndedIterator {
self.projection.iter().enumerate().map(move |(i, proj)| {
let base = PlaceRef { local: self.local, projection: &self.projection[..i] };
(base, *proj)
})
}
/// Generates a new place by appending `more_projections` to the existing ones
/// and interning the result.
pub fn project_deeper(
self,
more_projections: &[PlaceElem<'tcx>],
tcx: TyCtxt<'tcx>,
) -> Place<'tcx> {
let mut v: Vec<PlaceElem<'tcx>>;
let new_projections = if self.projection.is_empty() {
more_projections
} else {
v = Vec::with_capacity(self.projection.len() + more_projections.len());
v.extend(self.projection);
v.extend(more_projections);
&v
};
Place { local: self.local, projection: tcx.mk_place_elems(new_projections) }
}
}
impl From<Local> for PlaceRef<'_> {
#[inline]
fn from(local: Local) -> Self {
PlaceRef { local, projection: &[] }
}
}
///////////////////////////////////////////////////////////////////////////
// Operands
impl<'tcx> Operand<'tcx> {
/// Convenience helper to make a constant that refers to the fn
/// with given `DefId` and args. Since this is used to synthesize
/// MIR, assumes `user_ty` is None.
pub fn function_handle(
tcx: TyCtxt<'tcx>,
def_id: DefId,
args: impl IntoIterator<Item = GenericArg<'tcx>>,
span: Span,
) -> Self {
let ty = Ty::new_fn_def(tcx, def_id, args);
Operand::Constant(Box::new(Constant {
span,
user_ty: None,
literal: ConstantKind::Val(ConstValue::ZeroSized, ty),
}))
}
pub fn is_move(&self) -> bool {
matches!(self, Operand::Move(..))
}
/// Convenience helper to make a literal-like constant from a given scalar value.
/// Since this is used to synthesize MIR, assumes `user_ty` is None.
pub fn const_from_scalar(
tcx: TyCtxt<'tcx>,
ty: Ty<'tcx>,
val: Scalar,
span: Span,
) -> Operand<'tcx> {
debug_assert!({
let param_env_and_ty = ty::ParamEnv::empty().and(ty);
let type_size = tcx
.layout_of(param_env_and_ty)
.unwrap_or_else(|e| panic!("could not compute layout for {ty:?}: {e:?}"))
.size;
let scalar_size = match val {
Scalar::Int(int) => int.size(),
_ => panic!("Invalid scalar type {val:?}"),
};
scalar_size == type_size
});
Operand::Constant(Box::new(Constant {
span,
user_ty: None,
literal: ConstantKind::Val(ConstValue::Scalar(val), ty),
}))
}
pub fn to_copy(&self) -> Self {
match *self {
Operand::Copy(_) | Operand::Constant(_) => self.clone(),
Operand::Move(place) => Operand::Copy(place),
}
}
/// Returns the `Place` that is the target of this `Operand`, or `None` if this `Operand` is a
/// constant.
pub fn place(&self) -> Option<Place<'tcx>> {
match self {
Operand::Copy(place) | Operand::Move(place) => Some(*place),
Operand::Constant(_) => None,
}
}
/// Returns the `Constant` that is the target of this `Operand`, or `None` if this `Operand` is a
/// place.
pub fn constant(&self) -> Option<&Constant<'tcx>> {
match self {
Operand::Constant(x) => Some(&**x),
Operand::Copy(_) | Operand::Move(_) => None,
}
}
/// Gets the `ty::FnDef` from an operand if it's a constant function item.
///
/// While this is unlikely in general, it's the normal case of what you'll
/// find as the `func` in a [`TerminatorKind::Call`].
pub fn const_fn_def(&self) -> Option<(DefId, GenericArgsRef<'tcx>)> {
let const_ty = self.constant()?.literal.ty();
if let ty::FnDef(def_id, args) = *const_ty.kind() { Some((def_id, args)) } else { None }
}
}
///////////////////////////////////////////////////////////////////////////
/// Rvalues
impl<'tcx> Rvalue<'tcx> {
/// Returns true if rvalue can be safely removed when the result is unused.
#[inline]
pub fn is_safe_to_remove(&self) -> bool {
match self {
// Pointer to int casts may be side-effects due to exposing the provenance.
// While the model is undecided, we should be conservative. See
// <https://www.ralfj.de/blog/2022/04/11/provenance-exposed.html>
Rvalue::Cast(CastKind::PointerExposeAddress, _, _) => false,
Rvalue::Use(_)
| Rvalue::CopyForDeref(_)
| Rvalue::Repeat(_, _)
| Rvalue::Ref(_, _, _)
| Rvalue::ThreadLocalRef(_)
| Rvalue::AddressOf(_, _)
| Rvalue::Len(_)
| Rvalue::Cast(
CastKind::IntToInt
| CastKind::FloatToInt
| CastKind::FloatToFloat
| CastKind::IntToFloat
| CastKind::FnPtrToPtr
| CastKind::PtrToPtr
| CastKind::PointerCoercion(_)
| CastKind::PointerFromExposedAddress
| CastKind::DynStar
| CastKind::Transmute,
_,
_,
)
| Rvalue::BinaryOp(_, _)
| Rvalue::CheckedBinaryOp(_, _)
| Rvalue::NullaryOp(_, _)
| Rvalue::UnaryOp(_, _)
| Rvalue::Discriminant(_)
| Rvalue::Aggregate(_, _)
| Rvalue::ShallowInitBox(_, _) => true,
}
}
}
impl BorrowKind {
pub fn mutability(&self) -> Mutability {
match *self {
BorrowKind::Shared | BorrowKind::Shallow => Mutability::Not,
BorrowKind::Mut { .. } => Mutability::Mut,
}
}
pub fn allows_two_phase_borrow(&self) -> bool {
match *self {
BorrowKind::Shared
| BorrowKind::Shallow
| BorrowKind::Mut { kind: MutBorrowKind::Default | MutBorrowKind::ClosureCapture } => {
false
}
BorrowKind::Mut { kind: MutBorrowKind::TwoPhaseBorrow } => true,
}
}
}
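
A small sketch of the `Place`/`PlaceRef` helpers above for the projection-free case; compiler-internal only, and `demo` is a hypothetical function:

```rust
use rustc_middle::mir::{Local, Place, PlaceRef};

// Hypothetical: exercise the helpers on a place that is just a local.
fn demo(local: Local) {
    let place: Place<'_> = local.into();        // `From<Local> for Place`
    assert_eq!(place.as_local(), Some(local));  // no projections, so this is just the local
    assert!(!place.is_indirect());              // and no `Deref` projection either
    let r: PlaceRef<'_> = place.as_ref();
    assert!(r.last_projection().is_none());     // nothing to peel off
}
```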


@ -3,7 +3,7 @@
//! This is in a dedicated file so that changes to this file can be reviewed more carefully.
//! The intention is that this file only contains datatype declarations, no code.
use super::{BasicBlock, Constant, Local, SwitchTargets, UserTypeProjection};
use super::{BasicBlock, Constant, Local, UserTypeProjection};
use crate::mir::coverage::{CodeRegion, CoverageKind};
use crate::traits::Reveal;
@ -24,6 +24,7 @@ use rustc_span::def_id::LocalDefId;
use rustc_span::symbol::Symbol;
use rustc_span::Span;
use rustc_target::asm::InlineAsmRegOrRegClass;
use smallvec::SmallVec;
/// Represents the "flavors" of MIR.
///
@ -828,6 +829,27 @@ impl TerminatorKind<'_> {
}
}
#[derive(Debug, Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)]
pub struct SwitchTargets {
/// Possible values. The locations to branch to in each case
/// are found in the corresponding indices from the `targets` vector.
pub(super) values: SmallVec<[u128; 1]>,
/// Possible branch sites. The last element of this vector is used
/// for the otherwise branch, so targets.len() == values.len() + 1
/// should hold.
//
// This invariant is quite non-obvious and also could be improved.
// One way to make this invariant is to have something like this instead:
//
// branches: Vec<(ConstInt, BasicBlock)>,
// otherwise: Option<BasicBlock> // exhaustive if None
//
// However we've decided to keep this as-is until we figure a case
// where some other approach seems to be strictly better than other.
pub(super) targets: SmallVec<[BasicBlock; 2]>,
}
/// Action to be taken when a stack unwind happens.
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
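
The `targets.len() == values.len() + 1` invariant documented on `SwitchTargets` is easiest to see on a concrete `switchInt`. A compiler-internal sketch (`example_targets` is hypothetical):

```rust
use rustc_middle::mir::{BasicBlock, SwitchTargets};

// Hypothetical: targets for `switchInt(x)` with arms `0 => bb1, 7 => bb2, otherwise => bb3`.
// This stores values = [0, 7] and targets = [bb1, bb2, bb3]; the last entry is `otherwise`.
fn example_targets(bb1: BasicBlock, bb2: BasicBlock, bb3: BasicBlock) -> SwitchTargets {
    let targets = SwitchTargets::new([(0u128, bb1), (7, bb2)].into_iter(), bb3);
    debug_assert_eq!(targets.otherwise(), bb3);
    targets
}
```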


@ -1,39 +1,16 @@
/// Functionality for terminators and helper types that appear in terminators.
use rustc_hir::LangItem;
use smallvec::SmallVec;
use super::{BasicBlock, InlineAsmOperand, Operand, SourceInfo, TerminatorKind, UnwindAction};
use rustc_ast::InlineAsmTemplatePiece;
pub use rustc_ast::Mutability;
use rustc_macros::HashStable;
use std::borrow::Cow;
use std::fmt::{self, Debug, Formatter, Write};
use std::iter;
use std::slice;
pub use super::query::*;
use super::*;
#[derive(Debug, Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)]
pub struct SwitchTargets {
/// Possible values. The locations to branch to in each case
/// are found in the corresponding indices from the `targets` vector.
values: SmallVec<[u128; 1]>,
/// Possible branch sites. The last element of this vector is used
/// for the otherwise branch, so targets.len() == values.len() + 1
/// should hold.
//
// This invariant is quite non-obvious and also could be improved.
// One way to make this invariant is to have something like this instead:
//
// branches: Vec<(ConstInt, BasicBlock)>,
// otherwise: Option<BasicBlock> // exhaustive if None
//
// However we've decided to keep this as-is until we figure a case
// where some other approach seems to be strictly better than other.
targets: SmallVec<[BasicBlock; 2]>,
}
impl SwitchTargets {
/// Creates switch targets from an iterator of values and target blocks.
///
@ -135,6 +112,168 @@ impl UnwindTerminateReason {
}
}
impl<O> AssertKind<O> {
/// Returns true if this an overflow checking assertion controlled by -C overflow-checks.
pub fn is_optional_overflow_check(&self) -> bool {
use AssertKind::*;
use BinOp::*;
matches!(self, OverflowNeg(..) | Overflow(Add | Sub | Mul | Shl | Shr, ..))
}
/// Get the message that is printed at runtime when this assertion fails.
///
/// The caller is expected to handle `BoundsCheck` and `MisalignedPointerDereference` by
/// invoking the appropriate lang item (panic_bounds_check/panic_misaligned_pointer_dereference)
/// instead of printing a static message.
pub fn description(&self) -> &'static str {
use AssertKind::*;
match self {
Overflow(BinOp::Add, _, _) => "attempt to add with overflow",
Overflow(BinOp::Sub, _, _) => "attempt to subtract with overflow",
Overflow(BinOp::Mul, _, _) => "attempt to multiply with overflow",
Overflow(BinOp::Div, _, _) => "attempt to divide with overflow",
Overflow(BinOp::Rem, _, _) => "attempt to calculate the remainder with overflow",
OverflowNeg(_) => "attempt to negate with overflow",
Overflow(BinOp::Shr, _, _) => "attempt to shift right with overflow",
Overflow(BinOp::Shl, _, _) => "attempt to shift left with overflow",
Overflow(op, _, _) => bug!("{:?} cannot overflow", op),
DivisionByZero(_) => "attempt to divide by zero",
RemainderByZero(_) => "attempt to calculate the remainder with a divisor of zero",
ResumedAfterReturn(GeneratorKind::Gen) => "generator resumed after completion",
ResumedAfterReturn(GeneratorKind::Async(_)) => "`async fn` resumed after completion",
ResumedAfterPanic(GeneratorKind::Gen) => "generator resumed after panicking",
ResumedAfterPanic(GeneratorKind::Async(_)) => "`async fn` resumed after panicking",
BoundsCheck { .. } | MisalignedPointerDereference { .. } => {
bug!("Unexpected AssertKind")
}
}
}
/// Format the message arguments for the `assert(cond, msg..)` terminator in MIR printing.
///
/// Needs to be kept in sync with the run-time behavior (which is defined by
/// `AssertKind::description` and the lang items mentioned in its docs).
/// Note that we deliberately show more details here than we do at runtime, such as the actual
/// numbers that overflowed -- it is much easier to do so here than at runtime.
pub fn fmt_assert_args<W: fmt::Write>(&self, f: &mut W) -> fmt::Result
where
O: Debug,
{
use AssertKind::*;
match self {
BoundsCheck { ref len, ref index } => write!(
f,
"\"index out of bounds: the length is {{}} but the index is {{}}\", {len:?}, {index:?}"
),
OverflowNeg(op) => {
write!(f, "\"attempt to negate `{{}}`, which would overflow\", {op:?}")
}
DivisionByZero(op) => write!(f, "\"attempt to divide `{{}}` by zero\", {op:?}"),
RemainderByZero(op) => write!(
f,
"\"attempt to calculate the remainder of `{{}}` with a divisor of zero\", {op:?}"
),
Overflow(BinOp::Add, l, r) => write!(
f,
"\"attempt to compute `{{}} + {{}}`, which would overflow\", {l:?}, {r:?}"
),
Overflow(BinOp::Sub, l, r) => write!(
f,
"\"attempt to compute `{{}} - {{}}`, which would overflow\", {l:?}, {r:?}"
),
Overflow(BinOp::Mul, l, r) => write!(
f,
"\"attempt to compute `{{}} * {{}}`, which would overflow\", {l:?}, {r:?}"
),
Overflow(BinOp::Div, l, r) => write!(
f,
"\"attempt to compute `{{}} / {{}}`, which would overflow\", {l:?}, {r:?}"
),
Overflow(BinOp::Rem, l, r) => write!(
f,
"\"attempt to compute the remainder of `{{}} % {{}}`, which would overflow\", {l:?}, {r:?}"
),
Overflow(BinOp::Shr, _, r) => {
write!(f, "\"attempt to shift right by `{{}}`, which would overflow\", {r:?}")
}
Overflow(BinOp::Shl, _, r) => {
write!(f, "\"attempt to shift left by `{{}}`, which would overflow\", {r:?}")
}
MisalignedPointerDereference { required, found } => {
write!(
f,
"\"misaligned pointer dereference: address must be a multiple of {{}} but is {{}}\", {required:?}, {found:?}"
)
}
_ => write!(f, "\"{}\"", self.description()),
}
}
/// Format the diagnostic message for use in a lint (e.g. when the assertion fails during const-eval).
///
/// Needs to be kept in sync with the run-time behavior (which is defined by
/// `AssertKind::description` and the lang items mentioned in its docs).
/// Note that we deliberately show more details here than we do at runtime, such as the actual
/// numbers that overflowed -- it is much easier to do so here than at runtime.
pub fn diagnostic_message(&self) -> DiagnosticMessage {
use crate::fluent_generated::*;
use AssertKind::*;
match self {
BoundsCheck { .. } => middle_bounds_check,
Overflow(BinOp::Shl, _, _) => middle_assert_shl_overflow,
Overflow(BinOp::Shr, _, _) => middle_assert_shr_overflow,
Overflow(_, _, _) => middle_assert_op_overflow,
OverflowNeg(_) => middle_assert_overflow_neg,
DivisionByZero(_) => middle_assert_divide_by_zero,
RemainderByZero(_) => middle_assert_remainder_by_zero,
ResumedAfterReturn(GeneratorKind::Async(_)) => middle_assert_async_resume_after_return,
ResumedAfterReturn(GeneratorKind::Gen) => middle_assert_generator_resume_after_return,
ResumedAfterPanic(GeneratorKind::Async(_)) => middle_assert_async_resume_after_panic,
ResumedAfterPanic(GeneratorKind::Gen) => middle_assert_generator_resume_after_panic,
MisalignedPointerDereference { .. } => middle_assert_misaligned_ptr_deref,
}
}
pub fn add_args(self, adder: &mut dyn FnMut(Cow<'static, str>, DiagnosticArgValue<'static>))
where
O: fmt::Debug,
{
use AssertKind::*;
macro_rules! add {
($name: expr, $value: expr) => {
adder($name.into(), $value.into_diagnostic_arg());
};
}
match self {
BoundsCheck { len, index } => {
add!("len", format!("{len:?}"));
add!("index", format!("{index:?}"));
}
Overflow(BinOp::Shl | BinOp::Shr, _, val)
| DivisionByZero(val)
| RemainderByZero(val)
| OverflowNeg(val) => {
add!("val", format!("{val:#?}"));
}
Overflow(binop, left, right) => {
add!("op", binop.to_hir_binop().as_str());
add!("left", format!("{left:#?}"));
add!("right", format!("{right:#?}"));
}
ResumedAfterReturn(_) | ResumedAfterPanic(_) => {}
MisalignedPointerDereference { required, found } => {
add!("required", format!("{required:#?}"));
add!("found", format!("{found:#?}"));
}
}
}
}
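For illustration only (not part of this commit), a minimal sketch of what `fmt_assert_args` produces for an addition overflow; the `&str` operands here are made-up stand-ins for the real MIR operands, which would normally be `Operand<'tcx>`:

use rustc_middle::mir::{AssertKind, BinOp};

fn overflow_message_sketch() -> String {
    // "_3" and "_4" are placeholder names standing in for the overflowing operands.
    let msg: AssertKind<&str> = AssertKind::Overflow(BinOp::Add, "_3", "_4");
    let mut out = String::new();
    // Writing into a String cannot fail, so the fmt::Result can be unwrapped.
    msg.fmt_assert_args(&mut out).unwrap();
    // `out` now holds the detailed compile-time form, with the operands
    // Debug-printed after the message string:
    //   "attempt to compute `{} + {}`, which would overflow", "_3", "_4"
    out
}

As the doc comment above notes, this compile-time printer deliberately includes the operand values, which the runtime message does not.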
#[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub struct Terminator<'tcx> {
pub source_info: SourceInfo,
@ -299,187 +438,6 @@ impl<'tcx> TerminatorKind<'tcx> {
}
}
impl<'tcx> Debug for TerminatorKind<'tcx> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
self.fmt_head(fmt)?;
let successor_count = self.successors().count();
let labels = self.fmt_successor_labels();
assert_eq!(successor_count, labels.len());
// `Cleanup` is already included in successors
let show_unwind = !matches!(self.unwind(), None | Some(UnwindAction::Cleanup(_)));
let fmt_unwind = |fmt: &mut Formatter<'_>| -> fmt::Result {
write!(fmt, "unwind ")?;
match self.unwind() {
// Not needed or included in successors
None | Some(UnwindAction::Cleanup(_)) => unreachable!(),
Some(UnwindAction::Continue) => write!(fmt, "continue"),
Some(UnwindAction::Unreachable) => write!(fmt, "unreachable"),
Some(UnwindAction::Terminate(reason)) => {
write!(fmt, "terminate({})", reason.as_short_str())
}
}
};
match (successor_count, show_unwind) {
(0, false) => Ok(()),
(0, true) => {
write!(fmt, " -> ")?;
fmt_unwind(fmt)
}
(1, false) => write!(fmt, " -> {:?}", self.successors().next().unwrap()),
_ => {
write!(fmt, " -> [")?;
for (i, target) in self.successors().enumerate() {
if i > 0 {
write!(fmt, ", ")?;
}
write!(fmt, "{}: {:?}", labels[i], target)?;
}
if show_unwind {
write!(fmt, ", ")?;
fmt_unwind(fmt)?;
}
write!(fmt, "]")
}
}
}
}
impl<'tcx> TerminatorKind<'tcx> {
/// Writes the "head" part of the terminator; that is, its name and the data it uses to pick the
/// successor basic block, if any. The only information not included is the list of possible
/// successors, which may be rendered differently between the text and the graphviz format.
pub fn fmt_head<W: Write>(&self, fmt: &mut W) -> fmt::Result {
use self::TerminatorKind::*;
match self {
Goto { .. } => write!(fmt, "goto"),
SwitchInt { discr, .. } => write!(fmt, "switchInt({discr:?})"),
Return => write!(fmt, "return"),
GeneratorDrop => write!(fmt, "generator_drop"),
UnwindResume => write!(fmt, "resume"),
UnwindTerminate(reason) => {
write!(fmt, "abort({})", reason.as_short_str())
}
Yield { value, resume_arg, .. } => write!(fmt, "{resume_arg:?} = yield({value:?})"),
Unreachable => write!(fmt, "unreachable"),
Drop { place, .. } => write!(fmt, "drop({place:?})"),
Call { func, args, destination, .. } => {
write!(fmt, "{destination:?} = ")?;
write!(fmt, "{func:?}(")?;
for (index, arg) in args.iter().enumerate() {
if index > 0 {
write!(fmt, ", ")?;
}
write!(fmt, "{arg:?}")?;
}
write!(fmt, ")")
}
Assert { cond, expected, msg, .. } => {
write!(fmt, "assert(")?;
if !expected {
write!(fmt, "!")?;
}
write!(fmt, "{cond:?}, ")?;
msg.fmt_assert_args(fmt)?;
write!(fmt, ")")
}
FalseEdge { .. } => write!(fmt, "falseEdge"),
FalseUnwind { .. } => write!(fmt, "falseUnwind"),
InlineAsm { template, ref operands, options, .. } => {
write!(fmt, "asm!(\"{}\"", InlineAsmTemplatePiece::to_string(template))?;
for op in operands {
write!(fmt, ", ")?;
let print_late = |&late| if late { "late" } else { "" };
match op {
InlineAsmOperand::In { reg, value } => {
write!(fmt, "in({reg}) {value:?}")?;
}
InlineAsmOperand::Out { reg, late, place: Some(place) } => {
write!(fmt, "{}out({}) {:?}", print_late(late), reg, place)?;
}
InlineAsmOperand::Out { reg, late, place: None } => {
write!(fmt, "{}out({}) _", print_late(late), reg)?;
}
InlineAsmOperand::InOut {
reg,
late,
in_value,
out_place: Some(out_place),
} => {
write!(
fmt,
"in{}out({}) {:?} => {:?}",
print_late(late),
reg,
in_value,
out_place
)?;
}
InlineAsmOperand::InOut { reg, late, in_value, out_place: None } => {
write!(fmt, "in{}out({}) {:?} => _", print_late(late), reg, in_value)?;
}
InlineAsmOperand::Const { value } => {
write!(fmt, "const {value:?}")?;
}
InlineAsmOperand::SymFn { value } => {
write!(fmt, "sym_fn {value:?}")?;
}
InlineAsmOperand::SymStatic { def_id } => {
write!(fmt, "sym_static {def_id:?}")?;
}
}
}
write!(fmt, ", options({options:?}))")
}
}
}
/// Returns the list of labels for the edges to the successor basic blocks.
pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
use self::TerminatorKind::*;
match *self {
Return | UnwindResume | UnwindTerminate(_) | Unreachable | GeneratorDrop => vec![],
Goto { .. } => vec!["".into()],
SwitchInt { ref targets, .. } => targets
.values
.iter()
.map(|&u| Cow::Owned(u.to_string()))
.chain(iter::once("otherwise".into()))
.collect(),
Call { target: Some(_), unwind: UnwindAction::Cleanup(_), .. } => {
vec!["return".into(), "unwind".into()]
}
Call { target: Some(_), unwind: _, .. } => vec!["return".into()],
Call { target: None, unwind: UnwindAction::Cleanup(_), .. } => vec!["unwind".into()],
Call { target: None, unwind: _, .. } => vec![],
Yield { drop: Some(_), .. } => vec!["resume".into(), "drop".into()],
Yield { drop: None, .. } => vec!["resume".into()],
Drop { unwind: UnwindAction::Cleanup(_), .. } => vec!["return".into(), "unwind".into()],
Drop { unwind: _, .. } => vec!["return".into()],
Assert { unwind: UnwindAction::Cleanup(_), .. } => {
vec!["success".into(), "unwind".into()]
}
Assert { unwind: _, .. } => vec!["success".into()],
FalseEdge { .. } => vec!["real".into(), "imaginary".into()],
FalseUnwind { unwind: UnwindAction::Cleanup(_), .. } => {
vec!["real".into(), "unwind".into()]
}
FalseUnwind { unwind: _, .. } => vec!["real".into()],
InlineAsm { destination: Some(_), unwind: UnwindAction::Cleanup(_), .. } => {
vec!["return".into(), "unwind".into()]
}
InlineAsm { destination: Some(_), unwind: _, .. } => {
vec!["return".into()]
}
InlineAsm { destination: None, unwind: UnwindAction::Cleanup(_), .. } => {
vec!["unwind".into()]
}
InlineAsm { destination: None, unwind: _, .. } => vec![],
}
}
}
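Not part of this commit, but a sketch of how `fmt_successor_labels` pairs with `successors()`: the Debug impl above asserts that the two have the same length, so they can be zipped to label each outgoing edge. The unqualified `TerminatorKind` and `BasicBlock` below assume the surrounding `rustc_middle::mir` context.

use std::borrow::Cow;

// Sketch: pair each successor basic block with its printed edge label.
fn labeled_successors<'tcx>(
    term: &TerminatorKind<'tcx>,
) -> Vec<(Cow<'static, str>, BasicBlock)> {
    term.fmt_successor_labels()
        .into_iter()
        .zip(term.successors())
        .collect()
}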
#[derive(Copy, Clone, Debug)]
pub enum TerminatorEdges<'mir, 'tcx> {
/// For terminators that have no successor, like `return`.

View file

@ -121,16 +121,12 @@ impl EraseType for Result<mir::ConstantKind<'_>, mir::interpret::LitToConstError
[u8; size_of::<Result<mir::ConstantKind<'static>, mir::interpret::LitToConstError>>()];
}
impl EraseType for Result<mir::interpret::ConstAlloc<'_>, mir::interpret::ErrorHandled> {
type Result = [u8; size_of::<
Result<mir::interpret::ConstAlloc<'static>, mir::interpret::ErrorHandled>,
>()];
impl EraseType for Result<mir::ConstAlloc<'_>, mir::interpret::ErrorHandled> {
type Result = [u8; size_of::<Result<mir::ConstAlloc<'static>, mir::interpret::ErrorHandled>>()];
}
impl EraseType for Result<mir::interpret::ConstValue<'_>, mir::interpret::ErrorHandled> {
type Result = [u8; size_of::<
Result<mir::interpret::ConstValue<'static>, mir::interpret::ErrorHandled>,
>()];
impl EraseType for Result<mir::ConstValue<'_>, mir::interpret::ErrorHandled> {
type Result = [u8; size_of::<Result<mir::ConstValue<'static>, mir::interpret::ErrorHandled>>()];
}
impl EraseType for Result<Option<ty::ValTree<'_>>, mir::interpret::ErrorHandled> {
@ -317,8 +313,8 @@ tcx_lifetime! {
rustc_middle::middle::exported_symbols::ExportedSymbol,
rustc_middle::mir::ConstantKind,
rustc_middle::mir::DestructuredConstant,
rustc_middle::mir::interpret::ConstAlloc,
rustc_middle::mir::interpret::ConstValue,
rustc_middle::mir::ConstAlloc,
rustc_middle::mir::ConstValue,
rustc_middle::mir::interpret::GlobalId,
rustc_middle::mir::interpret::LitToConstInput,
rustc_middle::traits::query::MethodAutoderefStepsResult,

View file

@ -2,7 +2,6 @@
use crate::infer::canonical::Canonical;
use crate::mir;
use crate::mir::interpret::ConstValue;
use crate::traits;
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::layout::{TyAndLayout, ValidityRequirement};
@ -369,7 +368,7 @@ impl<'tcx> Key for (ty::Const<'tcx>, FieldIdx) {
}
}
impl<'tcx> Key for (ConstValue<'tcx>, Ty<'tcx>) {
impl<'tcx> Key for (mir::ConstValue<'tcx>, Ty<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {
@ -377,7 +376,7 @@ impl<'tcx> Key for (ConstValue<'tcx>, Ty<'tcx>) {
}
}
impl<'tcx> Key for mir::interpret::ConstAlloc<'tcx> {
impl<'tcx> Key for mir::ConstAlloc<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
fn default_span(&self, _: TyCtxt<'_>) -> Span {

View file

@ -21,7 +21,7 @@ use crate::middle::stability::{self, DeprecationEntry};
use crate::mir;
use crate::mir::interpret::GlobalId;
use crate::mir::interpret::{
ConstValue, EvalToAllocationRawResult, EvalToConstValueResult, EvalToValTreeResult,
EvalToAllocationRawResult, EvalToConstValueResult, EvalToValTreeResult,
};
use crate::mir::interpret::{LitToConstError, LitToConstInput};
use crate::mir::mono::CodegenUnit;
@ -1091,7 +1091,7 @@ rustc_queries! {
}
/// Converts a type level constant value into `ConstValue`
query valtree_to_const_val(key: (Ty<'tcx>, ty::ValTree<'tcx>)) -> ConstValue<'tcx> {
query valtree_to_const_val(key: (Ty<'tcx>, ty::ValTree<'tcx>)) -> mir::ConstValue<'tcx> {
desc { "converting type-level constant value to mir constant value"}
}
@ -1104,14 +1104,14 @@ rustc_queries! {
/// Tries to destructure an `mir::ConstantKind` ADT or array into its variant index
/// and its field values. This should only be used for pretty printing.
query try_destructure_mir_constant_for_diagnostics(
key: (ConstValue<'tcx>, Ty<'tcx>)
key: (mir::ConstValue<'tcx>, Ty<'tcx>)
) -> Option<mir::DestructuredConstant<'tcx>> {
desc { "destructuring MIR constant"}
no_hash
eval_always
}
query const_caller_location(key: (rustc_span::Symbol, u32, u32)) -> ConstValue<'tcx> {
query const_caller_location(key: (rustc_span::Symbol, u32, u32)) -> mir::ConstValue<'tcx> {
desc { "getting a &core::panic::Location referring to a span" }
}
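For orientation (an assumption, not shown in this diff): query declarations like the ones above become methods on `TyCtxt`, invoked with the declared key type, so the new `mir::ConstValue` return type surfaces directly at call sites. A hypothetical caller:

use rustc_middle::mir;
use rustc_middle::ty::{self, Ty, TyCtxt};

// Hypothetical wrapper: convert a type-level constant into a MIR constant value
// by invoking the `valtree_to_const_val` query with its tuple key.
fn valtree_to_mir_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: Ty<'tcx>,
    valtree: ty::ValTree<'tcx>,
) -> mir::ConstValue<'tcx> {
    tcx.valtree_to_const_val((ty, valtree))
}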

View file

@ -1713,6 +1713,21 @@ pub trait PrettyPrinter<'tcx>:
}
}
pub(crate) fn pretty_print_const<'tcx>(
c: ty::Const<'tcx>,
fmt: &mut fmt::Formatter<'_>,
print_types: bool,
) -> fmt::Result {
ty::tls::with(|tcx| {
let literal = tcx.lift(c).unwrap();
let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS);
cx.print_alloc_ids = true;
let cx = cx.pretty_print_const(literal, print_types)?;
fmt.write_str(&cx.into_buffer())?;
Ok(())
})
}
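A sketch (assumption, not part of this commit) of how the crate-local helper above might be consumed: `ConstDisplay` is a hypothetical newtype whose `Display` impl forwards to it, with `print_types` controlling whether the constant's type is printed alongside its value.

// Hypothetical newtype wrapper; would need to live in this crate since the
// helper is pub(crate).
struct ConstDisplay<'tcx>(ty::Const<'tcx>);

impl<'tcx> fmt::Display for ConstDisplay<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Forward to the helper, printing the type as well as the value.
        pretty_print_const(self.0, f, /* print_types */ true)
    }
}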
// HACK(eddyb) boxed to avoid moving around a large struct by-value.
pub struct FmtPrinter<'a, 'tcx>(Box<FmtPrinterData<'a, 'tcx>>);

View file

@ -1,4 +1,4 @@
use rustc_middle::mir::interpret::{ConstValue, Scalar};
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::tcx::PlaceTy;
use rustc_middle::ty::cast::mir_cast_kind;
use rustc_middle::{mir::*, thir::*, ty};

View file

@ -3,9 +3,7 @@
use crate::build::{parse_float_into_constval, Builder};
use rustc_ast as ast;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{
Allocation, ConstValue, LitToConstError, LitToConstInput, Scalar,
};
use rustc_middle::mir::interpret::{Allocation, LitToConstError, LitToConstInput, Scalar};
use rustc_middle::mir::*;
use rustc_middle::thir::*;
use rustc_middle::ty::{

View file

@ -15,7 +15,6 @@ use rustc_index::{Idx, IndexSlice, IndexVec};
use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
use rustc_middle::hir::place::PlaceBase as HirPlaceBase;
use rustc_middle::middle::region;
use rustc_middle::mir::interpret::ConstValue;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::*;
use rustc_middle::thir::{

View file

@ -18,7 +18,7 @@ use rustc_hir::pat_util::EnumerateAndAdjustIterator;
use rustc_hir::RangeEnd;
use rustc_index::Idx;
use rustc_middle::mir::interpret::{
ConstValue, ErrorHandled, GlobalId, LitToConstError, LitToConstInput, Scalar,
ErrorHandled, GlobalId, LitToConstError, LitToConstInput, Scalar,
};
use rustc_middle::mir::{self, ConstantKind, UserTypeProjection};
use rustc_middle::mir::{BorrowKind, Mutability};
@ -855,8 +855,8 @@ pub(crate) fn compare_const_vals<'tcx>(
ty::Float(_) | ty::Int(_) => {} // require special handling, see below
_ => match (a, b) {
(
mir::ConstantKind::Val(ConstValue::Scalar(Scalar::Int(a)), _a_ty),
mir::ConstantKind::Val(ConstValue::Scalar(Scalar::Int(b)), _b_ty),
mir::ConstantKind::Val(mir::ConstValue::Scalar(Scalar::Int(a)), _a_ty),
mir::ConstantKind::Val(mir::ConstValue::Scalar(Scalar::Int(b)), _b_ty),
) => return Some(a.cmp(&b)),
(mir::ConstantKind::Ty(a), mir::ConstantKind::Ty(b)) => {
return Some(a.kind().cmp(&b.kind()));

View file

@ -4,7 +4,7 @@ use rustc_hir::lang_items::LangItem;
use rustc_index::IndexVec;
use rustc_middle::mir::*;
use rustc_middle::mir::{
interpret::{ConstValue, Scalar},
interpret::Scalar,
visit::{PlaceContext, Visitor},
};
use rustc_middle::ty::{Ty, TyCtxt, TypeAndMut};

View file

@ -22,8 +22,8 @@ use rustc_target::spec::abi::Abi as CallAbi;
use crate::dataflow_const_prop::Patch;
use crate::MirPass;
use rustc_const_eval::interpret::{
self, compile_time_machine, AllocId, ConstAllocation, ConstValue, FnArg, Frame, ImmTy,
Immediate, InterpCx, InterpResult, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, StackPopCleanup,
self, compile_time_machine, AllocId, ConstAllocation, FnArg, Frame, ImmTy, Immediate, InterpCx,
InterpResult, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, StackPopCleanup,
};
/// The maximum number of bytes that we'll allocate space for a local or the return value.

View file

@ -6,7 +6,7 @@ use rustc_const_eval::const_eval::CheckAlignment;
use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, Projectable};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::DefKind;
use rustc_middle::mir::interpret::{AllocId, ConstAllocation, ConstValue, InterpResult, Scalar};
use rustc_middle::mir::interpret::{AllocId, ConstAllocation, InterpResult, Scalar};
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::layout::TyAndLayout;

View file

@ -153,7 +153,7 @@ impl EnumSizeOpt {
span,
user_ty: None,
literal: ConstantKind::Val(
interpret::ConstValue::Indirect { alloc_id, offset: Size::ZERO },
ConstValue::Indirect { alloc_id, offset: Size::ZERO },
tmp_ty,
),
};

View file

@ -1,7 +1,6 @@
//! Removes operations on ZST places, and convert ZST operands to constants.
use crate::MirPass;
use rustc_middle::mir::interpret::ConstValue;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, TyCtxt};

View file

@ -170,8 +170,7 @@ use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId};
use rustc_hir::lang_items::LangItem;
use rustc_middle::mir::interpret::{AllocId, ConstValue};
use rustc_middle::mir::interpret::{ErrorHandled, GlobalAlloc, Scalar};
use rustc_middle::mir::interpret::{AllocId, ErrorHandled, GlobalAlloc, Scalar};
use rustc_middle::mir::mono::{InstantiationMode, MonoItem};
use rustc_middle::mir::visit::Visitor as MirVisitor;
use rustc_middle::mir::{self, Local, Location};
@ -1442,13 +1441,15 @@ fn collect_used_items<'tcx>(
#[instrument(skip(tcx, output), level = "debug")]
fn collect_const_value<'tcx>(
tcx: TyCtxt<'tcx>,
value: ConstValue<'tcx>,
value: mir::ConstValue<'tcx>,
output: &mut MonoItems<'tcx>,
) {
match value {
ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_alloc(tcx, ptr.provenance, output),
ConstValue::Indirect { alloc_id, .. } => collect_alloc(tcx, alloc_id, output),
ConstValue::Slice { data, start: _, end: _ } => {
mir::ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => {
collect_alloc(tcx, ptr.provenance, output)
}
mir::ConstValue::Indirect { alloc_id, .. } => collect_alloc(tcx, alloc_id, output),
mir::ConstValue::Slice { data, start: _, end: _ } => {
for &id in data.inner().provenance().ptrs().values() {
collect_alloc(tcx, id, output);
}

View file

@ -1,4 +1,7 @@
use rustc_middle::mir::interpret::{alloc_range, AllocRange, ConstValue, Pointer};
use rustc_middle::mir::{
interpret::{alloc_range, AllocRange, Pointer},
ConstValue,
};
use crate::{
rustc_smir::{Stable, Tables},