
refactor(rustc_middle): Substs -> GenericArg

Mahdi Dibaiee 2023-07-11 22:35:29 +01:00
parent df5c2cf9bc
commit e55583c4b8
466 changed files with 4574 additions and 4604 deletions
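
The rename is mechanical throughout the diff: `InternalSubsts` becomes `GenericArgs`, `SubstsRef` becomes `GenericArgsRef`, `GeneratorSubsts` becomes `GeneratorArgs`, `tcx.mk_substs` becomes `tcx.mk_args`, and `EarlyBinder::subst` / `subst_identity` become `instantiate` / `instantiate_identity`, with local `substs` bindings renamed to `args`. As a rough mental model of what "instantiate" means here, the toy sketch below replaces generic parameters with concrete arguments; these are hypothetical stand-in types for illustration, not rustc's real `EarlyBinder`, `GenericArgs`, or `Ty`.

// Toy sketch only: stand-in types, not rustc internals.
#[derive(Debug, Clone)]
enum Ty {
    Param(usize), // a generic parameter such as `T`, identified by index
    Ref(Box<Ty>), // `&T`
    Unit,
}

/// A value that still mentions generic parameters ("early bound").
struct EarlyBinder<T>(T);

impl EarlyBinder<Ty> {
    fn bind(value: Ty) -> Self {
        EarlyBinder(value)
    }

    /// Replace each `Param(i)` with `args[i]` — the operation the old API
    /// called `subst` and the new API calls `instantiate`.
    fn instantiate(&self, args: &[Ty]) -> Ty {
        fn go(ty: &Ty, args: &[Ty]) -> Ty {
            match ty {
                Ty::Param(i) => args[*i].clone(),
                Ty::Ref(inner) => Ty::Ref(Box::new(go(inner, args))),
                Ty::Unit => Ty::Unit,
            }
        }
        go(&self.0, args)
    }

    /// Keep the parameters as themselves — analogous to `instantiate_identity`
    /// (formerly `subst_identity`).
    fn instantiate_identity(&self) -> Ty {
        self.0.clone()
    }
}

fn main() {
    let bound = EarlyBinder::bind(Ty::Ref(Box::new(Ty::Param(0))));
    println!("{:?}", bound.instantiate(&[Ty::Unit])); // Ref(Unit)
    println!("{:?}", bound.instantiate_identity());   // Ref(Param(0))
}

In the hunks below, every `.subst(tcx, substs)` call therefore becomes `.instantiate(tcx, args)` with unchanged semantics; only the naming changes.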


@ -14,7 +14,7 @@ use rustc_middle::mir::visit::{
};
use rustc_middle::mir::*;
use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
use rustc_middle::ty::InternalSubsts;
use rustc_middle::ty::GenericArgs;
use rustc_middle::ty::{self, ConstKind, Instance, ParamEnv, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::{def_id::DefId, Span, DUMMY_SP};
use rustc_target::abi::{self, Align, HasDataLayout, Size, TargetDataLayout};
@ -87,7 +87,7 @@ impl<'tcx> MirPass<'tcx> for ConstProp {
return;
}
let is_generator = tcx.type_of(def_id.to_def_id()).subst_identity().is_generator();
let is_generator = tcx.type_of(def_id.to_def_id()).instantiate_identity().is_generator();
// FIXME(welseywiser) const prop doesn't work on generators because of query cycles
// computing their layout.
if is_generator {
@ -338,7 +338,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
tcx: TyCtxt<'tcx>,
) -> ConstPropagator<'mir, 'tcx> {
let def_id = body.source.def_id();
let substs = &InternalSubsts::identity_for_item(tcx, def_id);
let args = &GenericArgs::identity_for_item(tcx, def_id);
let param_env = tcx.param_env_reveal_all_normalized(def_id);
let can_const_prop = CanConstProp::check(tcx, param_env, body);
@ -350,7 +350,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
);
let ret_layout = ecx
.layout_of(body.bound_return_ty().subst(tcx, substs))
.layout_of(body.bound_return_ty().instantiate(tcx, args))
.ok()
// Don't bother allocating memory for large values.
// I don't know how return types can seem to be unsized but this happens in the
@ -366,7 +366,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
.into();
ecx.push_stack_frame(
Instance::new(def_id, substs),
Instance::new(def_id, args),
dummy_body,
&ret,
StackPopCleanup::Root { cleanup: false },


@ -16,7 +16,7 @@ use rustc_index::bit_set::BitSet;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
use rustc_middle::ty::InternalSubsts;
use rustc_middle::ty::GenericArgs;
use rustc_middle::ty::{
self, ConstInt, Instance, ParamEnv, ScalarInt, Ty, TyCtxt, TypeVisitableExt,
};
@ -55,7 +55,7 @@ impl<'tcx> MirLint<'tcx> for ConstProp {
return;
}
let is_generator = tcx.type_of(def_id.to_def_id()).subst_identity().is_generator();
let is_generator = tcx.type_of(def_id.to_def_id()).instantiate_identity().is_generator();
// FIXME(welseywiser) const prop doesn't work on generators because of query cycles
// computing their layout.
if is_generator {
@ -171,7 +171,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
tcx: TyCtxt<'tcx>,
) -> ConstPropagator<'mir, 'tcx> {
let def_id = body.source.def_id();
let substs = &InternalSubsts::identity_for_item(tcx, def_id);
let args = &GenericArgs::identity_for_item(tcx, def_id);
let param_env = tcx.param_env_reveal_all_normalized(def_id);
let can_const_prop = CanConstProp::check(tcx, param_env, body);
@ -183,7 +183,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
);
let ret_layout = ecx
.layout_of(body.bound_return_ty().subst(tcx, substs))
.layout_of(body.bound_return_ty().instantiate(tcx, args))
.ok()
// Don't bother allocating memory for large values.
// I don't know how return types can seem to be unsized but this happens in the
@ -199,7 +199,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
.into();
ecx.push_stack_frame(
Instance::new(def_id, substs),
Instance::new(def_id, args),
dummy_body,
&ret,
StackPopCleanup::Root { cleanup: false },


@ -166,7 +166,7 @@ pub fn deduced_param_attrs<'tcx>(
// Codegen won't use this information for anything if all the function parameters are passed
// directly. Detect that and bail, for compilation speed.
let fn_ty = tcx.type_of(def_id).subst_identity();
let fn_ty = tcx.type_of(def_id).instantiate_identity();
if matches!(fn_ty.kind(), ty::FnDef(..)) {
if fn_ty
.fn_sig(tcx)


@ -18,9 +18,9 @@ pub fn build_ptr_tys<'tcx>(
unique_did: DefId,
nonnull_did: DefId,
) -> (Ty<'tcx>, Ty<'tcx>, Ty<'tcx>) {
let substs = tcx.mk_substs(&[pointee.into()]);
let unique_ty = tcx.type_of(unique_did).subst(tcx, substs);
let nonnull_ty = tcx.type_of(nonnull_did).subst(tcx, substs);
let args = tcx.mk_args(&[pointee.into()]);
let unique_ty = tcx.type_of(unique_did).instantiate(tcx, args);
let nonnull_ty = tcx.type_of(nonnull_did).instantiate(tcx, args);
let ptr_ty = Ty::new_imm_ptr(tcx, pointee);
(unique_ty, nonnull_ty, ptr_ty)
@ -95,7 +95,8 @@ impl<'tcx> MirPass<'tcx> for ElaborateBoxDerefs {
let unique_did =
tcx.adt_def(def_id).non_enum_variant().fields[FieldIdx::from_u32(0)].did;
let Some(nonnull_def) = tcx.type_of(unique_did).subst_identity().ty_adt_def() else {
let Some(nonnull_def) = tcx.type_of(unique_did).instantiate_identity().ty_adt_def()
else {
span_bug!(tcx.def_span(unique_did), "expected Box to contain Unique")
};


@ -2,7 +2,7 @@ use itertools::Itertools;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, EarlyBinder, SubstsRef, Ty, TyCtxt};
use rustc_middle::ty::{self, EarlyBinder, GenericArgsRef, Ty, TyCtxt};
use rustc_session::lint::builtin::FUNCTION_ITEM_REFERENCES;
use rustc_span::{symbol::sym, Span};
use rustc_target::spec::abi::Abi;
@ -40,20 +40,19 @@ impl<'tcx> Visitor<'tcx> for FunctionItemRefChecker<'_, 'tcx> {
{
let source_info = *self.body.source_info(location);
let func_ty = func.ty(self.body, self.tcx);
if let ty::FnDef(def_id, substs_ref) = *func_ty.kind() {
if let ty::FnDef(def_id, args_ref) = *func_ty.kind() {
// Handle calls to `transmute`
if self.tcx.is_diagnostic_item(sym::transmute, def_id) {
let arg_ty = args[0].ty(self.body, self.tcx);
for inner_ty in arg_ty.walk().filter_map(|arg| arg.as_type()) {
if let Some((fn_id, fn_substs)) =
FunctionItemRefChecker::is_fn_ref(inner_ty)
if let Some((fn_id, fn_args)) = FunctionItemRefChecker::is_fn_ref(inner_ty)
{
let span = self.nth_arg_span(&args, 0);
self.emit_lint(fn_id, fn_substs, source_info, span);
self.emit_lint(fn_id, fn_args, source_info, span);
}
}
} else {
self.check_bound_args(def_id, substs_ref, &args, source_info);
self.check_bound_args(def_id, args_ref, &args, source_info);
}
}
}
@ -63,11 +62,11 @@ impl<'tcx> Visitor<'tcx> for FunctionItemRefChecker<'_, 'tcx> {
impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
/// Emits a lint for function reference arguments bound by `fmt::Pointer` in calls to the
/// function defined by `def_id` with the substitutions `substs_ref`.
/// function defined by `def_id` with the substitutions `args_ref`.
fn check_bound_args(
&self,
def_id: DefId,
substs_ref: SubstsRef<'tcx>,
args_ref: GenericArgsRef<'tcx>,
args: &[Operand<'tcx>],
source_info: SourceInfo,
) {
@ -76,15 +75,17 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
for bound in bounds {
if let Some(bound_ty) = self.is_pointer_trait(bound) {
// Get the argument types as they appear in the function signature.
let arg_defs = self.tcx.fn_sig(def_id).subst_identity().skip_binder().inputs();
let arg_defs =
self.tcx.fn_sig(def_id).instantiate_identity().skip_binder().inputs();
for (arg_num, arg_def) in arg_defs.iter().enumerate() {
// For all types reachable from the argument type in the fn sig
for inner_ty in arg_def.walk().filter_map(|arg| arg.as_type()) {
// If the inner type matches the type bound by `Pointer`
if inner_ty == bound_ty {
// Do a substitution using the parameters from the callsite
let subst_ty = EarlyBinder::bind(inner_ty).subst(self.tcx, substs_ref);
if let Some((fn_id, fn_substs)) =
let subst_ty =
EarlyBinder::bind(inner_ty).instantiate(self.tcx, args_ref);
if let Some((fn_id, fn_args)) =
FunctionItemRefChecker::is_fn_ref(subst_ty)
{
let mut span = self.nth_arg_span(args, arg_num);
@ -94,7 +95,7 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
let callsite_ctxt = span.source_callsite().ctxt();
span = span.with_ctxt(callsite_ctxt);
}
self.emit_lint(fn_id, fn_substs, source_info, span);
self.emit_lint(fn_id, fn_args, source_info, span);
}
}
}
@ -115,8 +116,8 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
}
/// If a type is a reference or raw pointer to the anonymous type of a function definition,
/// returns that function's `DefId` and `SubstsRef`.
fn is_fn_ref(ty: Ty<'tcx>) -> Option<(DefId, SubstsRef<'tcx>)> {
/// returns that function's `DefId` and `GenericArgsRef`.
fn is_fn_ref(ty: Ty<'tcx>) -> Option<(DefId, GenericArgsRef<'tcx>)> {
let referent_ty = match ty.kind() {
ty::Ref(_, referent_ty, _) => Some(referent_ty),
ty::RawPtr(ty_and_mut) => Some(&ty_and_mut.ty),
@ -124,8 +125,8 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
};
referent_ty
.map(|ref_ty| {
if let ty::FnDef(def_id, substs_ref) = *ref_ty.kind() {
Some((def_id, substs_ref))
if let ty::FnDef(def_id, args_ref) = *ref_ty.kind() {
Some((def_id, args_ref))
} else {
None
}
@ -145,7 +146,7 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
fn emit_lint(
&self,
fn_id: DefId,
fn_substs: SubstsRef<'tcx>,
fn_args: GenericArgsRef<'tcx>,
source_info: SourceInfo,
span: Span,
) {
@ -155,7 +156,7 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
.assert_crate_local()
.lint_root;
// FIXME: use existing printing routines to print the function signature
let fn_sig = self.tcx.fn_sig(fn_id).subst(self.tcx, fn_substs);
let fn_sig = self.tcx.fn_sig(fn_id).instantiate(self.tcx, fn_args);
let unsafety = fn_sig.unsafety().prefix_str();
let abi = match fn_sig.abi() {
Abi::Rust => String::from(""),
@ -167,8 +168,8 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
}
};
let ident = self.tcx.item_name(fn_id).to_ident_string();
let ty_params = fn_substs.types().map(|ty| format!("{}", ty));
let const_params = fn_substs.consts().map(|c| format!("{}", c));
let ty_params = fn_args.types().map(|ty| format!("{}", ty));
let const_params = fn_args.consts().map(|c| format!("{}", c));
let params = ty_params.chain(const_params).join(", ");
let num_args = fn_sig.inputs().map_bound(|inputs| inputs.len()).skip_binder();
let variadic = if fn_sig.c_variadic() { ", ..." } else { "" };


@ -65,7 +65,7 @@ use rustc_middle::mir::dump_mir;
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt};
use rustc_middle::ty::{GeneratorSubsts, SubstsRef};
use rustc_middle::ty::{GeneratorArgs, GenericArgsRef};
use rustc_mir_dataflow::impls::{
MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive,
};
@ -194,11 +194,11 @@ fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtx
const SELF_ARG: Local = Local::from_u32(1);
/// Generator has not been resumed yet.
const UNRESUMED: usize = GeneratorSubsts::UNRESUMED;
const UNRESUMED: usize = GeneratorArgs::UNRESUMED;
/// Generator has returned / is completed.
const RETURNED: usize = GeneratorSubsts::RETURNED;
const RETURNED: usize = GeneratorArgs::RETURNED;
/// Generator has panicked and is poisoned.
const POISONED: usize = GeneratorSubsts::POISONED;
const POISONED: usize = GeneratorArgs::POISONED;
/// Number of variants to reserve in generator state. Corresponds to
/// `UNRESUMED` (beginning of a generator) and `RETURNED`/`POISONED`
@ -223,7 +223,7 @@ struct TransformVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
is_async_kind: bool,
state_adt_ref: AdtDef<'tcx>,
state_substs: SubstsRef<'tcx>,
state_args: GenericArgsRef<'tcx>,
// The type of the discriminant in the generator struct
discr_ty: Ty<'tcx>,
@ -265,7 +265,7 @@ impl<'tcx> TransformVisitor<'tcx> {
(false, true) => 1, // Poll::Pending
});
let kind = AggregateKind::Adt(self.state_adt_ref.did(), idx, self.state_substs, None, None);
let kind = AggregateKind::Adt(self.state_adt_ref.did(), idx, self.state_args, None, None);
// `Poll::Pending`
if self.is_async_kind && idx == VariantIdx::new(1) {
@ -431,8 +431,8 @@ fn make_generator_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body
let pin_did = tcx.require_lang_item(LangItem::Pin, Some(body.span));
let pin_adt_ref = tcx.adt_def(pin_did);
let substs = tcx.mk_substs(&[ref_gen_ty.into()]);
let pin_ref_gen_ty = Ty::new_adt(tcx, pin_adt_ref, substs);
let args = tcx.mk_args(&[ref_gen_ty.into()]);
let pin_ref_gen_ty = Ty::new_adt(tcx, pin_adt_ref, args);
// Replace the by ref generator argument
body.local_decls.raw[1].ty = pin_ref_gen_ty;
@ -1431,7 +1431,7 @@ pub(crate) fn mir_generator_witnesses<'tcx>(
// The first argument is the generator type passed by value
let gen_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty;
// Get the interior types and substs which typeck computed
// Get the interior types and args which typeck computed
let movable = match *gen_ty.kind() {
ty::Generator(_, _, movability) => movability == hir::Movability::Movable,
ty::Error(_) => return None,
@ -1465,14 +1465,14 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
// The first argument is the generator type passed by value
let gen_ty = body.local_decls.raw[1].ty;
// Get the discriminant type and substs which typeck computed
// Get the discriminant type and args which typeck computed
let (discr_ty, upvars, interior, movable) = match *gen_ty.kind() {
ty::Generator(_, substs, movability) => {
let substs = substs.as_generator();
ty::Generator(_, args, movability) => {
let args = args.as_generator();
(
substs.discr_ty(tcx),
substs.upvar_tys().collect::<Vec<_>>(),
substs.witness(),
args.discr_ty(tcx),
args.upvar_tys().collect::<Vec<_>>(),
args.witness(),
movability == hir::Movability::Movable,
)
}
@ -1483,20 +1483,20 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
};
let is_async_kind = matches!(body.generator_kind(), Some(GeneratorKind::Async(_)));
let (state_adt_ref, state_substs) = if is_async_kind {
let (state_adt_ref, state_args) = if is_async_kind {
// Compute Poll<return_ty>
let poll_did = tcx.require_lang_item(LangItem::Poll, None);
let poll_adt_ref = tcx.adt_def(poll_did);
let poll_substs = tcx.mk_substs(&[body.return_ty().into()]);
(poll_adt_ref, poll_substs)
let poll_args = tcx.mk_args(&[body.return_ty().into()]);
(poll_adt_ref, poll_args)
} else {
// Compute GeneratorState<yield_ty, return_ty>
let state_did = tcx.require_lang_item(LangItem::GeneratorState, None);
let state_adt_ref = tcx.adt_def(state_did);
let state_substs = tcx.mk_substs(&[yield_ty.into(), body.return_ty().into()]);
(state_adt_ref, state_substs)
let state_args = tcx.mk_args(&[yield_ty.into(), body.return_ty().into()]);
(state_adt_ref, state_args)
};
let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_substs);
let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);
// We rename RETURN_PLACE which has type mir.return_ty to new_ret_local
// RETURN_PLACE then is a fresh unused local with type ret_ty.
@ -1570,7 +1570,7 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
tcx,
is_async_kind,
state_adt_ref,
state_substs,
state_args,
remap,
storage_liveness,
always_live_locals,


@ -105,7 +105,7 @@ struct Inliner<'tcx> {
/// Caller codegen attributes.
codegen_fn_attrs: &'tcx CodegenFnAttrs,
/// Stack of inlined instances.
/// We only check the `DefId` and not the substs because we want to
/// We only check the `DefId` and not the args because we want to
/// avoid inlining cases of polymorphic recursion.
/// The number of `DefId`s is finite, so checking history is enough
/// to ensure that we do not loop endlessly while inlining.
@ -329,11 +329,11 @@ impl<'tcx> Inliner<'tcx> {
let terminator = bb_data.terminator();
if let TerminatorKind::Call { ref func, target, fn_span, .. } = terminator.kind {
let func_ty = func.ty(caller_body, self.tcx);
if let ty::FnDef(def_id, substs) = *func_ty.kind() {
// To resolve an instance its substs have to be fully normalized.
let substs = self.tcx.try_normalize_erasing_regions(self.param_env, substs).ok()?;
if let ty::FnDef(def_id, args) = *func_ty.kind() {
// To resolve an instance its args have to be fully normalized.
let args = self.tcx.try_normalize_erasing_regions(self.param_env, args).ok()?;
let callee =
Instance::resolve(self.tcx, self.param_env, def_id, substs).ok().flatten()?;
Instance::resolve(self.tcx, self.param_env, def_id, args).ok().flatten()?;
if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
return None;
@ -343,7 +343,7 @@ impl<'tcx> Inliner<'tcx> {
return None;
}
let fn_sig = self.tcx.fn_sig(def_id).subst(self.tcx, substs);
let fn_sig = self.tcx.fn_sig(def_id).instantiate(self.tcx, args);
let source_info = SourceInfo { span: fn_span, ..terminator.source_info };
return Some(CallSite { callee, fn_sig, block: bb, target, source_info });
@ -368,7 +368,7 @@ impl<'tcx> Inliner<'tcx> {
// inlining. This is to ensure that the final crate doesn't have MIR that
// reference unexported symbols
if callsite.callee.def_id().is_local() {
let is_generic = callsite.callee.substs.non_erasable_generics().next().is_some();
let is_generic = callsite.callee.args.non_erasable_generics().next().is_some();
if !is_generic && !callee_attrs.requests_inline() {
return Err("not exported");
}
@ -855,8 +855,8 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
};
let kind = match parent_ty.ty.kind() {
&ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
self.tcx.type_of(def_id).subst(self.tcx, substs).kind()
&ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
}
kind => kind,
};
@ -869,23 +869,23 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
};
check_equal(self, *f_ty);
}
ty::Adt(adt_def, substs) => {
ty::Adt(adt_def, args) => {
let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
let Some(field) = adt_def.variant(var).fields.get(f) else {
self.validation = Err("malformed MIR");
return;
};
check_equal(self, field.ty(self.tcx, substs));
check_equal(self, field.ty(self.tcx, args));
}
ty::Closure(_, substs) => {
let substs = substs.as_closure();
let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
ty::Closure(_, args) => {
let args = args.as_closure();
let Some(f_ty) = args.upvar_tys().nth(f.as_usize()) else {
self.validation = Err("malformed MIR");
return;
};
check_equal(self, f_ty);
}
&ty::Generator(def_id, substs, _) => {
&ty::Generator(def_id, args, _) => {
let f_ty = if let Some(var) = parent_ty.variant_index {
let gen_body = if def_id == self.callee_body.source.def_id() {
self.callee_body
@ -910,7 +910,7 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
f_ty.ty
} else {
let Some(f_ty) = substs.as_generator().prefix_tys().nth(f.index()) else {
let Some(f_ty) = args.as_generator().prefix_tys().nth(f.index()) else {
self.validation = Err("malformed MIR");
return;
};
@ -1130,10 +1130,10 @@ fn try_instance_mir<'tcx>(
) -> Result<&'tcx Body<'tcx>, &'static str> {
match instance {
ty::InstanceDef::DropGlue(_, Some(ty)) => match ty.kind() {
ty::Adt(def, substs) => {
ty::Adt(def, args) => {
let fields = def.all_fields();
for field in fields {
let field_ty = field.ty(tcx, substs);
let field_ty = field.ty(tcx, args);
if field_ty.has_param() && field_ty.has_projections() {
return Err("cannot build drop shim for polymorphic type");
}


@ -3,7 +3,7 @@ use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::mir::TerminatorKind;
use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::{self, subst::SubstsRef, InstanceDef, TyCtxt};
use rustc_middle::ty::{self, GenericArgsRef, InstanceDef, TyCtxt};
use rustc_session::Limit;
// FIXME: check whether it is cheaper to precompute the entire call graph instead of invoking
@ -43,16 +43,16 @@ pub(crate) fn mir_callgraph_reachable<'tcx>(
recursion_limit: Limit,
) -> bool {
trace!(%caller);
for &(callee, substs) in tcx.mir_inliner_callees(caller.def) {
let Ok(substs) = caller.try_subst_mir_and_normalize_erasing_regions(
for &(callee, args) in tcx.mir_inliner_callees(caller.def) {
let Ok(args) = caller.try_subst_mir_and_normalize_erasing_regions(
tcx,
param_env,
ty::EarlyBinder::bind(substs),
ty::EarlyBinder::bind(args),
) else {
trace!(?caller, ?param_env, ?substs, "cannot normalize, skipping");
trace!(?caller, ?param_env, ?args, "cannot normalize, skipping");
continue;
};
let Ok(Some(callee)) = ty::Instance::resolve(tcx, param_env, callee, substs) else {
let Ok(Some(callee)) = ty::Instance::resolve(tcx, param_env, callee, args) else {
trace!(?callee, "cannot resolve, skipping");
continue;
};
@ -147,7 +147,7 @@ pub(crate) fn mir_callgraph_reachable<'tcx>(
pub(crate) fn mir_inliner_callees<'tcx>(
tcx: TyCtxt<'tcx>,
instance: ty::InstanceDef<'tcx>,
) -> &'tcx [(DefId, SubstsRef<'tcx>)] {
) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] {
let steal;
let guard;
let body = match (instance, instance.def_id().as_local()) {
@ -165,7 +165,7 @@ pub(crate) fn mir_inliner_callees<'tcx>(
if let TerminatorKind::Call { func, .. } = &terminator.kind {
let ty = func.ty(&body.local_decls, tcx);
let call = match ty.kind() {
ty::FnDef(def_id, substs) => (*def_id, *substs),
ty::FnDef(def_id, args) => (*def_id, *args),
_ => continue,
};
calls.insert(call);


@ -5,7 +5,7 @@ use crate::MirPass;
use rustc_hir::Mutability;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::ValidityRequirement;
use rustc_middle::ty::{self, ParamEnv, SubstsRef, Ty, TyCtxt};
use rustc_middle::ty::{self, GenericArgsRef, ParamEnv, Ty, TyCtxt};
use rustc_span::symbol::Symbol;
use rustc_target::abi::FieldIdx;
@ -163,14 +163,14 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
}
// Transmuting a transparent struct/union to a field's type is a projection
if let ty::Adt(adt_def, substs) = operand_ty.kind()
if let ty::Adt(adt_def, args) = operand_ty.kind()
&& adt_def.repr().transparent()
&& (adt_def.is_struct() || adt_def.is_union())
&& let Some(place) = operand.place()
{
let variant = adt_def.non_enum_variant();
for (i, field) in variant.fields.iter().enumerate() {
let field_ty = field.ty(self.tcx, substs);
let field_ty = field.ty(self.tcx, args);
if field_ty == *cast_ty {
let place = place.project_deeper(&[ProjectionElem::Field(FieldIdx::from_usize(i), *cast_ty)], self.tcx);
let operand = if operand.is_move() { Operand::Move(place) } else { Operand::Copy(place) };
@ -201,10 +201,10 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
let Some(destination_block) = *target else { return };
// Only bother looking more if it's easy to know what we're calling
let Some((fn_def_id, fn_substs)) = func.const_fn_def() else { return };
let Some((fn_def_id, fn_args)) = func.const_fn_def() else { return };
// Clone needs one subst, so we can cheaply rule out other stuff
if fn_substs.len() != 1 {
if fn_args.len() != 1 {
return;
}
@ -226,7 +226,7 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
if !self.tcx.consider_optimizing(|| {
format!(
"InstSimplify - Call: {:?} SourceInfo: {:?}",
(fn_def_id, fn_substs),
(fn_def_id, fn_args),
terminator.source_info
)
}) {
@ -259,14 +259,14 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
return;
};
let func_ty = func.ty(self.local_decls, self.tcx);
let Some((intrinsic_name, substs)) = resolve_rust_intrinsic(self.tcx, func_ty) else {
let Some((intrinsic_name, args)) = resolve_rust_intrinsic(self.tcx, func_ty) else {
return;
};
// The intrinsics we are interested in have one generic parameter
if substs.is_empty() {
if args.is_empty() {
return;
}
let ty = substs.type_at(0);
let ty = args.type_at(0);
let known_is_valid = intrinsic_assert_panics(self.tcx, self.param_env, ty, intrinsic_name);
match known_is_valid {
@ -297,10 +297,10 @@ fn intrinsic_assert_panics<'tcx>(
fn resolve_rust_intrinsic<'tcx>(
tcx: TyCtxt<'tcx>,
func_ty: Ty<'tcx>,
) -> Option<(Symbol, SubstsRef<'tcx>)> {
if let ty::FnDef(def_id, substs) = *func_ty.kind() {
) -> Option<(Symbol, GenericArgsRef<'tcx>)> {
if let ty::FnDef(def_id, args) = *func_ty.kind() {
if tcx.is_intrinsic(def_id) {
return Some((tcx.item_name(def_id), substs));
return Some((tcx.item_name(def_id), args));
}
}
None


@ -48,7 +48,7 @@ impl EnumSizeOpt {
alloc_cache: &mut FxHashMap<Ty<'tcx>, AllocId>,
) -> Option<(AdtDef<'tcx>, usize, AllocId)> {
let adt_def = match ty.kind() {
ty::Adt(adt_def, _substs) if adt_def.is_enum() => adt_def,
ty::Adt(adt_def, _args) if adt_def.is_enum() => adt_def,
_ => return None,
};
let layout = tcx.layout_of(param_env.and(ty)).ok()?;


@ -2,7 +2,7 @@
use crate::{errors, MirPass};
use rustc_middle::mir::*;
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::GenericArgsRef;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::symbol::{sym, Symbol};
use rustc_span::Span;
@ -19,7 +19,8 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
&mut terminator.kind
{
let func_ty = func.ty(local_decls, tcx);
let Some((intrinsic_name, substs)) = resolve_rust_intrinsic(tcx, func_ty) else {
let Some((intrinsic_name, generic_args)) = resolve_rust_intrinsic(tcx, func_ty)
else {
continue;
};
match intrinsic_name {
@ -149,7 +150,7 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
}
sym::size_of | sym::min_align_of => {
if let Some(target) = *target {
let tp_ty = substs.type_at(0);
let tp_ty = generic_args.type_at(0);
let null_op = match intrinsic_name {
sym::size_of => NullOp::SizeOf,
sym::min_align_of => NullOp::AlignOf,
@ -317,10 +318,10 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
fn resolve_rust_intrinsic<'tcx>(
tcx: TyCtxt<'tcx>,
func_ty: Ty<'tcx>,
) -> Option<(Symbol, SubstsRef<'tcx>)> {
if let ty::FnDef(def_id, substs) = *func_ty.kind() {
) -> Option<(Symbol, GenericArgsRef<'tcx>)> {
if let ty::FnDef(def_id, args) = *func_ty.kind() {
if tcx.is_intrinsic(def_id) {
return Some((tcx.item_name(def_id), substs));
return Some((tcx.item_name(def_id), args));
}
}
None


@ -1,6 +1,6 @@
use rustc_index::bit_set::ChunkedBitSet;
use rustc_middle::mir::{Body, TerminatorKind};
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::GenericArgsRef;
use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, VariantDef};
use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
@ -98,7 +98,7 @@ fn is_needs_drop_and_init<'tcx>(
// This pass is only needed for const-checking, so it doesn't handle as many cases as
// `DropCtxt::open_drop`, since they aren't relevant in a const-context.
match ty.kind() {
ty::Adt(adt, substs) => {
ty::Adt(adt, args) => {
let dont_elaborate = adt.is_union() || adt.is_manually_drop() || adt.has_dtor(tcx);
if dont_elaborate {
return true;
@ -118,7 +118,7 @@ fn is_needs_drop_and_init<'tcx>(
let downcast =
move_path_children_matching(move_data, mpi, |x| x.is_downcast_to(vid));
let Some(dc_mpi) = downcast else {
return variant_needs_drop(tcx, param_env, substs, variant);
return variant_needs_drop(tcx, param_env, args, variant);
};
dc_mpi
@ -130,7 +130,7 @@ fn is_needs_drop_and_init<'tcx>(
.fields
.iter()
.enumerate()
.map(|(f, field)| (FieldIdx::from_usize(f), field.ty(tcx, substs), mpi))
.map(|(f, field)| (FieldIdx::from_usize(f), field.ty(tcx, args), mpi))
.any(field_needs_drop_and_init)
})
}
@ -148,11 +148,11 @@ fn is_needs_drop_and_init<'tcx>(
fn variant_needs_drop<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ParamEnv<'tcx>,
substs: SubstsRef<'tcx>,
args: GenericArgsRef<'tcx>,
variant: &VariantDef,
) -> bool {
variant.fields.iter().any(|field| {
let f_ty = field.ty(tcx, substs);
let f_ty = field.ty(tcx, args);
f_ty.needs_drop(tcx, param_env)
})
}


@ -15,7 +15,7 @@ impl<'tcx> MirPass<'tcx> for RemoveZsts {
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
// Avoid query cycles (generators require optimized MIR for layout).
if tcx.type_of(body.source.def_id()).subst_identity().is_generator() {
if tcx.type_of(body.source.def_id()).instantiate_identity().is_generator() {
return;
}
let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id());


@ -3,8 +3,8 @@ use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItem;
use rustc_middle::mir::*;
use rustc_middle::query::Providers;
use rustc_middle::ty::InternalSubsts;
use rustc_middle::ty::{self, EarlyBinder, GeneratorSubsts, Ty, TyCtxt};
use rustc_middle::ty::GenericArgs;
use rustc_middle::ty::{self, EarlyBinder, GeneratorArgs, Ty, TyCtxt};
use rustc_target::abi::{FieldIdx, VariantIdx, FIRST_VARIANT};
use rustc_index::{Idx, IndexVec};
@ -69,9 +69,9 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<'
ty::InstanceDef::DropGlue(def_id, ty) => {
// FIXME(#91576): Drop shims for generators aren't subject to the MIR passes at the end
// of this function. Is this intentional?
if let Some(ty::Generator(gen_def_id, substs, _)) = ty.map(Ty::kind) {
if let Some(ty::Generator(gen_def_id, args, _)) = ty.map(Ty::kind) {
let body = tcx.optimized_mir(*gen_def_id).generator_drop().unwrap();
let body = EarlyBinder::bind(body.clone()).subst(tcx, substs);
let body = EarlyBinder::bind(body.clone()).instantiate(tcx, args);
debug!("make_shim({:?}) = {:?}", instance, body);
return body;
}
@ -160,12 +160,12 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>)
assert!(!matches!(ty, Some(ty) if ty.is_generator()));
let substs = if let Some(ty) = ty {
tcx.mk_substs(&[ty.into()])
let args = if let Some(ty) = ty {
tcx.mk_args(&[ty.into()])
} else {
InternalSubsts::identity_for_item(tcx, def_id)
GenericArgs::identity_for_item(tcx, def_id)
};
let sig = tcx.fn_sig(def_id).subst(tcx, substs);
let sig = tcx.fn_sig(def_id).instantiate(tcx, args);
let sig = tcx.erase_late_bound_regions(sig);
let span = tcx.def_span(def_id);
@ -377,12 +377,10 @@ fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -
match self_ty.kind() {
_ if is_copy => builder.copy_shim(),
ty::Closure(_, substs) => {
builder.tuple_like_shim(dest, src, substs.as_closure().upvar_tys())
}
ty::Closure(_, args) => builder.tuple_like_shim(dest, src, args.as_closure().upvar_tys()),
ty::Tuple(..) => builder.tuple_like_shim(dest, src, self_ty.tuple_fields()),
ty::Generator(gen_def_id, substs, hir::Movability::Movable) => {
builder.generator_shim(dest, src, *gen_def_id, substs.as_generator())
ty::Generator(gen_def_id, args, hir::Movability::Movable) => {
builder.generator_shim(dest, src, *gen_def_id, args.as_generator())
}
_ => bug!("clone shim for `{:?}` which is not `Copy` and is not an aggregate", self_ty),
};
@ -404,7 +402,7 @@ impl<'tcx> CloneShimBuilder<'tcx> {
// we must subst the self_ty because it's
// otherwise going to be TySelf and we can't index
// or access fields of a Place of type TySelf.
let sig = tcx.fn_sig(def_id).subst(tcx, &[self_ty.into()]);
let sig = tcx.fn_sig(def_id).instantiate(tcx, &[self_ty.into()]);
let sig = tcx.erase_late_bound_regions(sig);
let span = tcx.def_span(def_id);
@ -587,17 +585,17 @@ impl<'tcx> CloneShimBuilder<'tcx> {
dest: Place<'tcx>,
src: Place<'tcx>,
gen_def_id: DefId,
substs: GeneratorSubsts<'tcx>,
args: GeneratorArgs<'tcx>,
) {
self.block(vec![], TerminatorKind::Goto { target: self.block_index_offset(3) }, false);
let unwind = self.block(vec![], TerminatorKind::Resume, true);
// This will get overwritten with a switch once we know the target blocks
let switch = self.block(vec![], TerminatorKind::Unreachable, false);
let unwind = self.clone_fields(dest, src, switch, unwind, substs.upvar_tys());
let unwind = self.clone_fields(dest, src, switch, unwind, args.upvar_tys());
let target = self.block(vec![], TerminatorKind::Return, false);
let unreachable = self.block(vec![], TerminatorKind::Unreachable, false);
let mut cases = Vec::with_capacity(substs.state_tys(gen_def_id, self.tcx).count());
for (index, state_tys) in substs.state_tys(gen_def_id, self.tcx).enumerate() {
let mut cases = Vec::with_capacity(args.state_tys(gen_def_id, self.tcx).count());
for (index, state_tys) in args.state_tys(gen_def_id, self.tcx).enumerate() {
let variant_index = VariantIdx::new(index);
let dest = self.tcx.mk_place_downcast_unnamed(dest, variant_index);
let src = self.tcx.mk_place_downcast_unnamed(src, variant_index);
@ -613,7 +611,7 @@ impl<'tcx> CloneShimBuilder<'tcx> {
cases.push((index as u128, start_block));
let _final_cleanup_block = self.clone_fields(dest, src, target, unwind, state_tys);
}
let discr_ty = substs.discr_ty(self.tcx);
let discr_ty = args.discr_ty(self.tcx);
let temp = self.make_place(Mutability::Mut, discr_ty);
let rvalue = Rvalue::Discriminant(src);
let statement = self.make_statement(StatementKind::Assign(Box::new((temp, rvalue))));
@ -642,7 +640,7 @@ fn build_call_shim<'tcx>(
// `FnPtrShim` contains the fn pointer type that a call shim is being built for - this is used
// to substitute into the signature of the shim. It is not necessary for users of this
// MIR body to perform further substitutions (see `InstanceDef::has_polymorphic_mir_body`).
let (sig_substs, untuple_args) = if let ty::InstanceDef::FnPtrShim(_, ty) = instance {
let (sig_args, untuple_args) = if let ty::InstanceDef::FnPtrShim(_, ty) = instance {
let sig = tcx.erase_late_bound_regions(ty.fn_sig(tcx));
let untuple_args = sig.inputs();
@ -659,11 +657,11 @@ fn build_call_shim<'tcx>(
let sig = tcx.fn_sig(def_id);
let sig = sig.map_bound(|sig| tcx.erase_late_bound_regions(sig));
assert_eq!(sig_substs.is_some(), !instance.has_polymorphic_mir_body());
let mut sig = if let Some(sig_substs) = sig_substs {
sig.subst(tcx, &sig_substs)
assert_eq!(sig_args.is_some(), !instance.has_polymorphic_mir_body());
let mut sig = if let Some(sig_args) = sig_args {
sig.instantiate(tcx, &sig_args)
} else {
sig.subst_identity()
sig.instantiate_identity()
};
if let CallKind::Indirect(fnty) = call_kind {
@ -751,7 +749,7 @@ fn build_call_shim<'tcx>(
// `FnDef` call with optional receiver.
CallKind::Direct(def_id) => {
let ty = tcx.type_of(def_id).subst_identity();
let ty = tcx.type_of(def_id).instantiate_identity();
(
Operand::Constant(Box::new(Constant {
span,
@ -868,12 +866,12 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> {
// Normalize the sig.
let sig = tcx
.fn_sig(ctor_id)
.subst_identity()
.instantiate_identity()
.no_bound_vars()
.expect("LBR in ADT constructor signature");
let sig = tcx.normalize_erasing_regions(param_env, sig);
let ty::Adt(adt_def, substs) = sig.output().kind() else {
let ty::Adt(adt_def, args) = sig.output().kind() else {
bug!("unexpected type for ADT ctor {:?}", sig.output());
};
@ -896,7 +894,7 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> {
// return;
debug!("build_ctor: variant_index={:?}", variant_index);
let kind = AggregateKind::Adt(adt_def.did(), variant_index, substs, None, None);
let kind = AggregateKind::Adt(adt_def.did(), variant_index, args, None, None);
let variant = adt_def.variant(variant_index);
let statement = Statement {
kind: StatementKind::Assign(Box::new((
@ -941,7 +939,7 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> {
fn build_fn_ptr_addr_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Body<'tcx> {
assert!(matches!(self_ty.kind(), ty::FnPtr(..)), "expected fn ptr, found {self_ty}");
let span = tcx.def_span(def_id);
let Some(sig) = tcx.fn_sig(def_id).subst(tcx, &[self_ty.into()]).no_bound_vars() else {
let Some(sig) = tcx.fn_sig(def_id).instantiate(tcx, &[self_ty.into()]).no_bound_vars() else {
span_bug!(span, "FnPtr::addr with bound vars for `{self_ty}`");
};
let locals = local_decls_for_sig(&sig, span);


@ -20,7 +20,7 @@ impl<'tcx> MirPass<'tcx> for ScalarReplacementOfAggregates {
debug!(def_id = ?body.source.def_id());
// Avoid query cycles (generators require optimized MIR for layout).
if tcx.type_of(body.source.def_id()).subst_identity().is_generator() {
if tcx.type_of(body.source.def_id()).instantiate_identity().is_generator() {
return;
}
@ -64,7 +64,7 @@ fn escaping_locals<'tcx>(
if ty.is_union() || ty.is_enum() {
return true;
}
if let ty::Adt(def, _substs) = ty.kind() {
if let ty::Adt(def, _args) = ty.kind() {
if def.repr().flags.contains(ReprFlags::IS_SIMD) {
// Exclude #[repr(simd)] types so that they are not de-optimized into an array
return true;