Auto merge of #139766 - jhpratt:rollup-afrfmnk, r=jhpratt
Rollup of 10 pull requests

Successful merges:

 - #137043 (Initial `UnsafePinned` implementation [Part 1: Libs])
 - #138962 (Expect an array when expected and actual types are both arrays during cast)
 - #139001 (add `naked_functions_rustic_abi` feature gate)
 - #139379 (Use delayed bug for normalization errors in drop elaboration)
 - #139582 (Various coercion cleanups)
 - #139628 (Suggest remove redundant `$()?` around `vis`)
 - #139644 (Micro-optimize `InstSimplify`'s `simplify_primitive_clone`)
 - #139674 (In `rustc_mir_transform`, iterate over index newtypes instead of ints)
 - #139740 (Convert `tests/ui/lint/dead-code/self-assign.rs` to a known-bug test)
 - #139741 (fix smir's run! doc and import)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 15f58c46da
73 changed files with 825 additions and 391 deletions
@@ -315,7 +315,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
repr: &ReprOptions,
variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
is_enum: bool,
is_unsafe_cell: bool,
is_special_no_niche: bool,
scalar_valid_range: (Bound<u128>, Bound<u128>),
discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
discriminants: impl Iterator<Item = (VariantIdx, i128)>,
@@ -348,7 +348,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
repr,
variants,
is_enum,
is_unsafe_cell,
is_special_no_niche,
scalar_valid_range,
always_sized,
present_first,
@@ -505,7 +505,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
repr: &ReprOptions,
variants: &IndexSlice<VariantIdx, IndexVec<FieldIdx, F>>,
is_enum: bool,
is_unsafe_cell: bool,
is_special_no_niche: bool,
scalar_valid_range: (Bound<u128>, Bound<u128>),
always_sized: bool,
present_first: VariantIdx,
@@ -524,7 +524,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
let mut st = self.univariant(&variants[v], repr, kind)?;
st.variants = Variants::Single { index: v };

if is_unsafe_cell {
if is_special_no_niche {
let hide_niches = |scalar: &mut _| match scalar {
Scalar::Initialized { value, valid_range } => {
*valid_range = WrappingRange::full(value.size(dl))
@@ -12,7 +12,7 @@ use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
use rustc_ast_pretty::pprust;
use rustc_attr_parsing::{AttributeKind, find_attr};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::{Applicability, ErrorGuaranteed};
use rustc_errors::{Applicability, Diag, ErrorGuaranteed};
use rustc_feature::Features;
use rustc_hir as hir;
use rustc_lint_defs::BuiltinLintDiag;
@@ -27,19 +27,18 @@ use rustc_span::hygiene::Transparency;
use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, kw, sym};
use tracing::{debug, instrument, trace, trace_span};

use super::diagnostics;
use super::macro_parser::{NamedMatches, NamedParseResult};
use super::{SequenceRepetition, diagnostics};
use crate::base::{
DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult, SyntaxExtension,
SyntaxExtensionKind, TTMacroExpander,
};
use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
use crate::mbe;
use crate::mbe::diagnostics::{annotate_doc_comment, parse_failure_msg};
use crate::mbe::macro_check;
use crate::mbe::macro_parser::NamedMatch::*;
use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
use crate::mbe::transcribe::transcribe;
use crate::mbe::{self, KleeneOp, macro_check};

pub(crate) struct ParserAnyMacro<'a> {
parser: Parser<'a>,
@@ -640,6 +639,37 @@ fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
}
}

/// Checks if a `vis` nonterminal fragment is unnecessarily wrapped in an optional repetition.
///
/// When a `vis` fragment (which can already be empty) is wrapped in `$(...)?`,
/// this suggests removing the redundant repetition syntax since it provides no additional benefit.
fn check_redundant_vis_repetition(
err: &mut Diag<'_>,
sess: &Session,
seq: &SequenceRepetition,
span: &DelimSpan,
) {
let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne;
let is_vis = seq.tts.first().map_or(false, |tt| {
matches!(tt, mbe::TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)))
});

if is_vis && is_zero_or_one {
err.note("a `vis` fragment can already be empty");
err.multipart_suggestion(
"remove the `$(` and `)?`",
vec![
(
sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
"".to_string(),
),
(span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
],
Applicability::MaybeIncorrect,
);
}
}

/// Checks that the lhs contains no repetition which could match an empty token
/// tree, because then the matcher would hang indefinitely.
fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
@@ -654,8 +684,10 @@ fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(),
TokenTree::Sequence(span, seq) => {
if is_empty_token_tree(sess, seq) {
let sp = span.entire();
let guar = sess.dcx().span_err(sp, "repetition matches empty token tree");
return Err(guar);
let mut err =
sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
check_redundant_vis_repetition(&mut err, sess, seq, span);
return Err(err.emit());
}
check_lhs_no_empty_seq(sess, &seq.tts)?
}
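The `check_redundant_vis_repetition` function added above runs while reporting the existing "repetition matches empty token tree" error. A minimal sketch (illustrative, not taken from this diff) of a macro that hits that error and, with this change, also gets the new note and removal suggestion:

```rust
// A `vis` matcher can already match an empty visibility, so wrapping it in
// `$( ... )?` lets the repetition match an empty token tree. This macro fails
// to compile with "repetition matches empty token tree"; the new code adds the
// note "a `vis` fragment can already be empty" and suggests dropping `$(` and `)?`.
macro_rules! item {
    ($($v:vis)? struct $name:ident) => {
        $($v)? struct $name;
    };
}
```

The suggested fix is simply `$v:vis struct $name:ident`, since an absent visibility already matches the `vis` fragment.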
@@ -565,6 +565,8 @@ declare_features! (
(incomplete, mut_ref, "1.79.0", Some(123076)),
/// Allows using `#[naked]` on functions.
(unstable, naked_functions, "1.9.0", Some(90957)),
/// Allows using `#[naked]` on `extern "Rust"` functions.
(unstable, naked_functions_rustic_abi, "CURRENT_RUSTC_VERSION", Some(138997)),
/// Allows using `#[target_feature(enable = "...")]` on `#[naked]` on functions.
(unstable, naked_functions_target_feature, "1.86.0", Some(138568)),
/// Allows specifying the as-needed link modifier
@@ -182,6 +182,7 @@ language_item_table! {
DynMetadata, sym::dyn_metadata, dyn_metadata, Target::Struct, GenericRequirement::None;

Freeze, sym::freeze, freeze_trait, Target::Trait, GenericRequirement::Exact(0);
UnsafeUnpin, sym::unsafe_unpin, unsafe_unpin_trait, Target::Trait, GenericRequirement::Exact(0);

FnPtrTrait, sym::fn_ptr_trait, fn_ptr_trait, Target::Trait, GenericRequirement::Exact(0);
FnPtrAddr, sym::fn_ptr_addr, fn_ptr_addr, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
@@ -235,6 +236,8 @@ language_item_table! {
IndexMut, sym::index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1);

UnsafeCell, sym::unsafe_cell, unsafe_cell_type, Target::Struct, GenericRequirement::None;
UnsafePinned, sym::unsafe_pinned, unsafe_pinned_type, Target::Struct, GenericRequirement::None;

VaList, sym::va_list, va_list, Target::Struct, GenericRequirement::None;

Deref, sym::deref, deref_trait, Target::Trait, GenericRequirement::Exact(0);
@@ -1042,30 +1042,31 @@ impl<'a, 'tcx> CastCheck<'tcx> {
m_cast: ty::TypeAndMut<'tcx>,
) -> Result<CastKind, CastError<'tcx>> {
// array-ptr-cast: allow mut-to-mut, mut-to-const, const-to-const
if m_expr.mutbl >= m_cast.mutbl {
if let ty::Array(ety, _) = m_expr.ty.kind() {
// Due to the limitations of LLVM global constants,
// region pointers end up pointing at copies of
// vector elements instead of the original values.
// To allow raw pointers to work correctly, we
// need to special-case obtaining a raw pointer
// from a region pointer to a vector.
if m_expr.mutbl >= m_cast.mutbl
&& let ty::Array(ety, _) = m_expr.ty.kind()
&& fcx.can_eq(fcx.param_env, *ety, m_cast.ty)
{
// Due to the limitations of LLVM global constants,
// region pointers end up pointing at copies of
// vector elements instead of the original values.
// To allow raw pointers to work correctly, we
// need to special-case obtaining a raw pointer
// from a region pointer to a vector.

// Coerce to a raw pointer so that we generate RawPtr in MIR.
let array_ptr_type = Ty::new_ptr(fcx.tcx, m_expr.ty, m_expr.mutbl);
fcx.coerce(self.expr, self.expr_ty, array_ptr_type, AllowTwoPhase::No, None)
.unwrap_or_else(|_| {
bug!(
// Coerce to a raw pointer so that we generate RawPtr in MIR.
let array_ptr_type = Ty::new_ptr(fcx.tcx, m_expr.ty, m_expr.mutbl);
fcx.coerce(self.expr, self.expr_ty, array_ptr_type, AllowTwoPhase::No, None)
.unwrap_or_else(|_| {
bug!(
"could not cast from reference to array to pointer to array ({:?} to {:?})",
self.expr_ty,
array_ptr_type,
)
});
});

// this will report a type mismatch if needed
fcx.demand_eqtype(self.span, *ety, m_cast.ty);
return Ok(CastKind::ArrayPtrCast);
}
// this will report a type mismatch if needed
fcx.demand_eqtype(self.span, *ety, m_cast.ty);
return Ok(CastKind::ArrayPtrCast);
}

Err(CastError::IllegalCast)
@@ -103,15 +103,6 @@ fn coerce_mutbls<'tcx>(
if from_mutbl >= to_mutbl { Ok(()) } else { Err(TypeError::Mutability) }
}

/// Do not require any adjustments, i.e. coerce `x -> x`.
fn identity(_: Ty<'_>) -> Vec<Adjustment<'_>> {
vec![]
}

fn simple<'tcx>(kind: Adjust) -> impl FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>> {
move |target| vec![Adjustment { kind, target }]
}

/// This always returns `Ok(...)`.
fn success<'tcx>(
adj: Vec<Adjustment<'tcx>>,
@@ -131,7 +122,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
Coerce { fcx, cause, allow_two_phase, use_lub: false, coerce_never }
}

fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
fn unify_raw(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub);
self.commit_if_ok(|_| {
let at = self.at(&self.cause, self.fcx.param_env);
@@ -161,13 +152,30 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
})
}

/// Unify two types (using sub or lub).
fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
self.unify_raw(a, b)
.and_then(|InferOk { value: ty, obligations }| success(vec![], ty, obligations))
}

/// Unify two types (using sub or lub) and produce a specific coercion.
fn unify_and<F>(&self, a: Ty<'tcx>, b: Ty<'tcx>, f: F) -> CoerceResult<'tcx>
where
F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>,
{
self.unify(a, b)
.and_then(|InferOk { value: ty, obligations }| success(f(ty), ty, obligations))
fn unify_and(
&self,
a: Ty<'tcx>,
b: Ty<'tcx>,
adjustments: impl IntoIterator<Item = Adjustment<'tcx>>,
final_adjustment: Adjust,
) -> CoerceResult<'tcx> {
self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| {
success(
adjustments
.into_iter()
.chain(std::iter::once(Adjustment { target: ty, kind: final_adjustment }))
.collect(),
ty,
obligations,
)
})
}

#[instrument(skip(self))]
@@ -180,10 +188,14 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
// Coercing from `!` to any type is allowed:
if a.is_never() {
if self.coerce_never {
return success(simple(Adjust::NeverToAny)(b), b, PredicateObligations::new());
return success(
vec![Adjustment { kind: Adjust::NeverToAny, target: b }],
b,
PredicateObligations::new(),
);
} else {
// Otherwise the only coercion we can do is unification.
return self.unify_and(a, b, identity);
return self.unify(a, b);
}
}
@@ -191,7 +203,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
// we have no information about the source type. This will always
// ultimately fall back to some form of subtyping.
if a.is_ty_var() {
return self.coerce_from_inference_variable(a, b, identity);
return self.coerce_from_inference_variable(a, b);
}

// Consider coercing the subtype to a DST
@@ -247,7 +259,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
ty::FnPtr(a_sig_tys, a_hdr) => {
// We permit coercion of fn pointers to drop the
// unsafe qualifier.
self.coerce_from_fn_pointer(a, a_sig_tys.with(a_hdr), b)
self.coerce_from_fn_pointer(a_sig_tys.with(a_hdr), b)
}
ty::Closure(closure_def_id_a, args_a) => {
// Non-capturing closures are coercible to
@@ -257,7 +269,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
}
_ => {
// Otherwise, just use unification rules.
self.unify_and(a, b, identity)
self.unify(a, b)
}
}
}
@@ -265,12 +277,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
/// Coercing *from* an inference variable. In this case, we have no information
/// about the source type, so we can't really do a true coercion and we always
/// fall back to subtyping (`unify_and`).
fn coerce_from_inference_variable(
&self,
a: Ty<'tcx>,
b: Ty<'tcx>,
make_adjustments: impl FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>,
) -> CoerceResult<'tcx> {
fn coerce_from_inference_variable(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
debug!("coerce_from_inference_variable(a={:?}, b={:?})", a, b);
assert!(a.is_ty_var() && self.shallow_resolve(a) == a);
assert!(self.shallow_resolve(b) == b);
@@ -298,12 +305,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
"coerce_from_inference_variable: two inference variables, target_ty={:?}, obligations={:?}",
target_ty, obligations
);
let adjustments = make_adjustments(target_ty);
InferResult::Ok(InferOk { value: (adjustments, target_ty), obligations })
success(vec![], target_ty, obligations)
} else {
// One unresolved type variable: just apply subtyping, we may be able
// to do something useful.
self.unify_and(a, b, make_adjustments)
self.unify(a, b)
}
}
@@ -331,7 +337,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
coerce_mutbls(mt_a.mutbl, mutbl_b)?;
(r_a, mt_a)
}
_ => return self.unify_and(a, b, identity),
_ => return self.unify(a, b),
};

let span = self.cause.span;
@@ -437,7 +443,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
referent_ty,
mutbl_b, // [1] above
);
match self.unify(derefd_ty_a, b) {
match self.unify_raw(derefd_ty_a, b) {
Ok(ok) => {
found = Some(ok);
break;
@@ -579,13 +585,13 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
// We only have the latter, so we use an inference variable
// for the former and let type inference do the rest.
let coerce_target = self.next_ty_var(self.cause.span);
let mut coercion = self.unify_and(coerce_target, target, |target| {
let unsize = Adjustment { kind: Adjust::Pointer(PointerCoercion::Unsize), target };
match reborrow {
None => vec![unsize],
Some((ref deref, ref autoref)) => vec![deref.clone(), autoref.clone(), unsize],
}
})?;

let mut coercion = self.unify_and(
coerce_target,
target,
reborrow.into_iter().flat_map(|(deref, autoref)| [deref, autoref]),
Adjust::Pointer(PointerCoercion::Unsize),
)?;

let mut selcx = traits::SelectionContext::new(self);
@@ -708,7 +714,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
&& let ty::Dynamic(b_data, _, ty::DynStar) = b.kind()
&& a_data.principal_def_id() == b_data.principal_def_id()
{
return self.unify_and(a, b, |_| vec![]);
return self.unify(a, b);
}

// Check the obligations of the cast -- for example, when casting
@@ -808,23 +814,15 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
// To complete the reborrow, we need to make sure we can unify the inner types, and if so we
// add the adjustments.
self.unify_and(a, b, |_inner_ty| {
vec![Adjustment { kind: Adjust::ReborrowPin(mut_b), target: b }]
})
self.unify_and(a, b, [], Adjust::ReborrowPin(mut_b))
}

fn coerce_from_safe_fn<F, G>(
fn coerce_from_safe_fn(
&self,
a: Ty<'tcx>,
fn_ty_a: ty::PolyFnSig<'tcx>,
b: Ty<'tcx>,
to_unsafe: F,
normal: G,
) -> CoerceResult<'tcx>
where
F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>,
G: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>,
{
adjustment: Option<Adjust>,
) -> CoerceResult<'tcx> {
self.commit_if_ok(|snapshot| {
let outer_universe = self.infcx.universe();
@@ -833,9 +831,19 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
&& hdr_b.safety.is_unsafe()
{
let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a);
self.unify_and(unsafe_a, b, to_unsafe)
self.unify_and(
unsafe_a,
b,
adjustment
.map(|kind| Adjustment { kind, target: Ty::new_fn_ptr(self.tcx, fn_ty_a) }),
Adjust::Pointer(PointerCoercion::UnsafeFnPointer),
)
} else {
self.unify_and(a, b, normal)
let a = Ty::new_fn_ptr(self.tcx, fn_ty_a);
match adjustment {
Some(adjust) => self.unify_and(a, b, [], adjust),
None => self.unify(a, b),
}
};

// FIXME(#73154): This is a hack. Currently LUB can generate
@@ -852,7 +860,6 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
fn coerce_from_fn_pointer(
&self,
a: Ty<'tcx>,
fn_ty_a: ty::PolyFnSig<'tcx>,
b: Ty<'tcx>,
) -> CoerceResult<'tcx> {
@@ -861,15 +868,9 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
//!

let b = self.shallow_resolve(b);
debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b);
debug!(?fn_ty_a, ?b, "coerce_from_fn_pointer");

self.coerce_from_safe_fn(
a,
fn_ty_a,
b,
simple(Adjust::Pointer(PointerCoercion::UnsafeFnPointer)),
identity,
)
self.coerce_from_safe_fn(fn_ty_a, b, None)
}

fn coerce_from_fn_item(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
@@ -916,30 +917,16 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
self.at(&self.cause, self.param_env).normalize(a_sig);
obligations.extend(o1);

let a_fn_pointer = Ty::new_fn_ptr(self.tcx, a_sig);
let InferOk { value, obligations: o2 } = self.coerce_from_safe_fn(
a_fn_pointer,
a_sig,
b,
|unsafe_ty| {
vec![
Adjustment {
kind: Adjust::Pointer(PointerCoercion::ReifyFnPointer),
target: a_fn_pointer,
},
Adjustment {
kind: Adjust::Pointer(PointerCoercion::UnsafeFnPointer),
target: unsafe_ty,
},
]
},
simple(Adjust::Pointer(PointerCoercion::ReifyFnPointer)),
Some(Adjust::Pointer(PointerCoercion::ReifyFnPointer)),
)?;

obligations.extend(o2);
Ok(InferOk { value, obligations })
}
_ => self.unify_and(a, b, identity),
_ => self.unify(a, b),
}
}
@@ -983,10 +970,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
self.unify_and(
pointer_ty,
b,
simple(Adjust::Pointer(PointerCoercion::ClosureFnPointer(safety))),
[],
Adjust::Pointer(PointerCoercion::ClosureFnPointer(safety)),
)
}
_ => self.unify_and(a, b, identity),
_ => self.unify(a, b),
}
}
@@ -1001,7 +989,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
let (is_ref, mt_a) = match *a.kind() {
ty::Ref(_, ty, mutbl) => (true, ty::TypeAndMut { ty, mutbl }),
ty::RawPtr(ty, mutbl) => (false, ty::TypeAndMut { ty, mutbl }),
_ => return self.unify_and(a, b, identity),
_ => return self.unify(a, b),
};
coerce_mutbls(mt_a.mutbl, mutbl_b)?;
@@ -1011,16 +999,16 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
// representation, we still register an Adjust::DerefRef so that
// regionck knows that the region for `a` must be valid here.
if is_ref {
self.unify_and(a_raw, b, |target| {
vec![
Adjustment { kind: Adjust::Deref(None), target: mt_a.ty },
Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), target },
]
})
self.unify_and(
a_raw,
b,
[Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }],
Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)),
)
} else if mt_a.mutbl != mutbl_b {
self.unify_and(a_raw, b, simple(Adjust::Pointer(PointerCoercion::MutToConstPointer)))
self.unify_and(a_raw, b, [], Adjust::Pointer(PointerCoercion::MutToConstPointer))
} else {
self.unify_and(a_raw, b, identity)
self.unify(a_raw, b)
}
}
}
@@ -1118,9 +1106,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let cause = self.cause(DUMMY_SP, ObligationCauseCode::ExprAssignable);
// We don't ever need two-phase here since we throw out the result of the coercion.
let coerce = Coerce::new(self, cause, AllowTwoPhase::No, true);
coerce
.autoderef(DUMMY_SP, expr_ty)
.find_map(|(ty, steps)| self.probe(|_| coerce.unify(ty, target)).ok().map(|_| steps))
coerce.autoderef(DUMMY_SP, expr_ty).find_map(|(ty, steps)| {
self.probe(|_| coerce.unify_raw(ty, target)).ok().map(|_| steps)
})
}

/// Given a type, this function will calculate and return the type given
@@ -257,6 +257,13 @@ impl Parse for Newtype {
}
}

impl std::ops::AddAssign<usize> for #name {
#[inline]
fn add_assign(&mut self, other: usize) {
*self = *self + other;
}
}

impl rustc_index::Idx for #name {
#[inline]
fn new(value: usize) -> Self {
@@ -606,6 +606,11 @@ fn register_builtins(store: &mut LintStore) {
"converted into hard error, see issue #127323 \
<https://github.com/rust-lang/rust/issues/127323> for more information",
);
store.register_removed(
"undefined_naked_function_abi",
"converted into hard error, see PR #139001 \
<https://github.com/rust-lang/rust/issues/139001> for more information",
);
}

fn register_internals(store: &mut LintStore) {
@@ -864,8 +864,8 @@ fn ty_is_known_nonnull<'tcx>(
return true;
}

// `UnsafeCell` has its niche hidden.
if def.is_unsafe_cell() {
// `UnsafeCell` and `UnsafePinned` have their niche hidden.
if def.is_unsafe_cell() || def.is_unsafe_pinned() {
return false;
}
@@ -110,7 +110,6 @@ declare_lint_pass! {
UNCONDITIONAL_PANIC,
UNCONDITIONAL_RECURSION,
UNCOVERED_PARAM_IN_PROJECTION,
UNDEFINED_NAKED_FUNCTION_ABI,
UNEXPECTED_CFGS,
UNFULFILLED_LINT_EXPECTATIONS,
UNINHABITED_STATIC,
@@ -2830,39 +2829,6 @@ declare_lint! {
"detects deprecation attributes with no effect",
}

declare_lint! {
/// The `undefined_naked_function_abi` lint detects naked function definitions that
/// either do not specify an ABI or specify the Rust ABI.
///
/// ### Example
///
/// ```rust
/// #![feature(asm_experimental_arch, naked_functions)]
///
/// use std::arch::naked_asm;
///
/// #[naked]
/// pub fn default_abi() -> u32 {
/// unsafe { naked_asm!(""); }
/// }
///
/// #[naked]
/// pub extern "Rust" fn rust_abi() -> u32 {
/// unsafe { naked_asm!(""); }
/// }
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// The Rust ABI is currently undefined. Therefore, naked functions should
/// specify a non-Rust ABI.
pub UNDEFINED_NAKED_FUNCTION_ABI,
Warn,
"undefined naked function ABI"
}

declare_lint! {
/// The `ineffective_unstable_trait_impl` lint detects `#[unstable]` attributes which are not used.
///
@@ -53,6 +53,10 @@ bitflags::bitflags! {
const IS_VARIANT_LIST_NON_EXHAUSTIVE = 1 << 8;
/// Indicates whether the type is `UnsafeCell`.
const IS_UNSAFE_CELL = 1 << 9;
/// Indicates whether the type is `UnsafePinned`.
const IS_UNSAFE_PINNED = 1 << 10;
/// Indicates whether the type is anonymous.
const IS_ANONYMOUS = 1 << 11;
}
}
rustc_data_structures::external_bitflags_debug! { AdtFlags }
@@ -302,6 +306,9 @@ impl AdtDefData {
if tcx.is_lang_item(did, LangItem::UnsafeCell) {
flags |= AdtFlags::IS_UNSAFE_CELL;
}
if tcx.is_lang_item(did, LangItem::UnsafePinned) {
flags |= AdtFlags::IS_UNSAFE_PINNED;
}

AdtDefData { did, variants, flags, repr }
}
@@ -405,6 +412,12 @@ impl<'tcx> AdtDef<'tcx> {
self.flags().contains(AdtFlags::IS_UNSAFE_CELL)
}

/// Returns `true` if this is `UnsafePinned<T>`.
#[inline]
pub fn is_unsafe_pinned(self) -> bool {
self.flags().contains(AdtFlags::IS_UNSAFE_PINNED)
}

/// Returns `true` if this is `ManuallyDrop<T>`.
#[inline]
pub fn is_manually_drop(self) -> bool {
@@ -547,7 +547,7 @@ fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None);

for bb in START_BLOCK..body.basic_blocks.next_index() {
for bb in body.basic_blocks.indices() {
let bb_data = &body[bb];
if bb_data.is_cleanup {
continue;
@@ -556,11 +556,11 @@ fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
match &bb_data.terminator().kind {
TerminatorKind::Call { func, .. } => {
let func_ty = func.ty(body, tcx);
if let ty::FnDef(def_id, _) = *func_ty.kind() {
if def_id == get_context_def_id {
let local = eliminate_get_context_call(&mut body[bb]);
replace_resume_ty_local(tcx, body, local, context_mut_ref);
}
if let ty::FnDef(def_id, _) = *func_ty.kind()
&& def_id == get_context_def_id
{
let local = eliminate_get_context_call(&mut body[bb]);
replace_resume_ty_local(tcx, body, local, context_mut_ref);
}
}
TerminatorKind::Yield { resume_arg, .. } => {
@@ -1057,7 +1057,7 @@ fn insert_switch<'tcx>(
let blocks = body.basic_blocks_mut().iter_mut();

for target in blocks.flat_map(|b| b.terminator_mut().successors_mut()) {
*target = BasicBlock::new(target.index() + 1);
*target += 1;
}
}
@@ -1209,14 +1209,8 @@ fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, typing_env: ty::Typing
}

// If there's a return terminator the function may return.
for block in body.basic_blocks.iter() {
if let TerminatorKind::Return = block.terminator().kind {
return true;
}
}

body.basic_blocks.iter().any(|block| matches!(block.terminator().kind, TerminatorKind::Return))
// Otherwise the function can't return.
false
}

fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
@@ -1293,12 +1287,12 @@ fn create_coroutine_resume_function<'tcx>(
kind: TerminatorKind::Goto { target: poison_block },
};
}
} else if !block.is_cleanup {
} else if !block.is_cleanup
// Any terminators that *can* unwind but don't have an unwind target set are also
// pointed at our poisoning block (unless they're part of the cleanup path).
if let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() {
*unwind = UnwindAction::Cleanup(poison_block);
}
&& let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut()
{
*unwind = UnwindAction::Cleanup(poison_block);
}
}
}
@@ -1340,12 +1334,14 @@ fn create_coroutine_resume_function<'tcx>(
make_coroutine_state_argument_indirect(tcx, body);

match transform.coroutine_kind {
CoroutineKind::Coroutine(_)
| CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) =>
{
make_coroutine_state_argument_pinned(tcx, body);
}
// Iterator::next doesn't accept a pinned argument,
// unlike for all other coroutine kinds.
CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {}
_ => {
make_coroutine_state_argument_pinned(tcx, body);
}
}

// Make sure we remove dead blocks to remove
@@ -1408,8 +1404,7 @@ fn create_cases<'tcx>(
let mut statements = Vec::new();

// Create StorageLive instructions for locals with live storage
for i in 0..(body.local_decls.len()) {
let l = Local::new(i);
for l in body.local_decls.indices() {
let needs_storage_live = point.storage_liveness.contains(l)
&& !transform.remap.contains(l)
&& !transform.always_live_locals.contains(l);
@@ -1535,15 +1530,10 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
let coroutine_kind = body.coroutine_kind().unwrap();

// Get the discriminant type and args which typeck computed
let (discr_ty, movable) = match *coroutine_ty.kind() {
ty::Coroutine(_, args) => {
let args = args.as_coroutine();
(args.discr_ty(tcx), coroutine_kind.movability() == hir::Movability::Movable)
}
_ => {
tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}"));
}
let ty::Coroutine(_, args) = coroutine_ty.kind() else {
tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}"));
};
let discr_ty = args.as_coroutine().discr_ty(tcx);

let new_ret_ty = match coroutine_kind {
CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => {
@@ -1610,6 +1600,7 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
let always_live_locals = always_storage_live_locals(body);

let movable = coroutine_kind.movability() == hir::Movability::Movable;
let liveness_info =
locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
@@ -103,9 +103,8 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch {
let mut should_cleanup = false;

// Also consider newly generated bbs in the same pass
for i in 0..body.basic_blocks.len() {
for parent in body.basic_blocks.indices() {
let bbs = &*body.basic_blocks;
let parent = BasicBlock::from_usize(i);
let Some(opt_data) = evaluate_candidate(tcx, body, parent) else { continue };

trace!("SUCCESS: found optimization possibility to apply: {opt_data:?}");
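The MIR passes in this and the following hunks apply #139674's pattern of iterating index newtypes (`BasicBlock`, `Local`, …) directly instead of counting with raw `usize` ranges. A small self-contained sketch of the same idiom with `rustc_index` (assumes the crate as a dependency; not code from this diff):

```rust
use rustc_index::{Idx, IndexVec};

// Generic over any index newtype `I`, analogous to `BasicBlock` or `Local`.
fn visit<I: Idx, T>(items: &IndexVec<I, T>) {
    // Before: `for i in 0..items.len() { let idx = I::new(i); ... }`
    // After: iterate the typed indices directly.
    for idx in items.indices() {
        let _item = &items[idx];
    }
    // Or take the index and the element together.
    for (idx, item) in items.iter_enumerated() {
        let _ = (idx, item);
    }
}
```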
@@ -266,19 +266,16 @@ where
let tcx = self.tcx();

assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
// The type error for normalization may have been in dropck: see
// `compute_drop_data` in rustc_borrowck, in which case we wouldn't have
// deleted the MIR body and could have an error here as well.
let field_ty = match tcx
.try_normalize_erasing_regions(self.elaborator.typing_env(), f.ty(tcx, args))
{
Ok(t) => t,
Err(_) => Ty::new_error(
self.tcx(),
self.elaborator
.body()
.tainted_by_errors
.expect("Error in drop elaboration not found by dropck."),
self.tcx().dcx().span_delayed_bug(
self.elaborator.body().span,
"Error normalizing in drop elaboration.",
),
),
};
@@ -10,7 +10,6 @@ use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, layout};
use rustc_span::{DUMMY_SP, Symbol, sym};

use crate::simplify::simplify_duplicate_switch_targets;
use crate::take_array;

pub(super) enum InstSimplify {
BeforeInline,
@@ -214,7 +213,9 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
terminator: &mut Terminator<'tcx>,
statements: &mut Vec<Statement<'tcx>>,
) {
let TerminatorKind::Call { func, args, destination, target, .. } = &mut terminator.kind
let TerminatorKind::Call {
func, args, destination, target: Some(destination_block), ..
} = &terminator.kind
else {
return;
};
@@ -222,15 +223,8 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
// It's definitely not a clone if there are multiple arguments
let [arg] = &args[..] else { return };

let Some(destination_block) = *target else { return };

// Only bother looking more if it's easy to know what we're calling
let Some((fn_def_id, fn_args)) = func.const_fn_def() else { return };

// Clone needs one arg, so we can cheaply rule out other stuff
if fn_args.len() != 1 {
return;
}
let Some((fn_def_id, ..)) = func.const_fn_def() else { return };

// These types are easily available from locals, so check that before
// doing DefId lookups to figure out what we're actually calling.
@@ -238,15 +232,12 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
let ty::Ref(_region, inner_ty, Mutability::Not) = *arg_ty.kind() else { return };

if !inner_ty.is_trivially_pure_clone_copy() {
if !self.tcx.is_lang_item(fn_def_id, LangItem::CloneFn)
|| !inner_ty.is_trivially_pure_clone_copy()
{
return;
}

if !self.tcx.is_lang_item(fn_def_id, LangItem::CloneFn) {
return;
}

let Ok([arg]) = take_array(args) else { return };
let Some(arg_place) = arg.node.place() else { return };

statements.push(Statement {
@@ -258,7 +249,7 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
)),
))),
});
terminator.kind = TerminatorKind::Goto { target: destination_block };
terminator.kind = TerminatorKind::Goto { target: *destination_block };
}

fn simplify_nounwind_call(&self, terminator: &mut Terminator<'tcx>) {
@@ -20,13 +20,11 @@ impl<'tcx> crate::MirPass<'tcx> for MatchBranchSimplification {
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let typing_env = body.typing_env(tcx);
let mut should_cleanup = false;
for i in 0..body.basic_blocks.len() {
let bbs = &*body.basic_blocks;
let bb_idx = BasicBlock::from_usize(i);
match bbs[bb_idx].terminator().kind {
for bb_idx in body.basic_blocks.indices() {
match &body.basic_blocks[bb_idx].terminator().kind {
TerminatorKind::SwitchInt {
discr: ref _discr @ (Operand::Copy(_) | Operand::Move(_)),
ref targets,
discr: Operand::Copy(_) | Operand::Move(_),
targets,
..
// We require that the possible target blocks don't contain this block.
} if !targets.all_targets().contains(&bb_idx) => {}
@@ -66,9 +64,10 @@ trait SimplifyMatch<'tcx> {
typing_env: ty::TypingEnv<'tcx>,
) -> Option<()> {
let bbs = &body.basic_blocks;
let (discr, targets) = match bbs[switch_bb_idx].terminator().kind {
TerminatorKind::SwitchInt { ref discr, ref targets, .. } => (discr, targets),
_ => unreachable!(),
let TerminatorKind::SwitchInt { discr, targets, .. } =
&bbs[switch_bb_idx].terminator().kind
else {
unreachable!();
};

let discr_ty = discr.ty(body.local_decls(), tcx);
@@ -18,19 +18,17 @@ impl<'tcx> crate::MirPass<'tcx> for MultipleReturnTerminators {
// find basic blocks with no statement and a return terminator
let mut bbs_simple_returns = DenseBitSet::new_empty(body.basic_blocks.len());
let bbs = body.basic_blocks_mut();
for idx in bbs.indices() {
if bbs[idx].statements.is_empty()
&& bbs[idx].terminator().kind == TerminatorKind::Return
{
for (idx, bb) in bbs.iter_enumerated() {
if bb.statements.is_empty() && bb.terminator().kind == TerminatorKind::Return {
bbs_simple_returns.insert(idx);
}
}

for bb in bbs {
if let TerminatorKind::Goto { target } = bb.terminator().kind {
if bbs_simple_returns.contains(target) {
bb.terminator_mut().kind = TerminatorKind::Return;
}
if let TerminatorKind::Goto { target } = bb.terminator().kind
&& bbs_simple_returns.contains(target)
{
bb.terminator_mut().kind = TerminatorKind::Return;
}
}
@@ -221,12 +221,11 @@ impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
// Check for cycles
let mut stack = FxHashSet::default();
for i in 0..parent.len() {
let mut bb = BasicBlock::from_usize(i);
for (mut bb, parent) in parent.iter_enumerated_mut() {
stack.clear();
stack.insert(bb);
loop {
let Some(parent) = parent[bb].take() else { break };
let Some(parent) = parent.take() else { break };
let no_cycle = stack.insert(parent);
if !no_cycle {
self.fail(
@@ -742,9 +742,6 @@ passes_trait_impl_const_stable =
passes_transparent_incompatible =
transparent {$target} cannot have other repr hints

passes_undefined_naked_function_abi =
Rust ABI is unsupported in naked functions

passes_unknown_external_lang_item =
unknown external lang item: `{$lang_item}`
@@ -624,6 +624,21 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
match target {
Target::Fn
| Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) => {
let fn_sig = self.tcx.hir_node(hir_id).fn_sig().unwrap();
let abi = fn_sig.header.abi;
if abi.is_rustic_abi() && !self.tcx.features().naked_functions_rustic_abi() {
feature_err(
&self.tcx.sess,
sym::naked_functions_rustic_abi,
fn_sig.span,
format!(
"`#[naked]` is currently unstable on `extern \"{}\"` functions",
abi.as_str()
),
)
.emit();
}

for other_attr in attrs {
// this covers "sugared doc comments" of the form `/// ...`
// it does not cover `#[doc = "..."]`, which is handled below
@@ -1197,10 +1197,6 @@ pub(crate) struct UnlabeledCfInWhileCondition<'a> {
pub cf_type: &'a str,
}

#[derive(LintDiagnostic)]
#[diag(passes_undefined_naked_function_abi)]
pub(crate) struct UndefinedNakedFunctionAbi;

#[derive(Diagnostic)]
#[diag(passes_no_patterns)]
pub(crate) struct NoPatterns {
@@ -1,6 +1,5 @@
//! Checks validity of naked functions.

use rustc_abi::ExternAbi;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{LocalDefId, LocalModDefId};
@@ -10,12 +9,11 @@ use rustc_middle::hir::nested_filter::OnlyBodies;
use rustc_middle::query::Providers;
use rustc_middle::span_bug;
use rustc_middle::ty::TyCtxt;
use rustc_session::lint::builtin::UNDEFINED_NAKED_FUNCTION_ABI;
use rustc_span::{Span, sym};

use crate::errors::{
NakedAsmOutsideNakedFn, NakedFunctionsAsmBlock, NakedFunctionsMustNakedAsm, NoPatterns,
ParamsNotAllowed, UndefinedNakedFunctionAbi,
ParamsNotAllowed,
};

pub(crate) fn provide(providers: &mut Providers) {
@@ -29,26 +27,21 @@ fn check_mod_naked_functions(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
continue;
}

let (fn_header, body_id) = match tcx.hir_node_by_def_id(def_id) {
let body = match tcx.hir_node_by_def_id(def_id) {
hir::Node::Item(hir::Item {
kind: hir::ItemKind::Fn { sig, body: body_id, .. },
..
kind: hir::ItemKind::Fn { body: body_id, .. }, ..
})
| hir::Node::TraitItem(hir::TraitItem {
kind: hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)),
kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body_id)),
..
})
| hir::Node::ImplItem(hir::ImplItem {
kind: hir::ImplItemKind::Fn(sig, body_id),
..
}) => (sig.header, *body_id),
kind: hir::ImplItemKind::Fn(_, body_id), ..
}) => tcx.hir_body(*body_id),
_ => continue,
};

let body = tcx.hir_body(body_id);

if tcx.has_attr(def_id, sym::naked) {
check_abi(tcx, def_id, fn_header.abi);
check_no_patterns(tcx, body.params);
check_no_parameters_use(tcx, body);
check_asm(tcx, def_id, body);
@@ -60,20 +53,6 @@ fn check_mod_naked_functions(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
}
}

/// Checks that function uses non-Rust ABI.
fn check_abi(tcx: TyCtxt<'_>, def_id: LocalDefId, abi: ExternAbi) {
if abi == ExternAbi::Rust {
let hir_id = tcx.local_def_id_to_hir_id(def_id);
let span = tcx.def_span(def_id);
tcx.emit_node_span_lint(
UNDEFINED_NAKED_FUNCTION_ABI,
hir_id,
span,
UndefinedNakedFunctionAbi,
);
}
}

/// Checks that parameters don't use patterns. Mirrors the checks for function declarations.
fn check_no_patterns(tcx: TyCtxt<'_>, params: &[hir::Param<'_>]) {
for param in params {
@@ -244,6 +244,7 @@ where
/// ```ignore(needs-extern-crate)
/// # extern crate rustc_driver;
/// # extern crate rustc_interface;
/// # extern crate rustc_middle;
/// # #[macro_use]
/// # extern crate rustc_smir;
/// # extern crate stable_mir;
@@ -264,6 +265,7 @@ where
/// ```ignore(needs-extern-crate)
/// # extern crate rustc_driver;
/// # extern crate rustc_interface;
/// # extern crate rustc_middle;
/// # #[macro_use]
/// # extern crate rustc_smir;
/// # extern crate stable_mir;
@@ -328,6 +330,7 @@ macro_rules! run_driver {
use rustc_driver::{Callbacks, Compilation, run_compiler};
use rustc_middle::ty::TyCtxt;
use rustc_interface::interface;
use rustc_smir::rustc_internal;
use stable_mir::CompilerError;
use std::ops::ControlFlow;
@@ -633,6 +633,24 @@ impl SourceMap {
sp
}

/// Extends the given `Span` to just before the previous occurrence of `c`. Return the same span
/// if an error occurred while retrieving the code snippet.
pub fn span_extend_to_prev_char_before(
&self,
sp: Span,
c: char,
accept_newlines: bool,
) -> Span {
if let Ok(prev_source) = self.span_to_prev_source(sp) {
let prev_source = prev_source.rsplit(c).next().unwrap_or("");
if accept_newlines || !prev_source.contains('\n') {
return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32 - 1_u32));
}
}

sp
}

/// Extends the given `Span` to just after the previous occurrence of `pat` when surrounded by
/// whitespace. Returns None if the pattern could not be found or if an error occurred while
/// retrieving the code snippet.
@@ -1400,6 +1400,7 @@ symbols! {
naked,
naked_asm,
naked_functions,
naked_functions_rustic_abi,
naked_functions_target_feature,
name,
names,
@@ -2215,6 +2216,8 @@ symbols! {
unsafe_fields,
unsafe_no_drop_flag,
unsafe_pin_internals,
unsafe_pinned,
unsafe_unpin,
unsize,
unsized_const_param_ty,
unsized_const_params,
@@ -514,6 +514,9 @@ fn layout_of_uncached<'tcx>(
return map_layout(cx.calc.layout_of_union(&def.repr(), &variants));
}

// UnsafeCell and UnsafePinned both disable niche optimizations
let is_special_no_niche = def.is_unsafe_cell() || def.is_unsafe_pinned();

let get_discriminant_type =
|min, max| abi::Integer::repr_discr(tcx, ty, &def.repr(), min, max);
@@ -542,7 +545,7 @@ fn layout_of_uncached<'tcx>(
&def.repr(),
&variants,
def.is_enum(),
def.is_unsafe_cell(),
is_special_no_niche,
tcx.layout_scalar_valid_range(def.did()),
get_discriminant_type,
discriminants_iter(),
@@ -568,7 +571,7 @@ fn layout_of_uncached<'tcx>(
&def.repr(),
&variants,
def.is_enum(),
def.is_unsafe_cell(),
is_special_no_niche,
tcx.layout_scalar_valid_range(def.did()),
get_discriminant_type,
discriminants_iter(),