Auto merge of #136641 - matthiaskrgr:rollup-lajwje5, r=matthiaskrgr
Rollup of 7 pull requests

Successful merges:

 - #136073 (Always compute coroutine layout for eagerly emitting recursive layout errors)
 - #136235 (Pretty print pattern type values with transmute if they don't satisfy their pattern)
 - #136311 (Ensure that we never try to monomorphize the upcasting or vtable calls of impossible dyn types)
 - #136315 (Use short ty string for binop and unop errors)
 - #136393 (Fix accidentally not emitting overflowing literals lints anymore in patterns)
 - #136435 (Simplify some code for lowering THIR patterns)
 - #136630 (Change two std process tests to not output to std{out,err}, and fix test suite stat reset in bootstrap CI test rendering)

r? `@ghost`
`@rustbot` modify labels: rollup

try-job: aarch64-gnu-debug
commit 942db6782f

45 changed files with 524 additions and 349 deletions
@@ -51,6 +51,8 @@ pub fn provide(providers: &mut Providers) {
     providers.check_validity_requirement = |tcx, (init_kind, param_env_and_ty)| {
         util::check_validity_requirement(tcx, init_kind, param_env_and_ty)
     };
+    providers.hooks.validate_scalar_in_layout =
+        |tcx, scalar, layout| util::validate_scalar_in_layout(tcx, scalar, layout);
 }
 
 /// `rustc_driver::main` installs a handler that will set this to `true` if
@@ -1,9 +1,10 @@
 use rustc_abi::{BackendRepr, FieldsShape, Scalar, Variants};
-use rustc_middle::bug;
 use rustc_middle::ty::layout::{
     HasTyCtxt, LayoutCx, LayoutError, LayoutOf, TyAndLayout, ValidityRequirement,
 };
-use rustc_middle::ty::{PseudoCanonicalInput, Ty, TyCtxt};
+use rustc_middle::ty::{PseudoCanonicalInput, ScalarInt, Ty, TyCtxt};
+use rustc_middle::{bug, ty};
+use rustc_span::DUMMY_SP;
 
 use crate::const_eval::{CanAccessMutGlobal, CheckAlignment, CompileTimeMachine};
 use crate::interpret::{InterpCx, MemoryKind};
@@ -34,7 +35,7 @@ pub fn check_validity_requirement<'tcx>(
 
     let layout_cx = LayoutCx::new(tcx, input.typing_env);
     if kind == ValidityRequirement::Uninit || tcx.sess.opts.unstable_opts.strict_init_checks {
-        check_validity_requirement_strict(layout, &layout_cx, kind)
+        Ok(check_validity_requirement_strict(layout, &layout_cx, kind))
     } else {
         check_validity_requirement_lax(layout, &layout_cx, kind)
     }
@@ -46,10 +47,10 @@ fn check_validity_requirement_strict<'tcx>(
     ty: TyAndLayout<'tcx>,
     cx: &LayoutCx<'tcx>,
     kind: ValidityRequirement,
-) -> Result<bool, &'tcx LayoutError<'tcx>> {
+) -> bool {
     let machine = CompileTimeMachine::new(CanAccessMutGlobal::No, CheckAlignment::Error);
 
-    let mut cx = InterpCx::new(cx.tcx(), rustc_span::DUMMY_SP, cx.typing_env, machine);
+    let mut cx = InterpCx::new(cx.tcx(), DUMMY_SP, cx.typing_env, machine);
 
     let allocated = cx
         .allocate(ty, MemoryKind::Machine(crate::const_eval::MemoryKind::Heap))
@@ -69,14 +70,13 @@ fn check_validity_requirement_strict<'tcx>(
     // due to this.
     // The value we are validating is temporary and discarded at the end of this function, so
     // there is no point in reseting provenance and padding.
-    Ok(cx
-        .validate_operand(
+    cx.validate_operand(
         &allocated.into(),
         /*recursive*/ false,
         /*reset_provenance_and_padding*/ false,
     )
     .discard_err()
-    .is_some())
+    .is_some()
 }
 
 /// Implements the 'lax' (default) version of the [`check_validity_requirement`] checks; see that
@@ -168,3 +168,31 @@ fn check_validity_requirement_lax<'tcx>(
 
     Ok(true)
 }
+
+pub(crate) fn validate_scalar_in_layout<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    scalar: ScalarInt,
+    ty: Ty<'tcx>,
+) -> bool {
+    let machine = CompileTimeMachine::new(CanAccessMutGlobal::No, CheckAlignment::Error);
+
+    let typing_env = ty::TypingEnv::fully_monomorphized();
+    let mut cx = InterpCx::new(tcx, DUMMY_SP, typing_env, machine);
+
+    let Ok(layout) = cx.layout_of(ty) else {
+        bug!("could not compute layout of {scalar:?}:{ty:?}")
+    };
+    let allocated = cx
+        .allocate(layout, MemoryKind::Machine(crate::const_eval::MemoryKind::Heap))
+        .expect("OOM: failed to allocate for uninit check");
+
+    cx.write_scalar(scalar, &allocated).unwrap();
+
+    cx.validate_operand(
+        &allocated.into(),
+        /*recursive*/ false,
+        /*reset_provenance_and_padding*/ false,
+    )
+    .discard_err()
+    .is_some()
+}
@@ -8,6 +8,7 @@ mod type_name;
 
 pub use self::alignment::{is_disaligned, is_within_packed};
 pub use self::check_validity_requirement::check_validity_requirement;
+pub(crate) use self::check_validity_requirement::validate_scalar_in_layout;
 pub use self::compare_types::{relate_types, sub_types};
 pub use self::type_name::type_name;
@@ -345,6 +345,9 @@ pub trait Visitor<'v>: Sized {
     fn visit_pat_expr(&mut self, expr: &'v PatExpr<'v>) -> Self::Result {
         walk_pat_expr(self, expr)
     }
+    fn visit_lit(&mut self, _hir_id: HirId, _lit: &'v Lit, _negated: bool) -> Self::Result {
+        Self::Result::output()
+    }
     fn visit_anon_const(&mut self, c: &'v AnonConst) -> Self::Result {
         walk_anon_const(self, c)
     }
@@ -764,7 +767,7 @@ pub fn walk_pat_field<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v PatField<'
 pub fn walk_pat_expr<'v, V: Visitor<'v>>(visitor: &mut V, expr: &'v PatExpr<'v>) -> V::Result {
     try_visit!(visitor.visit_id(expr.hir_id));
     match &expr.kind {
-        PatExprKind::Lit { .. } => V::Result::output(),
+        PatExprKind::Lit { lit, negated } => visitor.visit_lit(expr.hir_id, lit, *negated),
         PatExprKind::ConstBlock(c) => visitor.visit_inline_const(c),
         PatExprKind::Path(qpath) => visitor.visit_qpath(qpath, expr.hir_id, expr.span),
     }
@@ -912,7 +915,8 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr<'v>)
             try_visit!(visitor.visit_expr(expr));
             visit_opt!(visitor, visit_ty_unambig, ty);
         }
-        ExprKind::Lit(_) | ExprKind::Err(_) => {}
+        ExprKind::Lit(lit) => try_visit!(visitor.visit_lit(expression.hir_id, lit, false)),
+        ExprKind::Err(_) => {}
     }
     V::Result::output()
 }
@@ -19,7 +19,7 @@ use rustc_middle::span_bug;
 use rustc_middle::ty::error::TypeErrorToStringExt;
 use rustc_middle::ty::fold::{BottomUpFolder, fold_regions};
 use rustc_middle::ty::layout::{LayoutError, MAX_SIMD_LANES};
-use rustc_middle::ty::util::{Discr, InspectCoroutineFields, IntTypeExt};
+use rustc_middle::ty::util::{Discr, IntTypeExt};
 use rustc_middle::ty::{
     AdtDef, GenericArgKind, RegionKind, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
 };
@@ -257,32 +257,11 @@ pub(super) fn check_opaque_for_cycles<'tcx>(
 
     // First, try to look at any opaque expansion cycles, considering coroutine fields
     // (even though these aren't necessarily true errors).
-    if tcx
-        .try_expand_impl_trait_type(def_id.to_def_id(), args, InspectCoroutineFields::Yes)
-        .is_err()
-    {
-        // Look for true opaque expansion cycles, but ignore coroutines.
-        // This will give us any true errors. Coroutines are only problematic
-        // if they cause layout computation errors.
-        if tcx
-            .try_expand_impl_trait_type(def_id.to_def_id(), args, InspectCoroutineFields::No)
-            .is_err()
-        {
-            let reported = opaque_type_cycle_error(tcx, def_id);
-            return Err(reported);
-        }
-
-        // And also look for cycle errors in the layout of coroutines.
-        if let Err(&LayoutError::Cycle(guar)) =
-            tcx.layout_of(
-                ty::TypingEnv::post_analysis(tcx, def_id.to_def_id())
-                    .as_query_input(Ty::new_opaque(tcx, def_id.to_def_id(), args)),
-            )
-        {
-            return Err(guar);
-        }
+    if tcx.try_expand_impl_trait_type(def_id.to_def_id(), args).is_err() {
+        let reported = opaque_type_cycle_error(tcx, def_id);
+        return Err(reported);
     }
 
     Ok(())
 }
@ -306,6 +306,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
lang_item_for_op(self.tcx, Op::Binary(op, is_assign), op.span);
|
||||
let missing_trait = trait_def_id
|
||||
.map(|def_id| with_no_trimmed_paths!(self.tcx.def_path_str(def_id)));
|
||||
let mut path = None;
|
||||
let lhs_ty_str = self.tcx.short_string(lhs_ty, &mut path);
|
||||
let rhs_ty_str = self.tcx.short_string(rhs_ty, &mut path);
|
||||
let (mut err, output_def_id) = match is_assign {
|
||||
IsAssign::Yes => {
|
||||
let mut err = struct_span_code_err!(
|
||||
|
@ -314,11 +317,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
E0368,
|
||||
"binary assignment operation `{}=` cannot be applied to type `{}`",
|
||||
op.node.as_str(),
|
||||
lhs_ty,
|
||||
lhs_ty_str,
|
||||
);
|
||||
err.span_label(
|
||||
lhs_expr.span,
|
||||
format!("cannot use `{}=` on type `{}`", op.node.as_str(), lhs_ty),
|
||||
format!("cannot use `{}=` on type `{}`", op.node.as_str(), lhs_ty_str),
|
||||
);
|
||||
self.note_unmet_impls_on_type(&mut err, errors, false);
|
||||
(err, None)
|
||||
|
@ -326,41 +329,42 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
IsAssign::No => {
|
||||
let message = match op.node {
|
||||
hir::BinOpKind::Add => {
|
||||
format!("cannot add `{rhs_ty}` to `{lhs_ty}`")
|
||||
format!("cannot add `{rhs_ty_str}` to `{lhs_ty_str}`")
|
||||
}
|
||||
hir::BinOpKind::Sub => {
|
||||
format!("cannot subtract `{rhs_ty}` from `{lhs_ty}`")
|
||||
format!("cannot subtract `{rhs_ty_str}` from `{lhs_ty_str}`")
|
||||
}
|
||||
hir::BinOpKind::Mul => {
|
||||
format!("cannot multiply `{lhs_ty}` by `{rhs_ty}`")
|
||||
format!("cannot multiply `{lhs_ty_str}` by `{rhs_ty_str}`")
|
||||
}
|
||||
hir::BinOpKind::Div => {
|
||||
format!("cannot divide `{lhs_ty}` by `{rhs_ty}`")
|
||||
format!("cannot divide `{lhs_ty_str}` by `{rhs_ty_str}`")
|
||||
}
|
||||
hir::BinOpKind::Rem => {
|
||||
format!(
|
||||
"cannot calculate the remainder of `{lhs_ty}` divided by `{rhs_ty}`"
|
||||
"cannot calculate the remainder of `{lhs_ty_str}` divided by \
|
||||
`{rhs_ty_str}`"
|
||||
)
|
||||
}
|
||||
hir::BinOpKind::BitAnd => {
|
||||
format!("no implementation for `{lhs_ty} & {rhs_ty}`")
|
||||
format!("no implementation for `{lhs_ty_str} & {rhs_ty_str}`")
|
||||
}
|
||||
hir::BinOpKind::BitXor => {
|
||||
format!("no implementation for `{lhs_ty} ^ {rhs_ty}`")
|
||||
format!("no implementation for `{lhs_ty_str} ^ {rhs_ty_str}`")
|
||||
}
|
||||
hir::BinOpKind::BitOr => {
|
||||
format!("no implementation for `{lhs_ty} | {rhs_ty}`")
|
||||
format!("no implementation for `{lhs_ty_str} | {rhs_ty_str}`")
|
||||
}
|
||||
hir::BinOpKind::Shl => {
|
||||
format!("no implementation for `{lhs_ty} << {rhs_ty}`")
|
||||
format!("no implementation for `{lhs_ty_str} << {rhs_ty_str}`")
|
||||
}
|
||||
hir::BinOpKind::Shr => {
|
||||
format!("no implementation for `{lhs_ty} >> {rhs_ty}`")
|
||||
format!("no implementation for `{lhs_ty_str} >> {rhs_ty_str}`")
|
||||
}
|
||||
_ => format!(
|
||||
"binary operation `{}` cannot be applied to type `{}`",
|
||||
op.node.as_str(),
|
||||
lhs_ty
|
||||
lhs_ty_str,
|
||||
),
|
||||
};
|
||||
let output_def_id = trait_def_id.and_then(|def_id| {
|
||||
|
@ -375,14 +379,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
let mut err =
|
||||
struct_span_code_err!(self.dcx(), op.span, E0369, "{message}");
|
||||
if !lhs_expr.span.eq(&rhs_expr.span) {
|
||||
err.span_label(lhs_expr.span, lhs_ty.to_string());
|
||||
err.span_label(rhs_expr.span, rhs_ty.to_string());
|
||||
err.span_label(lhs_expr.span, lhs_ty_str.clone());
|
||||
err.span_label(rhs_expr.span, rhs_ty_str);
|
||||
}
|
||||
let suggest_derive = self.can_eq(self.param_env, lhs_ty, rhs_ty);
|
||||
self.note_unmet_impls_on_type(&mut err, errors, suggest_derive);
|
||||
(err, output_def_id)
|
||||
}
|
||||
};
|
||||
*err.long_ty_path() = path;
|
||||
|
||||
// Try to suggest a semicolon if it's `A \n *B` where `B` is a place expr
|
||||
let maybe_missing_semi = self.check_for_missing_semi(expr, &mut err);
|
||||
|
@ -417,7 +422,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
IsAssign::Yes => "=",
|
||||
IsAssign::No => "",
|
||||
},
|
||||
lhs_deref_ty,
|
||||
self.tcx.short_string(lhs_deref_ty, err.long_ty_path()),
|
||||
);
|
||||
err.span_suggestion_verbose(
|
||||
lhs_expr.span.shrink_to_lo(),
|
||||
|
@ -443,8 +448,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
)
|
||||
.is_ok()
|
||||
{
|
||||
let op_str = op.node.as_str();
|
||||
err.note(format!("an implementation for `{lhs_adjusted_ty} {op_str} {rhs_adjusted_ty}` exists"));
|
||||
let lhs = self.tcx.short_string(lhs_adjusted_ty, err.long_ty_path());
|
||||
let rhs = self.tcx.short_string(rhs_adjusted_ty, err.long_ty_path());
|
||||
let op = op.node.as_str();
|
||||
err.note(format!("an implementation for `{lhs} {op} {rhs}` exists"));
|
||||
|
||||
if let Some(lhs_new_mutbl) = lhs_new_mutbl
|
||||
&& let Some(rhs_new_mutbl) = rhs_new_mutbl
|
||||
|
@ -628,7 +635,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
// When we know that a missing bound is responsible, we don't show
|
||||
// this note as it is redundant.
|
||||
err.note(format!(
|
||||
"the trait `{missing_trait}` is not implemented for `{lhs_ty}`"
|
||||
"the trait `{missing_trait}` is not implemented for `{lhs_ty_str}`"
|
||||
));
|
||||
}
|
||||
}
|
||||
|
@ -654,24 +661,32 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
hir::BinOpKind::Sub => {
|
||||
if lhs_ty.is_unsafe_ptr() && rhs_ty.is_integral() {
|
||||
err.multipart_suggestion(
|
||||
"consider using `wrapping_sub` or `sub` for pointer - {integer}",
|
||||
"consider using `wrapping_sub` or `sub` for \
|
||||
pointer - {integer}",
|
||||
vec![
|
||||
(lhs_expr.span.between(rhs_expr.span), ".wrapping_sub(".to_owned()),
|
||||
(
|
||||
lhs_expr.span.between(rhs_expr.span),
|
||||
".wrapping_sub(".to_owned(),
|
||||
),
|
||||
(rhs_expr.span.shrink_to_hi(), ")".to_owned()),
|
||||
],
|
||||
Applicability::MaybeIncorrect
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
|
||||
if lhs_ty.is_unsafe_ptr() && rhs_ty.is_unsafe_ptr() {
|
||||
err.multipart_suggestion(
|
||||
"consider using `offset_from` for pointer - pointer if the pointers point to the same allocation",
|
||||
"consider using `offset_from` for pointer - pointer if the \
|
||||
pointers point to the same allocation",
|
||||
vec![
|
||||
(lhs_expr.span.shrink_to_lo(), "unsafe { ".to_owned()),
|
||||
(lhs_expr.span.between(rhs_expr.span), ".offset_from(".to_owned()),
|
||||
(
|
||||
lhs_expr.span.between(rhs_expr.span),
|
||||
".offset_from(".to_owned(),
|
||||
),
|
||||
(rhs_expr.span.shrink_to_hi(), ") }".to_owned()),
|
||||
],
|
||||
Applicability::MaybeIncorrect
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -793,14 +808,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
Err(errors) => {
|
||||
let actual = self.resolve_vars_if_possible(operand_ty);
|
||||
let guar = actual.error_reported().err().unwrap_or_else(|| {
|
||||
let mut file = None;
|
||||
let ty_str = self.tcx.short_string(actual, &mut file);
|
||||
let mut err = struct_span_code_err!(
|
||||
self.dcx(),
|
||||
ex.span,
|
||||
E0600,
|
||||
"cannot apply unary operator `{}` to type `{}`",
|
||||
"cannot apply unary operator `{}` to type `{ty_str}`",
|
||||
op.as_str(),
|
||||
actual
|
||||
);
|
||||
*err.long_ty_path() = file;
|
||||
err.span_label(
|
||||
ex.span,
|
||||
format!("cannot apply unary operator `{}`", op.as_str()),
|
||||
|
|
|
@@ -909,6 +909,11 @@ fn run_required_analyses(tcx: TyCtxt<'_>) {
                 tcx.ensure_ok().check_coroutine_obligations(
                     tcx.typeck_root_def_id(def_id.to_def_id()).expect_local(),
                 );
+                // Eagerly check the unsubstituted layout for cycles.
+                tcx.ensure_ok().layout_of(
+                    ty::TypingEnv::post_analysis(tcx, def_id.to_def_id())
+                        .as_query_input(tcx.type_of(def_id).instantiate_identity()),
+                );
             }
         });
     });
@@ -152,6 +152,10 @@ impl<'tcx, T: LateLintPass<'tcx>> hir_visit::Visitor<'tcx> for LateContextAndPas
         hir_visit::walk_pat(self, p);
     }
 
+    fn visit_lit(&mut self, hir_id: HirId, lit: &'tcx hir::Lit, negated: bool) {
+        lint_callback!(self, check_lit, hir_id, lit, negated);
+    }
+
     fn visit_expr_field(&mut self, field: &'tcx hir::ExprField<'tcx>) {
         self.with_lint_attrs(field.hir_id, |cx| hir_visit::walk_expr_field(cx, field))
     }
@@ -23,6 +23,7 @@ macro_rules! late_lint_methods {
             fn check_stmt(a: &'tcx rustc_hir::Stmt<'tcx>);
             fn check_arm(a: &'tcx rustc_hir::Arm<'tcx>);
             fn check_pat(a: &'tcx rustc_hir::Pat<'tcx>);
+            fn check_lit(hir_id: rustc_hir::HirId, a: &'tcx rustc_hir::Lit, negated: bool);
             fn check_expr(a: &'tcx rustc_hir::Expr<'tcx>);
             fn check_expr_post(a: &'tcx rustc_hir::Expr<'tcx>);
             fn check_ty(a: &'tcx rustc_hir::Ty<'tcx, rustc_hir::AmbigArg>);
@@ -5,7 +5,7 @@ use rustc_abi::{BackendRepr, ExternAbi, TagEncoding, VariantIdx, Variants, Wrapp
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::DiagMessage;
 use rustc_hir::intravisit::VisitorExt;
-use rustc_hir::{AmbigArg, Expr, ExprKind, LangItem};
+use rustc_hir::{AmbigArg, Expr, ExprKind, HirId, LangItem};
 use rustc_middle::bug;
 use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton};
 use rustc_middle::ty::{
@@ -536,6 +536,16 @@ fn lint_fn_pointer<'tcx>(
 }
 
 impl<'tcx> LateLintPass<'tcx> for TypeLimits {
+    fn check_lit(
+        &mut self,
+        cx: &LateContext<'tcx>,
+        hir_id: HirId,
+        lit: &'tcx hir::Lit,
+        negated: bool,
+    ) {
+        lint_literal(cx, self, hir_id, lit.span, lit, negated)
+    }
+
     fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>) {
         match e.kind {
             hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
@@ -557,7 +567,6 @@ impl<'tcx> LateLintPass<'tcx> for TypeLimits {
                     }
                 }
             }
-            hir::ExprKind::Lit(lit) => lint_literal(cx, self, e, lit),
             hir::ExprKind::Call(path, [l, r])
                 if let ExprKind::Path(ref qpath) = path.kind
                     && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
@ -1,8 +1,10 @@
|
|||
use hir::{ExprKind, Node, is_range_literal};
|
||||
use rustc_abi::{Integer, Size};
|
||||
use rustc_hir::HirId;
|
||||
use rustc_middle::ty::Ty;
|
||||
use rustc_middle::ty::layout::IntegerExt;
|
||||
use rustc_middle::{bug, ty};
|
||||
use rustc_span::Span;
|
||||
use {rustc_ast as ast, rustc_attr_parsing as attr, rustc_hir as hir};
|
||||
|
||||
use crate::LateContext;
|
||||
|
@ -21,21 +23,22 @@ fn lint_overflowing_range_endpoint<'tcx>(
|
|||
lit: &hir::Lit,
|
||||
lit_val: u128,
|
||||
max: u128,
|
||||
expr: &'tcx hir::Expr<'tcx>,
|
||||
hir_id: HirId,
|
||||
lit_span: Span,
|
||||
ty: &str,
|
||||
) -> bool {
|
||||
// Look past casts to support cases like `0..256 as u8`
|
||||
let (expr, lit_span) = if let Node::Expr(par_expr) = cx.tcx.parent_hir_node(expr.hir_id)
|
||||
let (hir_id, span) = if let Node::Expr(par_expr) = cx.tcx.parent_hir_node(hir_id)
|
||||
&& let ExprKind::Cast(_, _) = par_expr.kind
|
||||
{
|
||||
(par_expr, expr.span)
|
||||
(par_expr.hir_id, par_expr.span)
|
||||
} else {
|
||||
(expr, expr.span)
|
||||
(hir_id, lit_span)
|
||||
};
|
||||
|
||||
// We only want to handle exclusive (`..`) ranges,
|
||||
// which are represented as `ExprKind::Struct`.
|
||||
let Node::ExprField(field) = cx.tcx.parent_hir_node(expr.hir_id) else { return false };
|
||||
let Node::ExprField(field) = cx.tcx.parent_hir_node(hir_id) else { return false };
|
||||
let Node::Expr(struct_expr) = cx.tcx.parent_hir_node(field.hir_id) else { return false };
|
||||
if !is_range_literal(struct_expr) {
|
||||
return false;
|
||||
|
@ -45,7 +48,7 @@ fn lint_overflowing_range_endpoint<'tcx>(
|
|||
// We can suggest using an inclusive range
|
||||
// (`..=`) instead only if it is the `end` that is
|
||||
// overflowing and only by 1.
|
||||
if !(end.expr.hir_id == expr.hir_id && lit_val - 1 == max) {
|
||||
if !(end.expr.hir_id == hir_id && lit_val - 1 == max) {
|
||||
return false;
|
||||
};
|
||||
|
||||
|
@ -57,7 +60,7 @@ fn lint_overflowing_range_endpoint<'tcx>(
|
|||
_ => bug!(),
|
||||
};
|
||||
|
||||
let sub_sugg = if expr.span.lo() == lit_span.lo() {
|
||||
let sub_sugg = if span.lo() == lit_span.lo() {
|
||||
let Ok(start) = cx.sess().source_map().span_to_snippet(start.span) else { return false };
|
||||
UseInclusiveRange::WithoutParen {
|
||||
sugg: struct_expr.span.shrink_to_lo().to(lit_span.shrink_to_hi()),
|
||||
|
@ -67,7 +70,7 @@ fn lint_overflowing_range_endpoint<'tcx>(
|
|||
}
|
||||
} else {
|
||||
UseInclusiveRange::WithParen {
|
||||
eq_sugg: expr.span.shrink_to_lo(),
|
||||
eq_sugg: span.shrink_to_lo(),
|
||||
lit_sugg: lit_span,
|
||||
literal: lit_val - 1,
|
||||
suffix,
|
||||
|
@ -125,7 +128,8 @@ fn get_bin_hex_repr(cx: &LateContext<'_>, lit: &hir::Lit) -> Option<String> {
|
|||
|
||||
fn report_bin_hex_error(
|
||||
cx: &LateContext<'_>,
|
||||
expr: &hir::Expr<'_>,
|
||||
hir_id: HirId,
|
||||
span: Span,
|
||||
ty: attr::IntType,
|
||||
size: Size,
|
||||
repr_str: String,
|
||||
|
@ -144,11 +148,11 @@ fn report_bin_hex_error(
|
|||
};
|
||||
let sign =
|
||||
if negative { OverflowingBinHexSign::Negative } else { OverflowingBinHexSign::Positive };
|
||||
let sub = get_type_suggestion(cx.typeck_results().node_type(expr.hir_id), val, negative).map(
|
||||
let sub = get_type_suggestion(cx.typeck_results().node_type(hir_id), val, negative).map(
|
||||
|suggestion_ty| {
|
||||
if let Some(pos) = repr_str.chars().position(|c| c == 'i' || c == 'u') {
|
||||
let (sans_suffix, _) = repr_str.split_at(pos);
|
||||
OverflowingBinHexSub::Suggestion { span: expr.span, suggestion_ty, sans_suffix }
|
||||
OverflowingBinHexSub::Suggestion { span, suggestion_ty, sans_suffix }
|
||||
} else {
|
||||
OverflowingBinHexSub::Help { suggestion_ty }
|
||||
}
|
||||
|
@ -156,7 +160,7 @@ fn report_bin_hex_error(
|
|||
);
|
||||
let sign_bit_sub = (!negative)
|
||||
.then(|| {
|
||||
let ty::Int(int_ty) = cx.typeck_results().node_type(expr.hir_id).kind() else {
|
||||
let ty::Int(int_ty) = cx.typeck_results().node_type(hir_id).kind() else {
|
||||
return None;
|
||||
};
|
||||
|
||||
|
@ -177,7 +181,7 @@ fn report_bin_hex_error(
|
|||
};
|
||||
|
||||
Some(OverflowingBinHexSignBitSub {
|
||||
span: expr.span,
|
||||
span,
|
||||
lit_no_suffix,
|
||||
negative_val: actually.clone(),
|
||||
int_ty: int_ty.name_str(),
|
||||
|
@ -186,7 +190,7 @@ fn report_bin_hex_error(
|
|||
})
|
||||
.flatten();
|
||||
|
||||
cx.emit_span_lint(OVERFLOWING_LITERALS, expr.span, OverflowingBinHex {
|
||||
cx.emit_span_lint(OVERFLOWING_LITERALS, span, OverflowingBinHex {
|
||||
ty: t,
|
||||
lit: repr_str.clone(),
|
||||
dec: val,
|
||||
|
@ -236,15 +240,17 @@ fn literal_to_i128(val: u128, negative: bool) -> Option<i128> {
|
|||
fn lint_int_literal<'tcx>(
|
||||
cx: &LateContext<'tcx>,
|
||||
type_limits: &TypeLimits,
|
||||
e: &'tcx hir::Expr<'tcx>,
|
||||
hir_id: HirId,
|
||||
span: Span,
|
||||
lit: &hir::Lit,
|
||||
t: ty::IntTy,
|
||||
v: u128,
|
||||
negated: bool,
|
||||
) {
|
||||
let int_type = t.normalize(cx.sess().target.pointer_width);
|
||||
let (min, max) = int_ty_range(int_type);
|
||||
let max = max as u128;
|
||||
let negative = type_limits.negated_expr_id == Some(e.hir_id);
|
||||
let negative = negated ^ (type_limits.negated_expr_id == Some(hir_id));
|
||||
|
||||
// Detect literal value out of range [min, max] inclusive
|
||||
// avoiding use of -min to prevent overflow/panic
|
||||
|
@ -252,7 +258,8 @@ fn lint_int_literal<'tcx>(
|
|||
if let Some(repr_str) = get_bin_hex_repr(cx, lit) {
|
||||
report_bin_hex_error(
|
||||
cx,
|
||||
e,
|
||||
hir_id,
|
||||
span,
|
||||
attr::IntType::SignedInt(ty::ast_int_ty(t)),
|
||||
Integer::from_int_ty(cx, t).size(),
|
||||
repr_str,
|
||||
|
@ -262,18 +269,18 @@ fn lint_int_literal<'tcx>(
|
|||
return;
|
||||
}
|
||||
|
||||
if lint_overflowing_range_endpoint(cx, lit, v, max, e, t.name_str()) {
|
||||
if lint_overflowing_range_endpoint(cx, lit, v, max, hir_id, span, t.name_str()) {
|
||||
// The overflowing literal lint was emitted by `lint_overflowing_range_endpoint`.
|
||||
return;
|
||||
}
|
||||
|
||||
let span = if negative { type_limits.negated_expr_span.unwrap() } else { e.span };
|
||||
let span = if negative { type_limits.negated_expr_span.unwrap() } else { span };
|
||||
let lit = cx
|
||||
.sess()
|
||||
.source_map()
|
||||
.span_to_snippet(span)
|
||||
.unwrap_or_else(|_| if negative { format!("-{v}") } else { v.to_string() });
|
||||
let help = get_type_suggestion(cx.typeck_results().node_type(e.hir_id), v, negative)
|
||||
let help = get_type_suggestion(cx.typeck_results().node_type(hir_id), v, negative)
|
||||
.map(|suggestion_ty| OverflowingIntHelp { suggestion_ty });
|
||||
|
||||
cx.emit_span_lint(OVERFLOWING_LITERALS, span, OverflowingInt {
|
||||
|
@ -288,7 +295,8 @@ fn lint_int_literal<'tcx>(
|
|||
|
||||
fn lint_uint_literal<'tcx>(
|
||||
cx: &LateContext<'tcx>,
|
||||
e: &'tcx hir::Expr<'tcx>,
|
||||
hir_id: HirId,
|
||||
span: Span,
|
||||
lit: &hir::Lit,
|
||||
t: ty::UintTy,
|
||||
) {
|
||||
|
@ -302,7 +310,7 @@ fn lint_uint_literal<'tcx>(
|
|||
};
|
||||
|
||||
if lit_val < min || lit_val > max {
|
||||
if let Node::Expr(par_e) = cx.tcx.parent_hir_node(e.hir_id) {
|
||||
if let Node::Expr(par_e) = cx.tcx.parent_hir_node(hir_id) {
|
||||
match par_e.kind {
|
||||
hir::ExprKind::Cast(..) => {
|
||||
if let ty::Char = cx.typeck_results().expr_ty(par_e).kind() {
|
||||
|
@ -316,14 +324,15 @@ fn lint_uint_literal<'tcx>(
|
|||
_ => {}
|
||||
}
|
||||
}
|
||||
if lint_overflowing_range_endpoint(cx, lit, lit_val, max, e, t.name_str()) {
|
||||
if lint_overflowing_range_endpoint(cx, lit, lit_val, max, hir_id, span, t.name_str()) {
|
||||
// The overflowing literal lint was emitted by `lint_overflowing_range_endpoint`.
|
||||
return;
|
||||
}
|
||||
if let Some(repr_str) = get_bin_hex_repr(cx, lit) {
|
||||
report_bin_hex_error(
|
||||
cx,
|
||||
e,
|
||||
hir_id,
|
||||
span,
|
||||
attr::IntType::UnsignedInt(ty::ast_uint_ty(t)),
|
||||
Integer::from_uint_ty(cx, t).size(),
|
||||
repr_str,
|
||||
|
@ -332,7 +341,7 @@ fn lint_uint_literal<'tcx>(
|
|||
);
|
||||
return;
|
||||
}
|
||||
cx.emit_span_lint(OVERFLOWING_LITERALS, e.span, OverflowingUInt {
|
||||
cx.emit_span_lint(OVERFLOWING_LITERALS, span, OverflowingUInt {
|
||||
ty: t.name_str(),
|
||||
lit: cx
|
||||
.sess()
|
||||
|
@ -348,19 +357,24 @@ fn lint_uint_literal<'tcx>(
|
|||
pub(crate) fn lint_literal<'tcx>(
|
||||
cx: &LateContext<'tcx>,
|
||||
type_limits: &TypeLimits,
|
||||
e: &'tcx hir::Expr<'tcx>,
|
||||
hir_id: HirId,
|
||||
span: Span,
|
||||
lit: &hir::Lit,
|
||||
negated: bool,
|
||||
) {
|
||||
match *cx.typeck_results().node_type(e.hir_id).kind() {
|
||||
match *cx.typeck_results().node_type(hir_id).kind() {
|
||||
ty::Int(t) => {
|
||||
match lit.node {
|
||||
ast::LitKind::Int(v, ast::LitIntType::Signed(_) | ast::LitIntType::Unsuffixed) => {
|
||||
lint_int_literal(cx, type_limits, e, lit, t, v.get())
|
||||
lint_int_literal(cx, type_limits, hir_id, span, lit, t, v.get(), negated)
|
||||
}
|
||||
_ => bug!(),
|
||||
};
|
||||
}
|
||||
ty::Uint(t) => lint_uint_literal(cx, e, lit, t),
|
||||
ty::Uint(t) => {
|
||||
assert!(!negated);
|
||||
lint_uint_literal(cx, hir_id, span, lit, t)
|
||||
}
|
||||
ty::Float(t) => {
|
||||
let (is_infinite, sym) = match lit.node {
|
||||
ast::LitKind::Float(v, _) => match t {
|
||||
|
@ -374,7 +388,7 @@ pub(crate) fn lint_literal<'tcx>(
|
|||
_ => bug!(),
|
||||
};
|
||||
if is_infinite == Ok(true) {
|
||||
cx.emit_span_lint(OVERFLOWING_LITERALS, e.span, OverflowingLiteral {
|
||||
cx.emit_span_lint(OVERFLOWING_LITERALS, span, OverflowingLiteral {
|
||||
ty: t.name_str(),
|
||||
lit: cx
|
||||
.sess()
|
||||
|
|
|
@@ -98,6 +98,10 @@ declare_hooks! {
     hook save_dep_graph() -> ();
 
     hook query_key_hash_verify_all() -> ();
+
+    /// Ensure the given scalar is valid for the given type.
+    /// This checks non-recursive runtime validity.
+    hook validate_scalar_in_layout(scalar: crate::ty::ScalarInt, ty: Ty<'tcx>) -> bool;
 }
 
 #[cold]
@@ -11,6 +11,7 @@
 use std::cmp::Ordering;
 use std::fmt;
 use std::ops::Index;
+use std::sync::Arc;
 
 use rustc_abi::{FieldIdx, Integer, Size, VariantIdx};
 use rustc_ast::{AsmMacro, InlineAsmOptions, InlineAsmTemplatePiece};
@@ -618,7 +619,7 @@ pub enum InlineAsmOperand<'tcx> {
 #[derive(Debug, HashStable, TypeVisitable)]
 pub struct FieldPat<'tcx> {
     pub field: FieldIdx,
-    pub pattern: Box<Pat<'tcx>>,
+    pub pattern: Pat<'tcx>,
 }
 
 #[derive(Debug, HashStable, TypeVisitable)]
@@ -679,7 +680,7 @@ impl<'tcx> Pat<'tcx> {
             Or { pats } => pats.iter().for_each(|p| p.walk_(it)),
             Array { box ref prefix, ref slice, box ref suffix }
             | Slice { box ref prefix, ref slice, box ref suffix } => {
-                prefix.iter().chain(slice.iter()).chain(suffix.iter()).for_each(|p| p.walk_(it))
+                prefix.iter().chain(slice.as_deref()).chain(suffix.iter()).for_each(|p| p.walk_(it))
             }
         }
     }
@@ -836,28 +837,28 @@ pub enum PatKind<'tcx> {
         subpattern: Box<Pat<'tcx>>,
     },
 
-    Range(Box<PatRange<'tcx>>),
+    Range(Arc<PatRange<'tcx>>),
 
     /// Matches against a slice, checking the length and extracting elements.
     /// irrefutable when there is a slice pattern and both `prefix` and `suffix` are empty.
     /// e.g., `&[ref xs @ ..]`.
     Slice {
-        prefix: Box<[Box<Pat<'tcx>>]>,
+        prefix: Box<[Pat<'tcx>]>,
         slice: Option<Box<Pat<'tcx>>>,
-        suffix: Box<[Box<Pat<'tcx>>]>,
+        suffix: Box<[Pat<'tcx>]>,
     },
 
     /// Fixed match against an array; irrefutable.
     Array {
-        prefix: Box<[Box<Pat<'tcx>>]>,
+        prefix: Box<[Pat<'tcx>]>,
         slice: Option<Box<Pat<'tcx>>>,
-        suffix: Box<[Box<Pat<'tcx>>]>,
+        suffix: Box<[Pat<'tcx>]>,
     },
 
     /// An or-pattern, e.g. `p | q`.
     /// Invariant: `pats.len() >= 2`.
     Or {
-        pats: Box<[Box<Pat<'tcx>>]>,
+        pats: Box<[Pat<'tcx>]>,
     },
 
     /// A never pattern `!`.
@@ -1741,7 +1741,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
                     " as ",
                 )?;
             }
-            ty::Pat(base_ty, pat) => {
+            ty::Pat(base_ty, pat) if self.tcx().validate_scalar_in_layout(int, ty) => {
                 self.pretty_print_const_scalar_int(int, *base_ty, print_ty)?;
                 p!(write(" is {pat:?}"));
             }
@ -777,7 +777,6 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
self,
|
||||
def_id: DefId,
|
||||
args: GenericArgsRef<'tcx>,
|
||||
inspect_coroutine_fields: InspectCoroutineFields,
|
||||
) -> Result<Ty<'tcx>, Ty<'tcx>> {
|
||||
let mut visitor = OpaqueTypeExpander {
|
||||
seen_opaque_tys: FxHashSet::default(),
|
||||
|
@ -786,9 +785,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
found_recursion: false,
|
||||
found_any_recursion: false,
|
||||
check_recursion: true,
|
||||
expand_coroutines: true,
|
||||
tcx: self,
|
||||
inspect_coroutine_fields,
|
||||
};
|
||||
|
||||
let expanded_type = visitor.expand_opaque_ty(def_id, args).unwrap();
|
||||
|
@ -965,19 +962,11 @@ struct OpaqueTypeExpander<'tcx> {
|
|||
primary_def_id: Option<DefId>,
|
||||
found_recursion: bool,
|
||||
found_any_recursion: bool,
|
||||
expand_coroutines: bool,
|
||||
/// Whether or not to check for recursive opaque types.
|
||||
/// This is `true` when we're explicitly checking for opaque type
|
||||
/// recursion, and 'false' otherwise to avoid unnecessary work.
|
||||
check_recursion: bool,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
inspect_coroutine_fields: InspectCoroutineFields,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub enum InspectCoroutineFields {
|
||||
No,
|
||||
Yes,
|
||||
}
|
||||
|
||||
impl<'tcx> OpaqueTypeExpander<'tcx> {
|
||||
|
@ -1009,41 +998,6 @@ impl<'tcx> OpaqueTypeExpander<'tcx> {
|
|||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn expand_coroutine(&mut self, def_id: DefId, args: GenericArgsRef<'tcx>) -> Option<Ty<'tcx>> {
|
||||
if self.found_any_recursion {
|
||||
return None;
|
||||
}
|
||||
let args = args.fold_with(self);
|
||||
if !self.check_recursion || self.seen_opaque_tys.insert(def_id) {
|
||||
let expanded_ty = match self.expanded_cache.get(&(def_id, args)) {
|
||||
Some(expanded_ty) => *expanded_ty,
|
||||
None => {
|
||||
if matches!(self.inspect_coroutine_fields, InspectCoroutineFields::Yes) {
|
||||
for bty in self.tcx.bound_coroutine_hidden_types(def_id) {
|
||||
let hidden_ty = self.tcx.instantiate_bound_regions_with_erased(
|
||||
bty.instantiate(self.tcx, args),
|
||||
);
|
||||
self.fold_ty(hidden_ty);
|
||||
}
|
||||
}
|
||||
let expanded_ty = Ty::new_coroutine_witness(self.tcx, def_id, args);
|
||||
self.expanded_cache.insert((def_id, args), expanded_ty);
|
||||
expanded_ty
|
||||
}
|
||||
};
|
||||
if self.check_recursion {
|
||||
self.seen_opaque_tys.remove(&def_id);
|
||||
}
|
||||
Some(expanded_ty)
|
||||
} else {
|
||||
// If another opaque type that we contain is recursive, then it
|
||||
// will report the error, so we don't have to.
|
||||
self.found_any_recursion = true;
|
||||
self.found_recursion = def_id == *self.primary_def_id.as_ref().unwrap();
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFolder<TyCtxt<'tcx>> for OpaqueTypeExpander<'tcx> {
|
||||
|
@ -1052,20 +1006,14 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for OpaqueTypeExpander<'tcx> {
|
|||
}
|
||||
|
||||
fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
|
||||
let mut t = if let ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) = *t.kind() {
|
||||
if let ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) = *t.kind() {
|
||||
self.expand_opaque_ty(def_id, args).unwrap_or(t)
|
||||
} else if t.has_opaque_types() || t.has_coroutines() {
|
||||
} else if t.has_opaque_types() {
|
||||
t.super_fold_with(self)
|
||||
} else {
|
||||
t
|
||||
};
|
||||
if self.expand_coroutines {
|
||||
if let ty::CoroutineWitness(def_id, args) = *t.kind() {
|
||||
t = self.expand_coroutine(def_id, args).unwrap_or(t);
|
||||
}
|
||||
}
|
||||
t
|
||||
}
|
||||
|
||||
fn fold_predicate(&mut self, p: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {
|
||||
if let ty::PredicateKind::Clause(clause) = p.kind().skip_binder()
|
||||
|
@ -1753,9 +1701,7 @@ pub fn reveal_opaque_types_in_bounds<'tcx>(
|
|||
found_recursion: false,
|
||||
found_any_recursion: false,
|
||||
check_recursion: false,
|
||||
expand_coroutines: false,
|
||||
tcx,
|
||||
inspect_coroutine_fields: InspectCoroutineFields::No,
|
||||
};
|
||||
val.fold_with(&mut visitor)
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use rustc_middle::mir::*;
|
||||
use rustc_middle::thir::{self, *};
|
||||
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
|
||||
|
@ -12,11 +14,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// [`PatKind::Leaf`].
|
||||
///
|
||||
/// Used internally by [`MatchPairTree::for_pattern`].
|
||||
fn field_match_pairs<'pat>(
|
||||
fn field_match_pairs(
|
||||
&mut self,
|
||||
place: PlaceBuilder<'tcx>,
|
||||
subpatterns: &'pat [FieldPat<'tcx>],
|
||||
) -> Vec<MatchPairTree<'pat, 'tcx>> {
|
||||
subpatterns: &[FieldPat<'tcx>],
|
||||
) -> Vec<MatchPairTree<'tcx>> {
|
||||
subpatterns
|
||||
.iter()
|
||||
.map(|fieldpat| {
|
||||
|
@ -31,13 +33,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// array pattern or slice pattern, and adds those trees to `match_pairs`.
|
||||
///
|
||||
/// Used internally by [`MatchPairTree::for_pattern`].
|
||||
fn prefix_slice_suffix<'pat>(
|
||||
fn prefix_slice_suffix(
|
||||
&mut self,
|
||||
match_pairs: &mut Vec<MatchPairTree<'pat, 'tcx>>,
|
||||
match_pairs: &mut Vec<MatchPairTree<'tcx>>,
|
||||
place: &PlaceBuilder<'tcx>,
|
||||
prefix: &'pat [Box<Pat<'tcx>>],
|
||||
opt_slice: &'pat Option<Box<Pat<'tcx>>>,
|
||||
suffix: &'pat [Box<Pat<'tcx>>],
|
||||
prefix: &[Pat<'tcx>],
|
||||
opt_slice: &Option<Box<Pat<'tcx>>>,
|
||||
suffix: &[Pat<'tcx>],
|
||||
) {
|
||||
let tcx = self.tcx;
|
||||
let (min_length, exact_size) = if let Some(place_resolved) = place.try_to_place(self) {
|
||||
|
@ -83,14 +85,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'pat, 'tcx> MatchPairTree<'pat, 'tcx> {
|
||||
impl<'tcx> MatchPairTree<'tcx> {
|
||||
/// Recursively builds a match pair tree for the given pattern and its
|
||||
/// subpatterns.
|
||||
pub(in crate::builder) fn for_pattern(
|
||||
mut place_builder: PlaceBuilder<'tcx>,
|
||||
pattern: &'pat Pat<'tcx>,
|
||||
pattern: &Pat<'tcx>,
|
||||
cx: &mut Builder<'_, 'tcx>,
|
||||
) -> MatchPairTree<'pat, 'tcx> {
|
||||
) -> MatchPairTree<'tcx> {
|
||||
// Force the place type to the pattern's type.
|
||||
// FIXME(oli-obk): can we use this to simplify slice/array pattern hacks?
|
||||
if let Some(resolved) = place_builder.resolve_upvar(cx) {
|
||||
|
@ -125,7 +127,7 @@ impl<'pat, 'tcx> MatchPairTree<'pat, 'tcx> {
|
|||
if range.is_full_range(cx.tcx) == Some(true) {
|
||||
default_irrefutable()
|
||||
} else {
|
||||
TestCase::Range(range)
|
||||
TestCase::Range(Arc::clone(range))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -255,6 +257,12 @@ impl<'pat, 'tcx> MatchPairTree<'pat, 'tcx> {
|
|||
PatKind::Never => TestCase::Never,
|
||||
};
|
||||
|
||||
MatchPairTree { place, test_case, subpairs, pattern }
|
||||
MatchPairTree {
|
||||
place,
|
||||
test_case,
|
||||
subpairs,
|
||||
pattern_ty: pattern.ty,
|
||||
pattern_span: pattern.span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,6 +33,7 @@ mod util;
|
|||
use std::assert_matches::assert_matches;
|
||||
use std::borrow::Borrow;
|
||||
use std::mem;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Arguments to [`Builder::then_else_break_inner`] that are usually forwarded
|
||||
/// to recursive invocations.
|
||||
|
@ -361,11 +362,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
let scrutinee_place =
|
||||
unpack!(block = self.lower_scrutinee(block, scrutinee_id, scrutinee_span));
|
||||
|
||||
let arms = arms.iter().map(|arm| &self.thir[*arm]);
|
||||
let match_start_span = span.shrink_to_lo().to(scrutinee_span);
|
||||
let patterns = arms
|
||||
.clone()
|
||||
.map(|arm| {
|
||||
.iter()
|
||||
.map(|&arm| {
|
||||
let arm = &self.thir[arm];
|
||||
let has_match_guard =
|
||||
if arm.guard.is_some() { HasMatchGuard::Yes } else { HasMatchGuard::No };
|
||||
(&*arm.pattern, has_match_guard)
|
||||
|
@ -412,20 +413,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// (by [Builder::lower_match_tree]).
|
||||
///
|
||||
/// `outer_source_info` is the SourceInfo for the whole match.
|
||||
fn lower_match_arms<'pat>(
|
||||
fn lower_match_arms(
|
||||
&mut self,
|
||||
destination: Place<'tcx>,
|
||||
scrutinee_place_builder: PlaceBuilder<'tcx>,
|
||||
scrutinee_span: Span,
|
||||
arms: impl IntoIterator<Item = &'pat Arm<'tcx>>,
|
||||
arms: &[ArmId],
|
||||
built_match_tree: BuiltMatchTree<'tcx>,
|
||||
outer_source_info: SourceInfo,
|
||||
) -> BlockAnd<()>
|
||||
where
|
||||
'tcx: 'pat,
|
||||
{
|
||||
) -> BlockAnd<()> {
|
||||
let arm_end_blocks: Vec<BasicBlock> = arms
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|&arm| &self.thir[arm])
|
||||
.zip(built_match_tree.branches)
|
||||
.map(|(arm, branch)| {
|
||||
debug!("lowering arm {:?}\ncorresponding branch = {:?}", arm, branch);
|
||||
|
@ -604,19 +603,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
// Optimize the case of `let x: T = ...` to write directly
|
||||
// into `x` and then require that `T == typeof(x)`.
|
||||
PatKind::AscribeUserType {
|
||||
subpattern:
|
||||
box Pat {
|
||||
kind:
|
||||
PatKind::Binding {
|
||||
ref subpattern,
|
||||
ascription: thir::Ascription { ref annotation, variance: _ },
|
||||
} if let PatKind::Binding {
|
||||
mode: BindingMode(ByRef::No, _),
|
||||
var,
|
||||
subpattern: None,
|
||||
..
|
||||
},
|
||||
..
|
||||
},
|
||||
ascription: thir::Ascription { ref annotation, variance: _ },
|
||||
} => {
|
||||
} = subpattern.kind =>
|
||||
{
|
||||
let place = self.storage_live_binding(
|
||||
block,
|
||||
var,
|
||||
|
@ -989,23 +984,19 @@ impl<'tcx> PatternExtraData<'tcx> {
|
|||
///
|
||||
/// Will typically be incorporated into a [`Candidate`].
|
||||
#[derive(Debug, Clone)]
|
||||
struct FlatPat<'pat, 'tcx> {
|
||||
struct FlatPat<'tcx> {
|
||||
/// To match the pattern, all of these must be satisfied...
|
||||
// Invariant: all the match pairs are recursively simplified.
|
||||
// Invariant: or-patterns must be sorted to the end.
|
||||
match_pairs: Vec<MatchPairTree<'pat, 'tcx>>,
|
||||
match_pairs: Vec<MatchPairTree<'tcx>>,
|
||||
|
||||
extra_data: PatternExtraData<'tcx>,
|
||||
}
|
||||
|
||||
impl<'tcx, 'pat> FlatPat<'pat, 'tcx> {
|
||||
impl<'tcx> FlatPat<'tcx> {
|
||||
/// Creates a `FlatPat` containing a simplified [`MatchPairTree`] list/forest
|
||||
/// for the given pattern.
|
||||
fn new(
|
||||
place: PlaceBuilder<'tcx>,
|
||||
pattern: &'pat Pat<'tcx>,
|
||||
cx: &mut Builder<'_, 'tcx>,
|
||||
) -> Self {
|
||||
fn new(place: PlaceBuilder<'tcx>, pattern: &Pat<'tcx>, cx: &mut Builder<'_, 'tcx>) -> Self {
|
||||
// First, recursively build a tree of match pairs for the given pattern.
|
||||
let mut match_pairs = vec![MatchPairTree::for_pattern(place, pattern, cx)];
|
||||
let mut extra_data = PatternExtraData {
|
||||
|
@ -1033,7 +1024,7 @@ impl<'tcx, 'pat> FlatPat<'pat, 'tcx> {
|
|||
/// of candidates, where each "leaf" candidate represents one of the ways for
|
||||
/// the arm pattern to successfully match.
|
||||
#[derive(Debug)]
|
||||
struct Candidate<'pat, 'tcx> {
|
||||
struct Candidate<'tcx> {
|
||||
/// For the candidate to match, all of these must be satisfied...
|
||||
///
|
||||
/// ---
|
||||
|
@ -1055,7 +1046,7 @@ struct Candidate<'pat, 'tcx> {
|
|||
/// Invariants:
|
||||
/// - All [`TestCase::Irrefutable`] patterns have been removed by simplification.
|
||||
/// - All or-patterns ([`TestCase::Or`]) have been sorted to the end.
|
||||
match_pairs: Vec<MatchPairTree<'pat, 'tcx>>,
|
||||
match_pairs: Vec<MatchPairTree<'tcx>>,
|
||||
|
||||
/// ...and if this is non-empty, one of these subcandidates also has to match...
|
||||
///
|
||||
|
@ -1072,7 +1063,7 @@ struct Candidate<'pat, 'tcx> {
|
|||
/// Invariant: at the end of match tree lowering, this must not contain an
|
||||
/// `is_never` candidate, because that would break binding consistency.
|
||||
/// - See [`Builder::remove_never_subcandidates`].
|
||||
subcandidates: Vec<Candidate<'pat, 'tcx>>,
|
||||
subcandidates: Vec<Candidate<'tcx>>,
|
||||
|
||||
/// ...and if there is a guard it must be evaluated; if it's `false` then branch to `otherwise_block`.
|
||||
///
|
||||
|
@ -1107,10 +1098,10 @@ struct Candidate<'pat, 'tcx> {
|
|||
false_edge_start_block: Option<BasicBlock>,
|
||||
}
|
||||
|
||||
impl<'tcx, 'pat> Candidate<'pat, 'tcx> {
|
||||
impl<'tcx> Candidate<'tcx> {
|
||||
fn new(
|
||||
place: PlaceBuilder<'tcx>,
|
||||
pattern: &'pat Pat<'tcx>,
|
||||
pattern: &Pat<'tcx>,
|
||||
has_guard: HasMatchGuard,
|
||||
cx: &mut Builder<'_, 'tcx>,
|
||||
) -> Self {
|
||||
|
@ -1123,7 +1114,7 @@ impl<'tcx, 'pat> Candidate<'pat, 'tcx> {
|
|||
}
|
||||
|
||||
/// Incorporates an already-simplified [`FlatPat`] into a new candidate.
|
||||
fn from_flat_pat(flat_pat: FlatPat<'pat, 'tcx>, has_guard: bool) -> Self {
|
||||
fn from_flat_pat(flat_pat: FlatPat<'tcx>, has_guard: bool) -> Self {
|
||||
Candidate {
|
||||
match_pairs: flat_pat.match_pairs,
|
||||
extra_data: flat_pat.extra_data,
|
||||
|
@ -1172,7 +1163,7 @@ impl<'tcx, 'pat> Candidate<'pat, 'tcx> {
|
|||
/// reference or by value, and to allow a mutable "context" to be shared by the
|
||||
/// traversal callbacks. Most traversals can use the simpler
|
||||
/// [`Candidate::visit_leaves`] wrapper instead.
|
||||
fn traverse_candidate<'pat, 'tcx: 'pat, C, T, I>(
|
||||
fn traverse_candidate<'tcx, C, T, I>(
|
||||
candidate: C,
|
||||
context: &mut T,
|
||||
// Called when visiting a "leaf" candidate (with no subcandidates).
|
||||
|
@ -1184,7 +1175,7 @@ fn traverse_candidate<'pat, 'tcx: 'pat, C, T, I>(
|
|||
// Called after visiting a "node" candidate's children.
|
||||
complete_children: impl Copy + Fn(&mut T),
|
||||
) where
|
||||
C: Borrow<Candidate<'pat, 'tcx>>, // Typically `Candidate` or `&mut Candidate`
|
||||
C: Borrow<Candidate<'tcx>>, // Typically `Candidate` or `&mut Candidate`
|
||||
I: Iterator<Item = C>,
|
||||
{
|
||||
if candidate.borrow().subcandidates.is_empty() {
|
||||
|
@ -1234,20 +1225,20 @@ struct Ascription<'tcx> {
|
|||
/// participate in or-pattern expansion, where they are transformed into subcandidates.
|
||||
/// - See [`Builder::expand_and_match_or_candidates`].
|
||||
#[derive(Debug, Clone)]
|
||||
enum TestCase<'pat, 'tcx> {
|
||||
enum TestCase<'tcx> {
|
||||
Irrefutable { binding: Option<Binding<'tcx>>, ascription: Option<Ascription<'tcx>> },
|
||||
Variant { adt_def: ty::AdtDef<'tcx>, variant_index: VariantIdx },
|
||||
Constant { value: mir::Const<'tcx> },
|
||||
Range(&'pat PatRange<'tcx>),
|
||||
Range(Arc<PatRange<'tcx>>),
|
||||
Slice { len: usize, variable_length: bool },
|
||||
Deref { temp: Place<'tcx>, mutability: Mutability },
|
||||
Never,
|
||||
Or { pats: Box<[FlatPat<'pat, 'tcx>]> },
|
||||
Or { pats: Box<[FlatPat<'tcx>]> },
|
||||
}
|
||||
|
||||
impl<'pat, 'tcx> TestCase<'pat, 'tcx> {
|
||||
fn as_range(&self) -> Option<&'pat PatRange<'tcx>> {
|
||||
if let Self::Range(v) = self { Some(*v) } else { None }
|
||||
impl<'tcx> TestCase<'tcx> {
|
||||
fn as_range(&self) -> Option<&PatRange<'tcx>> {
|
||||
if let Self::Range(v) = self { Some(v.as_ref()) } else { None }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1257,7 +1248,7 @@ impl<'pat, 'tcx> TestCase<'pat, 'tcx> {
|
|||
/// Each node also has a list of subpairs (possibly empty) that must also match,
|
||||
/// and a reference to the THIR pattern it represents.
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct MatchPairTree<'pat, 'tcx> {
|
||||
pub(crate) struct MatchPairTree<'tcx> {
|
||||
/// This place...
|
||||
///
|
||||
/// ---
|
||||
|
@ -1272,7 +1263,7 @@ pub(crate) struct MatchPairTree<'pat, 'tcx> {
|
|||
/// ---
|
||||
/// Invariant: after creation and simplification in [`FlatPat::new`],
|
||||
/// this must not be [`TestCase::Irrefutable`].
|
||||
test_case: TestCase<'pat, 'tcx>,
|
||||
test_case: TestCase<'tcx>,
|
||||
|
||||
/// ... and these subpairs must match.
|
||||
///
|
||||
|
@ -1283,8 +1274,10 @@ pub(crate) struct MatchPairTree<'pat, 'tcx> {
|
|||
/// that tests its field for the value `3`.
|
||||
subpairs: Vec<Self>,
|
||||
|
||||
/// The pattern this was created from.
|
||||
pattern: &'pat Pat<'tcx>,
|
||||
/// Type field of the pattern this node was created from.
|
||||
pattern_ty: Ty<'tcx>,
|
||||
/// Span field of the pattern this node was created from.
|
||||
pattern_span: Span,
|
||||
}
|
||||
|
||||
/// See [`Test`] for more.
|
||||
|
@ -1320,7 +1313,7 @@ enum TestKind<'tcx> {
|
|||
},
|
||||
|
||||
/// Test whether the value falls within an inclusive or exclusive range.
|
||||
Range(Box<PatRange<'tcx>>),
|
||||
Range(Arc<PatRange<'tcx>>),
|
||||
|
||||
/// Test that the length of the slice is `== len` or `>= len`.
|
||||
Len { len: u64, op: BinOp },
|
||||
|
@ -1423,7 +1416,7 @@ struct BuiltMatchTree<'tcx> {
|
|||
|
||||
impl<'tcx> MatchTreeSubBranch<'tcx> {
|
||||
fn from_sub_candidate(
|
||||
candidate: Candidate<'_, 'tcx>,
|
||||
candidate: Candidate<'tcx>,
|
||||
parent_data: &Vec<PatternExtraData<'tcx>>,
|
||||
) -> Self {
|
||||
debug_assert!(candidate.match_pairs.is_empty());
|
||||
|
@ -1449,12 +1442,12 @@ impl<'tcx> MatchTreeSubBranch<'tcx> {
|
|||
}
|
||||
|
||||
impl<'tcx> MatchTreeBranch<'tcx> {
|
||||
fn from_candidate(candidate: Candidate<'_, 'tcx>) -> Self {
|
||||
fn from_candidate(candidate: Candidate<'tcx>) -> Self {
|
||||
let mut sub_branches = Vec::new();
|
||||
traverse_candidate(
|
||||
candidate,
|
||||
&mut Vec::new(),
|
||||
&mut |candidate: Candidate<'_, '_>, parent_data: &mut Vec<PatternExtraData<'_>>| {
|
||||
&mut |candidate: Candidate<'_>, parent_data: &mut Vec<PatternExtraData<'_>>| {
|
||||
sub_branches.push(MatchTreeSubBranch::from_sub_candidate(candidate, parent_data));
|
||||
},
|
||||
|inner_candidate, parent_data| {
|
||||
|
@ -1485,23 +1478,20 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// `refutable` indicates whether the candidate list is refutable (for `if let` and `let else`)
|
||||
/// or not (for `let` and `match`). In the refutable case we return the block to which we branch
|
||||
/// on failure.
|
||||
fn lower_match_tree<'pat>(
|
||||
fn lower_match_tree(
|
||||
&mut self,
|
||||
block: BasicBlock,
|
||||
scrutinee_span: Span,
|
||||
scrutinee_place_builder: &PlaceBuilder<'tcx>,
|
||||
match_start_span: Span,
|
||||
patterns: Vec<(&'pat Pat<'tcx>, HasMatchGuard)>,
|
||||
patterns: Vec<(&Pat<'tcx>, HasMatchGuard)>,
|
||||
refutable: bool,
|
||||
) -> BuiltMatchTree<'tcx>
|
||||
where
|
||||
'tcx: 'pat,
|
||||
{
|
||||
) -> BuiltMatchTree<'tcx> {
|
||||
// Assemble the initial list of candidates. These top-level candidates are 1:1 with the
|
||||
// input patterns, but other parts of match lowering also introduce subcandidates (for
|
||||
// sub-or-patterns). So inside the algorithm, the candidates list may not correspond to
|
||||
// match arms directly.
|
||||
let mut candidates: Vec<Candidate<'_, '_>> = patterns
|
||||
let mut candidates: Vec<Candidate<'_>> = patterns
|
||||
.into_iter()
|
||||
.map(|(pat, has_guard)| {
|
||||
Candidate::new(scrutinee_place_builder.clone(), pat, has_guard, self)
|
||||
|
@ -1664,7 +1654,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
span: Span,
|
||||
scrutinee_span: Span,
|
||||
start_block: BasicBlock,
|
||||
candidates: &mut [&mut Candidate<'_, 'tcx>],
|
||||
candidates: &mut [&mut Candidate<'tcx>],
|
||||
) -> BasicBlock {
|
||||
ensure_sufficient_stack(|| {
|
||||
self.match_candidates_inner(span, scrutinee_span, start_block, candidates)
|
||||
|
@ -1678,7 +1668,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
span: Span,
|
||||
scrutinee_span: Span,
|
||||
mut start_block: BasicBlock,
|
||||
candidates: &mut [&mut Candidate<'_, 'tcx>],
|
||||
candidates: &mut [&mut Candidate<'tcx>],
|
||||
) -> BasicBlock {
|
||||
if let [first, ..] = candidates {
|
||||
if first.false_edge_start_block.is_none() {
|
||||
|
@ -1747,7 +1737,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// [otherwise block]: Candidate::otherwise_block
|
||||
fn select_matched_candidate(
|
||||
&mut self,
|
||||
candidate: &mut Candidate<'_, 'tcx>,
|
||||
candidate: &mut Candidate<'tcx>,
|
||||
start_block: BasicBlock,
|
||||
) -> BasicBlock {
|
||||
assert!(candidate.otherwise_block.is_none());
|
||||
|
@ -1765,13 +1755,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// Takes a list of candidates such that some of the candidates' first match pairs are
|
||||
/// or-patterns. This expands as many or-patterns as possible and processes the resulting
|
||||
/// candidates. Returns the unprocessed candidates if any.
|
||||
fn expand_and_match_or_candidates<'pat, 'b, 'c>(
|
||||
fn expand_and_match_or_candidates<'b, 'c>(
|
||||
&mut self,
|
||||
span: Span,
|
||||
scrutinee_span: Span,
|
||||
start_block: BasicBlock,
|
||||
candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>],
|
||||
) -> BlockAnd<&'b mut [&'c mut Candidate<'pat, 'tcx>]> {
|
||||
candidates: &'b mut [&'c mut Candidate<'tcx>],
|
||||
) -> BlockAnd<&'b mut [&'c mut Candidate<'tcx>]> {
|
||||
// We can't expand or-patterns freely. The rule is:
|
||||
// - If a candidate doesn't start with an or-pattern, we include it in
|
||||
// the expansion list as-is (i.e. it "expands" to itself).
|
||||
|
@ -1865,14 +1855,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// Given a match-pair that corresponds to an or-pattern, expand each subpattern into a new
|
||||
/// subcandidate. Any candidate that has been expanded this way should also be postprocessed
|
||||
/// at the end of [`Self::expand_and_match_or_candidates`].
|
||||
fn create_or_subcandidates<'pat>(
|
||||
fn create_or_subcandidates(
|
||||
&mut self,
|
||||
candidate: &mut Candidate<'pat, 'tcx>,
|
||||
match_pair: MatchPairTree<'pat, 'tcx>,
|
||||
candidate: &mut Candidate<'tcx>,
|
||||
match_pair: MatchPairTree<'tcx>,
|
||||
) {
|
||||
let TestCase::Or { pats } = match_pair.test_case else { bug!() };
|
||||
debug!("expanding or-pattern: candidate={:#?}\npats={:#?}", candidate, pats);
|
||||
candidate.or_span = Some(match_pair.pattern.span);
|
||||
candidate.or_span = Some(match_pair.pattern_span);
|
||||
candidate.subcandidates = pats
|
||||
.into_vec()
|
||||
.into_iter()
|
||||
|
@ -1938,7 +1928,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
///
|
||||
/// Note that this takes place _after_ the subcandidates have participated
|
||||
/// in match tree lowering.
|
||||
fn merge_trivial_subcandidates(&mut self, candidate: &mut Candidate<'_, 'tcx>) {
|
||||
fn merge_trivial_subcandidates(&mut self, candidate: &mut Candidate<'tcx>) {
|
||||
assert!(!candidate.subcandidates.is_empty());
|
||||
if candidate.has_guard {
|
||||
// FIXME(or_patterns; matthewjasper) Don't give up if we have a guard.
|
||||
|
@ -1981,7 +1971,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// Never subcandidates may have a set of bindings inconsistent with their siblings,
|
||||
/// which would break later code. So we filter them out. Note that we can't filter out
|
||||
/// top-level candidates this way.
|
||||
fn remove_never_subcandidates(&mut self, candidate: &mut Candidate<'_, 'tcx>) {
|
||||
fn remove_never_subcandidates(&mut self, candidate: &mut Candidate<'tcx>) {
|
||||
if candidate.subcandidates.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
@ -2020,7 +2010,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
&mut self,
|
||||
span: Span,
|
||||
scrutinee_span: Span,
|
||||
candidate: &mut Candidate<'_, 'tcx>,
|
||||
candidate: &mut Candidate<'tcx>,
|
||||
) {
|
||||
if candidate.match_pairs.is_empty() {
|
||||
return;
|
||||
|
@ -2086,7 +2076,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// [`Switch`]: TestKind::Switch
|
||||
/// [`SwitchInt`]: TestKind::SwitchInt
|
||||
/// [`Range`]: TestKind::Range
|
||||
fn pick_test(&mut self, candidates: &[&mut Candidate<'_, 'tcx>]) -> (Place<'tcx>, Test<'tcx>) {
|
||||
fn pick_test(&mut self, candidates: &[&mut Candidate<'tcx>]) -> (Place<'tcx>, Test<'tcx>) {
|
||||
// Extract the match-pair from the highest priority candidate
|
||||
let match_pair = &candidates[0].match_pairs[0];
|
||||
let test = self.pick_test_for_match_pair(match_pair);
|
||||
|
@ -2137,18 +2127,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
|||
/// The sorted candidates are mutated to remove entailed match pairs:
|
||||
/// - candidate 0 becomes `[z @ true]` since we know that `x` was `true`;
|
||||
/// - candidate 1 becomes `[y @ false]` since we know that `x` was `false`.
|
||||
fn sort_candidates<'b, 'c, 'pat>(
|
||||
fn sort_candidates<'b, 'c>(
|
||||
&mut self,
|
||||
match_place: Place<'tcx>,
|
||||
test: &Test<'tcx>,
|
||||
mut candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>],
|
||||
mut candidates: &'b mut [&'c mut Candidate<'tcx>],
|
||||
) -> (
|
||||
&'b mut [&'c mut Candidate<'pat, 'tcx>],
|
||||
FxIndexMap<TestBranch<'tcx>, Vec<&'b mut Candidate<'pat, 'tcx>>>,
|
||||
&'b mut [&'c mut Candidate<'tcx>],
|
||||
FxIndexMap<TestBranch<'tcx>, Vec<&'b mut Candidate<'tcx>>>,
|
||||
) {
|
||||
// For each of the possible outcomes, collect vector of candidates that apply if the test
|
||||
// has that particular outcome.
|
||||
let mut target_candidates: FxIndexMap<_, Vec<&mut Candidate<'_, '_>>> = Default::default();
|
||||
let mut target_candidates: FxIndexMap<_, Vec<&mut Candidate<'_>>> = Default::default();
|
||||
|
||||
let total_candidate_count = candidates.len();
|
||||
|
||||
|
@@ -2274,13 +2264,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// ```
///
/// We return the unprocessed candidates.
fn test_candidates<'pat, 'b, 'c>(
fn test_candidates<'b, 'c>(
&mut self,
span: Span,
scrutinee_span: Span,
candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>],
candidates: &'b mut [&'c mut Candidate<'tcx>],
start_block: BasicBlock,
) -> BlockAnd<&'b mut [&'c mut Candidate<'pat, 'tcx>]> {
) -> BlockAnd<&'b mut [&'c mut Candidate<'tcx>]> {
// Choose a match pair from the first candidate, and use it to determine a
// test to perform that will confirm or refute that match pair.
let (match_place, test) = self.pick_test(candidates);

@@ -23,9 +23,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Simplify a list of match pairs so they all require a test. Stores relevant bindings and
/// ascriptions in `extra_data`.
#[instrument(skip(self), level = "debug")]
pub(super) fn simplify_match_pairs<'pat>(
pub(super) fn simplify_match_pairs(
&mut self,
match_pairs: &mut Vec<MatchPairTree<'pat, 'tcx>>,
match_pairs: &mut Vec<MatchPairTree<'tcx>>,
extra_data: &mut PatternExtraData<'tcx>,
) {
// In order to please the borrow checker, in a pattern like `x @ pat` we must lower the

@@ -6,6 +6,7 @@
// the candidates based on the result.

use std::cmp::Ordering;
use std::sync::Arc;

use rustc_data_structures::fx::FxIndexMap;
use rustc_hir::{LangItem, RangeEnd};

@@ -26,20 +27,20 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Identifies what test is needed to decide if `match_pair` is applicable.
///
/// It is a bug to call this with a not-fully-simplified pattern.
pub(super) fn pick_test_for_match_pair<'pat>(
pub(super) fn pick_test_for_match_pair(
&mut self,
match_pair: &MatchPairTree<'pat, 'tcx>,
match_pair: &MatchPairTree<'tcx>,
) -> Test<'tcx> {
let kind = match match_pair.test_case {
TestCase::Variant { adt_def, variant_index: _ } => TestKind::Switch { adt_def },

TestCase::Constant { .. } if match_pair.pattern.ty.is_bool() => TestKind::If,
TestCase::Constant { .. } if is_switch_ty(match_pair.pattern.ty) => TestKind::SwitchInt,
TestCase::Constant { value } => TestKind::Eq { value, ty: match_pair.pattern.ty },
TestCase::Constant { .. } if match_pair.pattern_ty.is_bool() => TestKind::If,
TestCase::Constant { .. } if is_switch_ty(match_pair.pattern_ty) => TestKind::SwitchInt,
TestCase::Constant { value } => TestKind::Eq { value, ty: match_pair.pattern_ty },

TestCase::Range(range) => {
assert_eq!(range.ty, match_pair.pattern.ty);
TestKind::Range(Box::new(range.clone()))
TestCase::Range(ref range) => {
assert_eq!(range.ty, match_pair.pattern_ty);
TestKind::Range(Arc::clone(range))
}

TestCase::Slice { len, variable_length } => {
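The change from `Box::new(range.clone())` to `Arc::clone(range)` above swaps a deep copy for a reference-count bump. A minimal standalone sketch of that difference, with a made-up struct standing in for the shared range data:

use std::sync::Arc;

// Stand-in type for illustration only; not the compiler's real `PatRange`.
#[derive(Clone, Debug, PartialEq)]
struct RangeData { lo: u32, hi: u32 }

fn main() {
    let shared = Arc::new(RangeData { lo: 1, hi: 10 });
    // Cheap: only increments the reference count, no copy of the payload.
    let alias = Arc::clone(&shared);
    // More expensive by comparison: clones the payload and allocates a new box.
    let boxed: Box<RangeData> = Box::new(shared.as_ref().clone());
    assert_eq!(*alias, *boxed);
}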
@@ -56,13 +57,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
TestCase::Or { .. } => bug!("or-patterns should have already been handled"),

TestCase::Irrefutable { .. } => span_bug!(
match_pair.pattern.span,
match_pair.pattern_span,
"simplifiable pattern found: {:?}",
match_pair.pattern
match_pair.pattern_span
),
};

Test { span: match_pair.pattern.span, kind }
Test { span: match_pair.pattern_span, kind }
}

#[instrument(skip(self, target_blocks, place), level = "debug")]

@@ -521,8 +522,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&mut self,
test_place: Place<'tcx>,
test: &Test<'tcx>,
candidate: &mut Candidate<'_, 'tcx>,
sorted_candidates: &FxIndexMap<TestBranch<'tcx>, Vec<&mut Candidate<'_, 'tcx>>>,
candidate: &mut Candidate<'tcx>,
sorted_candidates: &FxIndexMap<TestBranch<'tcx>, Vec<&mut Candidate<'tcx>>>,
) -> Option<TestBranch<'tcx>> {
// Find the match_pair for this place (if any). At present,
// afaik, there can be at most one. (In the future, if we

@@ -558,14 +559,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// FIXME(#29623) we could use PatKind::Range to rule
// things out here, in some cases.
(TestKind::SwitchInt, &TestCase::Constant { value })
if is_switch_ty(match_pair.pattern.ty) =>
if is_switch_ty(match_pair.pattern_ty) =>
{
// An important invariant of candidate sorting is that a candidate
// must not match in multiple branches. For `SwitchInt` tests, adding
// a new value might invalidate that property for range patterns that
// have already been sorted into the failure arm, so we must take care
// not to add such values here.
let is_covering_range = |test_case: &TestCase<'_, 'tcx>| {
let is_covering_range = |test_case: &TestCase<'tcx>| {
test_case.as_range().is_some_and(|range| {
matches!(
range.contains(value, self.tcx, self.typing_env()),

@@ -573,7 +574,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
)
})
};
let is_conflicting_candidate = |candidate: &&mut Candidate<'_, 'tcx>| {
let is_conflicting_candidate = |candidate: &&mut Candidate<'tcx>| {
candidate
.match_pairs
.iter()

@@ -685,8 +686,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}

(TestKind::Range(test), &TestCase::Range(pat)) => {
if test.as_ref() == pat {
(TestKind::Range(test), TestCase::Range(pat)) => {
if test == pat {
fully_matched = true;
Some(TestBranch::Success)
} else {

@@ -67,7 +67,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// a MIR pass run after borrow checking.
pub(super) fn collect_fake_borrows<'tcx>(
cx: &mut Builder<'_, 'tcx>,
candidates: &[Candidate<'_, 'tcx>],
candidates: &[Candidate<'tcx>],
temp_span: Span,
scrutinee_base: PlaceBase,
) -> Vec<(Place<'tcx>, Local, FakeBorrowKind)> {

@@ -135,7 +135,7 @@ impl<'a, 'b, 'tcx> FakeBorrowCollector<'a, 'b, 'tcx> {
}
}

fn visit_candidate(&mut self, candidate: &Candidate<'_, 'tcx>) {
fn visit_candidate(&mut self, candidate: &Candidate<'tcx>) {
for binding in &candidate.extra_data.bindings {
self.visit_binding(binding);
}

@@ -144,7 +144,7 @@ impl<'a, 'b, 'tcx> FakeBorrowCollector<'a, 'b, 'tcx> {
}
}

fn visit_flat_pat(&mut self, flat_pat: &FlatPat<'_, 'tcx>) {
fn visit_flat_pat(&mut self, flat_pat: &FlatPat<'tcx>) {
for binding in &flat_pat.extra_data.bindings {
self.visit_binding(binding);
}

@@ -153,7 +153,7 @@ impl<'a, 'b, 'tcx> FakeBorrowCollector<'a, 'b, 'tcx> {
}
}

fn visit_match_pair(&mut self, match_pair: &MatchPairTree<'_, 'tcx>) {
fn visit_match_pair(&mut self, match_pair: &MatchPairTree<'tcx>) {
if let TestCase::Or { pats, .. } = &match_pair.test_case {
for flat_pat in pats.iter() {
self.visit_flat_pat(flat_pat)

@@ -676,12 +676,14 @@ impl<'p, 'tcx> MatchVisitor<'p, 'tcx> {
let mut interpreted_as_const = None;
let mut interpreted_as_const_sugg = None;

if let PatKind::ExpandedConstant { def_id, is_inline: false, .. }
| PatKind::AscribeUserType {
subpattern:
box Pat { kind: PatKind::ExpandedConstant { def_id, is_inline: false, .. }, .. },
..
} = pat.kind
// These next few matches want to peek through `AscribeUserType` to see
// the underlying pattern.
let mut unpeeled_pat = pat;
while let PatKind::AscribeUserType { ref subpattern, .. } = unpeeled_pat.kind {
unpeeled_pat = subpattern;
}

if let PatKind::ExpandedConstant { def_id, is_inline: false, .. } = unpeeled_pat.kind
&& let DefKind::Const = self.tcx.def_kind(def_id)
&& let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(pat.span)
// We filter out paths with multiple path::segments.

@@ -692,11 +694,7 @@ impl<'p, 'tcx> MatchVisitor<'p, 'tcx> {
// When we encounter a constant as the binding name, point at the `const` definition.
interpreted_as_const = Some(span);
interpreted_as_const_sugg = Some(InterpretedAsConst { span: pat.span, variable });
} else if let PatKind::Constant { .. }
| PatKind::AscribeUserType {
subpattern: box Pat { kind: PatKind::Constant { .. }, .. },
..
} = pat.kind
} else if let PatKind::Constant { .. } = unpeeled_pat.kind
&& let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(pat.span)
{
// If the pattern to match is an integer literal:
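The new code above peels `AscribeUserType` wrappers with a `while let` loop before inspecting the underlying pattern kind. A tiny self-contained sketch of that peeling idiom, using an invented toy enum rather than the compiler's real pattern types:

// Invented toy type for illustration; stands in for a pattern wrapped in
// ascription nodes.
enum Node {
    Wrapper(Box<Node>),
    Leaf(u32),
}

fn peel(mut node: &Node) -> &Node {
    // Walk through wrapper layers until we reach the interesting kind.
    while let Node::Wrapper(inner) = node {
        node = inner.as_ref();
    }
    node
}

fn main() {
    let n = Node::Wrapper(Box::new(Node::Wrapper(Box::new(Node::Leaf(7)))));
    assert!(matches!(peel(&n), Node::Leaf(7)));
}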
@@ -208,7 +208,7 @@ impl<'tcx> ConstToPat<'tcx> {
let field = FieldIdx::new(idx);
// Patterns can only use monomorphic types.
let ty = self.tcx.normalize_erasing_regions(self.typing_env, ty);
FieldPat { field, pattern: self.valtree_to_pat(val, ty) }
FieldPat { field, pattern: *self.valtree_to_pat(val, ty) }
})
.collect()
}

@@ -277,7 +277,7 @@ impl<'tcx> ConstToPat<'tcx> {
prefix: cv
.unwrap_branch()
.iter()
.map(|val| self.valtree_to_pat(*val, *elem_ty))
.map(|val| *self.valtree_to_pat(*val, *elem_ty))
.collect(),
slice: None,
suffix: Box::new([]),

@@ -286,7 +286,7 @@ impl<'tcx> ConstToPat<'tcx> {
prefix: cv
.unwrap_branch()
.iter()
.map(|val| self.valtree_to_pat(*val, *elem_ty))
.map(|val| *self.valtree_to_pat(*val, *elem_ty))
.collect(),
slice: None,
suffix: Box::new([]),

@@ -4,6 +4,7 @@ mod check_match;
mod const_to_pat;

use std::cmp::Ordering;
use std::sync::Arc;

use rustc_abi::{FieldIdx, Integer};
use rustc_errors::MultiSpan;

@@ -262,7 +263,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
let hi = lower_endpoint(hi_expr)?.unwrap_or(PatRangeBoundary::PosInfinity);

let cmp = lo.compare_with(hi, ty, self.tcx, self.typing_env);
let mut kind = PatKind::Range(Box::new(PatRange { lo, hi, end, ty }));
let mut kind = PatKind::Range(Arc::new(PatRange { lo, hi, end, ty }));
match (end, cmp) {
// `x..y` where `x < y`.
(RangeEnd::Excluded, Some(Ordering::Less)) => {}

@@ -418,7 +419,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
.iter()
.map(|field| FieldPat {
field: self.typeck_results.field_index(field.hir_id),
pattern: self.lower_pattern(field.pat),
pattern: *self.lower_pattern(field.pat),
})
.collect();

@@ -446,13 +447,13 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
.enumerate_and_adjust(expected_len, gap_pos)
.map(|(i, subpattern)| FieldPat {
field: FieldIdx::new(i),
pattern: self.lower_pattern(subpattern),
pattern: *self.lower_pattern(subpattern),
})
.collect()
}

fn lower_patterns(&mut self, pats: &'tcx [hir::Pat<'tcx>]) -> Box<[Box<Pat<'tcx>>]> {
pats.iter().map(|p| self.lower_pattern(p)).collect()
fn lower_patterns(&mut self, pats: &'tcx [hir::Pat<'tcx>]) -> Box<[Pat<'tcx>]> {
pats.iter().map(|p| *self.lower_pattern(p)).collect()
}

fn lower_opt_pattern(&mut self, pat: Option<&'tcx hir::Pat<'tcx>>) -> Option<Box<Pat<'tcx>>> {

@@ -643,8 +643,8 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
print_indented!(self, "}", depth_lvl);
}

fn print_pat(&mut self, pat: &Box<Pat<'tcx>>, depth_lvl: usize) {
let Pat { ty, span, kind } = &**pat;
fn print_pat(&mut self, pat: &Pat<'tcx>, depth_lvl: usize) {
let &Pat { ty, span, ref kind } = pat;

print_indented!(self, "Pat: {", depth_lvl);
print_indented!(self, format!("ty: {:?}", ty), depth_lvl + 1);
@@ -318,9 +318,17 @@ pub(crate) fn first_method_vtable_slot<'tcx>(tcx: TyCtxt<'tcx>, key: ty::TraitRe
bug!();
};
let source_principal = tcx.instantiate_bound_regions_with_erased(
source.principal().unwrap().with_self_ty(tcx, tcx.types.trait_object_dummy_self),
source.principal().unwrap().with_self_ty(tcx, key.self_ty()),
);

// We're monomorphizing a call to a dyn trait object that can never be constructed.
if tcx.instantiate_and_check_impossible_predicates((
source_principal.def_id,
source_principal.args,
)) {
return 0;
}

let target_principal = ty::ExistentialTraitRef::erase_self_ty(tcx, key);

let vtable_segment_callback = {

@@ -373,19 +381,27 @@ pub(crate) fn supertrait_vtable_slot<'tcx>(
let (source, target) = key;

// If the target principal is `None`, we can just return `None`.
let ty::Dynamic(target, _, _) = *target.kind() else {
let ty::Dynamic(target_data, _, _) = *target.kind() else {
bug!();
};
let target_principal = tcx.instantiate_bound_regions_with_erased(target.principal()?);
let target_principal = tcx.instantiate_bound_regions_with_erased(target_data.principal()?);

// Given that we have a target principal, it is a bug for there not to be a source principal.
let ty::Dynamic(source, _, _) = *source.kind() else {
let ty::Dynamic(source_data, _, _) = *source.kind() else {
bug!();
};
let source_principal = tcx.instantiate_bound_regions_with_erased(
source.principal().unwrap().with_self_ty(tcx, tcx.types.trait_object_dummy_self),
source_data.principal().unwrap().with_self_ty(tcx, source),
);

// We're monomorphizing a dyn trait object upcast that can never be constructed.
if tcx.instantiate_and_check_impossible_predicates((
source_principal.def_id,
source_principal.args,
)) {
return None;
}

let vtable_segment_callback = {
let mut vptr_offset = 0;
move |segment| {
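For context, the slot computed above is what makes a dyn-to-dyn supertrait upcast work at runtime. A minimal sketch of such an upcast follows; it is written against the `trait_upcasting` feature gate that the test added later in this diff also uses, and on toolchains where the feature has since stabilized the gate line is unnecessary.

#![feature(trait_upcasting)]

trait Super {
    fn hello(&self) {}
}
trait Sub: Super {}

struct S;
impl Super for S {}
impl Sub for S {}

// Coercing `&dyn Sub` to `&dyn Super` requires knowing where the `Super`
// vtable sits relative to the `Sub` vtable; that offset is what the code
// above computes.
fn upcast(x: &dyn Sub) -> &dyn Super {
    x
}

fn main() {
    upcast(&S).hello();
}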
@@ -373,7 +373,8 @@ impl<'a, 'tcx> IsThirPolymorphic<'a, 'tcx> {

match pat.kind {
thir::PatKind::Constant { value } => value.has_non_region_param(),
thir::PatKind::Range(box thir::PatRange { lo, hi, .. }) => {
thir::PatKind::Range(ref range) => {
let &thir::PatRange { lo, hi, .. } = range.as_ref();
lo.has_non_region_param() || hi.has_non_region_param()
}
_ => false,

@@ -418,8 +418,13 @@ fn test_creation_flags() {
const EXIT_PROCESS_DEBUG_EVENT: u32 = 5;
const DBG_EXCEPTION_NOT_HANDLED: u32 = 0x80010001;

let mut child =
Command::new("cmd").creation_flags(DEBUG_PROCESS).stdin(Stdio::piped()).spawn().unwrap();
let mut child = Command::new("cmd")
.creation_flags(DEBUG_PROCESS)
.stdin(Stdio::piped())
.stdout(Stdio::null())
.stderr(Stdio::null())
.spawn()
.unwrap();
child.stdin.take().unwrap().write_all(b"exit\r\n").unwrap();
let mut events = 0;
let mut event = DEBUG_EVENT { event_code: 0, process_id: 0, thread_id: 0, _junk: [0; 164] };

@@ -486,9 +491,13 @@ fn test_proc_thread_attributes() {
}
}

let parent = ProcessDropGuard(Command::new("cmd").spawn().unwrap());
let mut parent = Command::new("cmd");
parent.stdout(Stdio::null()).stderr(Stdio::null());

let parent = ProcessDropGuard(parent.spawn().unwrap());

let mut child_cmd = Command::new("cmd");
child_cmd.stdout(Stdio::null()).stderr(Stdio::null());

let parent_process_handle = parent.0.as_raw_handle();
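The test changes above follow one pattern: give the spawned child a null stdout and stderr so its output does not leak into the test harness's own streams. A minimal sketch of that pattern in isolation, with a placeholder command name that is not part of this changeset:

use std::process::{Child, Command, Stdio};

// Spawn a child whose output is discarded instead of inherited.
// "some-command" is a placeholder binary name.
fn spawn_quiet() -> std::io::Result<Child> {
    Command::new("some-command")
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .spawn()
}

fn main() {
    // The spawn will usually fail because the placeholder binary does not
    // exist; the point is only to show the builder calls.
    let _ = spawn_quiet();
}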
@@ -310,6 +310,9 @@ impl<'a> Renderer<'a> {
match message {
Message::Suite(SuiteMessage::Started { test_count }) => {
println!("\nrunning {test_count} tests");
self.benches = vec![];
self.failures = vec![];
self.ignored_tests = 0;
self.executed_tests = 0;
self.terse_tests_in_line = 0;
self.tests_count = Some(test_count);
@@ -3,10 +3,10 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::msrvs::{self, Msrv};
use rustc_ast::ast::{FloatTy, LitFloatType, LitKind};
use rustc_attr_parsing::RustcVersion;
use rustc_hir::{Expr, ExprKind};
use rustc_hir::{HirId, Lit};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::impl_lint_pass;
use rustc_span::symbol;
use rustc_span::{Span, symbol};
use std::f64::consts as f64;

declare_clippy_lint! {

@@ -73,22 +73,28 @@ impl ApproxConstant {
msrv: conf.msrv.clone(),
}
}
}

fn check_lit(&self, cx: &LateContext<'_>, lit: &LitKind, e: &Expr<'_>) {
match *lit {
impl<'tcx> LateLintPass<'tcx> for ApproxConstant {
fn check_lit(&mut self, cx: &LateContext<'_>, _hir_id: HirId, lit: &Lit, _negated: bool) {
match lit.node {
LitKind::Float(s, LitFloatType::Suffixed(fty)) => match fty {
FloatTy::F16 => self.check_known_consts(cx, e, s, "f16"),
FloatTy::F32 => self.check_known_consts(cx, e, s, "f32"),
FloatTy::F64 => self.check_known_consts(cx, e, s, "f64"),
FloatTy::F128 => self.check_known_consts(cx, e, s, "f128"),
FloatTy::F16 => self.check_known_consts(cx, lit.span, s, "f16"),
FloatTy::F32 => self.check_known_consts(cx, lit.span, s, "f32"),
FloatTy::F64 => self.check_known_consts(cx, lit.span, s, "f64"),
FloatTy::F128 => self.check_known_consts(cx, lit.span, s, "f128"),
},
// FIXME(f16_f128): add `f16` and `f128` when these types become stable.
LitKind::Float(s, LitFloatType::Unsuffixed) => self.check_known_consts(cx, e, s, "f{32, 64}"),
LitKind::Float(s, LitFloatType::Unsuffixed) => self.check_known_consts(cx, lit.span, s, "f{32, 64}"),
_ => (),
}
}

fn check_known_consts(&self, cx: &LateContext<'_>, e: &Expr<'_>, s: symbol::Symbol, module: &str) {
extract_msrv_attr!(LateContext);
}

impl ApproxConstant {
fn check_known_consts(&self, cx: &LateContext<'_>, span: Span, s: symbol::Symbol, module: &str) {
let s = s.as_str();
if s.parse::<f64>().is_ok() {
for &(constant, name, min_digits, msrv) in &KNOWN_CONSTS {

@@ -96,7 +102,7 @@ impl ApproxConstant {
span_lint_and_help(
cx,
APPROX_CONSTANT,
e.span,
span,
format!("approximate value of `{module}::consts::{name}` found"),
None,
"consider using the constant directly",

@@ -110,16 +116,6 @@ impl ApproxConstant {

impl_lint_pass!(ApproxConstant => [APPROX_CONSTANT]);

impl<'tcx> LateLintPass<'tcx> for ApproxConstant {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
if let ExprKind::Lit(lit) = &e.kind {
self.check_lit(cx, &lit.node, e);
}
}

extract_msrv_attr!(LateContext);
}

/// Returns `false` if the number of significant figures in `value` are
/// less than `min_digits`; otherwise, returns true if `value` is equal
/// to `constant`, rounded to the number of digits present in `value`.
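The refactor above does not change what the lint reports; it still fires on float literals that approximate well-known constants. A small illustration of the kind of user code it flags (hypothetical code, not taken from this changeset):

// Clippy's `approx_constant` flags the literal below and suggests using the
// named constant instead.
fn circumference(radius: f64) -> f64 {
    let pi = 3.1415; // warning: approximate value of `f64::consts::PI` found
    2.0 * pi * radius
}

fn main() {
    println!("{}", circumference(1.0));
}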
@@ -5,7 +5,7 @@ fn main() -> () {
scope 1 {
debug x => const 2_u32 is 1..=;
scope 2 {
debug y => const 0_u32 is 1..=;
debug y => const {transmute(0x00000000): (u32) is 1..=};
}
}

@@ -7,6 +7,6 @@ use std::pat::pattern_type;
fn main() {
// CHECK: debug x => const 2_u32 is 1..=
let x: pattern_type!(u32 is 1..) = unsafe { std::mem::transmute(2) };
// CHECK: debug y => const 0_u32 is 1..=
// CHECK: debug y => const {transmute(0x00000000): (u32) is 1..=}
let y: pattern_type!(u32 is 1..) = unsafe { std::mem::transmute(0) };
}

@@ -1,8 +1,5 @@
//@ edition: 2021

// Test doesn't fail until monomorphization time, unfortunately.
//@ build-fail

fn main() {
let _ = async {
A.first().await.second().await;

@@ -1,5 +1,5 @@
error[E0733]: recursion in an async fn requires boxing
--> $DIR/indirect-recursion-issue-112047.rs:34:5
--> $DIR/indirect-recursion-issue-112047.rs:31:5
|
LL | async fn second(self) {
| ^^^^^^^^^^^^^^^^^^^^^
tests/ui/diagnostic-width/binop.rs (new file, 17 lines added)
@@ -0,0 +1,17 @@
//@ compile-flags: --diagnostic-width=60 -Zwrite-long-types-to-disk=yes
// The regex below normalizes the long type file name to make it suitable for compare-modes.
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type-\d+.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type-hash.txt'"
type A = (i32, i32, i32, i32);
type B = (A, A, A, A);
type C = (B, B, B, B);
type D = (C, C, C, C);

fn foo(x: D) {
x + x; //~ ERROR cannot add `(...
}

fn bar(x: D) {
!x; //~ ERROR cannot apply unary operator `!` to type `(...
}

fn main() {}
tests/ui/diagnostic-width/binop.stderr (new file, 24 lines added)
@@ -0,0 +1,24 @@
error[E0369]: cannot add `(..., ..., ..., ...)` to `(..., ..., ..., ...)`
--> $DIR/binop.rs:10:7
|
LL | x + x;
| - ^ - (..., ..., ..., ...)
| |
| (..., ..., ..., ...)
|
= note: the full name for the type has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
= note: consider using `--verbose` to print the full type name to the console

error[E0600]: cannot apply unary operator `!` to type `(..., ..., ..., ...)`
--> $DIR/binop.rs:14:5
|
LL | !x;
| ^^ cannot apply unary operator `!`
|
= note: the full name for the type has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
= note: consider using `--verbose` to print the full type name to the console

error: aborting due to 2 previous errors

Some errors have detailed explanations: E0369, E0600.
For more information about an error, try `rustc --explain E0369`.
@@ -1,6 +1,6 @@
//@ check-fail
//@ compile-flags: --crate-type=lib
//@ edition: 2021

#![allow(internal_features)]
#![feature(rustc_attrs)]

@@ -20,5 +20,7 @@ pub fn callee_justified() {

async fn async_caller() {
callee();
//~^ ERROR `callee` could not be inlined
callee_justified();
//~^ ERROR `callee_justified` could not be inlined
}

@@ -20,5 +20,22 @@ LL | pub fn callee_justified() {
|
= note: incompatible due to: #[rustc_no_mir_inline]

error: aborting due to 2 previous errors
error: `callee` could not be inlined into `async_caller::{closure#0}` but is required to be inlined
--> $DIR/deny-async.rs:22:5
|
LL | callee();
| ^^^^^^^^ ...`callee` called here
|
= note: could not be inlined due to: #[rustc_no_mir_inline]

error: `callee_justified` could not be inlined into `async_caller::{closure#0}` but is required to be inlined
--> $DIR/deny-async.rs:24:5
|
LL | callee_justified();
| ^^^^^^^^^^^^^^^^^^ ...`callee_justified` called here
|
= note: could not be inlined due to: #[rustc_no_mir_inline]
= note: `callee_justified` is required to be inlined to: the test requires it

error: aborting due to 4 previous errors
@@ -1,5 +1,5 @@
error[E0733]: recursion in a coroutine requires boxing
--> $DIR/recursive-coroutine-indirect.rs:11:18
--> $DIR/recursive-coroutine-indirect.rs:8:18
|
LL | #[coroutine] move || {
| ^^^^^^^

@@ -1,5 +1,5 @@
error[E0733]: recursion in a coroutine requires boxing
--> $DIR/recursive-coroutine-indirect.rs:11:18
--> $DIR/recursive-coroutine-indirect.rs:8:18
|
LL | #[coroutine] move || {
| ^^^^^^^

@@ -2,9 +2,6 @@
//@ ignore-compare-mode-next-solver (explicit revisions)
//@[next] compile-flags: -Znext-solver

//@[next] build-fail
// Deeply normalizing writeback results of opaques makes this into a post-mono error :(

#![feature(coroutines)]
#![allow(unconditional_recursion)]
fn coroutine_hold() -> impl Sized {
tests/ui/pattern/overflowing-literals.rs (new file, 21 lines added)
@@ -0,0 +1,21 @@
//! Check that overflowing literals are in patterns are rejected

#![feature(pattern_types)]
#![feature(pattern_type_macro)]

use std::pat::pattern_type;

type TooBig = pattern_type!(u8 is 500..);
//~^ ERROR: literal out of range for `u8`
type TooSmall = pattern_type!(i8 is -500..);
//~^ ERROR: literal out of range for `i8`
type TooBigSigned = pattern_type!(i8 is 200..);
//~^ ERROR: literal out of range for `i8`

fn main() {
match 5_u8 {
500 => {}
//~^ ERROR literal out of range for `u8`
_ => {}
}
}
tests/ui/pattern/overflowing-literals.stderr (new file, 37 lines added)
@@ -0,0 +1,37 @@
error: literal out of range for `u8`
--> $DIR/overflowing-literals.rs:8:35
|
LL | type TooBig = pattern_type!(u8 is 500..);
| ^^^
|
= note: the literal `500` does not fit into the type `u8` whose range is `0..=255`
= note: `#[deny(overflowing_literals)]` on by default

error: literal out of range for `i8`
--> $DIR/overflowing-literals.rs:10:37
|
LL | type TooSmall = pattern_type!(i8 is -500..);
| ^^^^
|
= note: the literal `-500` does not fit into the type `i8` whose range is `-128..=127`
= help: consider using the type `i16` instead

error: literal out of range for `i8`
--> $DIR/overflowing-literals.rs:12:41
|
LL | type TooBigSigned = pattern_type!(i8 is 200..);
| ^^^
|
= note: the literal `200` does not fit into the type `i8` whose range is `-128..=127`
= help: consider using the type `u8` instead

error: literal out of range for `u8`
--> $DIR/overflowing-literals.rs:17:9
|
LL | 500 => {}
| ^^^
|
= note: the literal `500` does not fit into the type `u8` whose range is `0..=255`

error: aborting due to 4 previous errors
tests/ui/traits/trait-upcasting/mono-impossible.rs (new file, 26 lines added)
@@ -0,0 +1,26 @@
//@ build-pass

#![feature(trait_upcasting)]

trait Supertrait<T> {
fn method(&self) {}
}
impl<T> Supertrait<T> for () {}

trait WithAssoc {
type Assoc;
}
trait Trait<P: WithAssoc>: Supertrait<P::Assoc> + Supertrait<()> {}

fn upcast<P>(x: &dyn Trait<P>) -> &dyn Supertrait<()> {
x
}

fn call<P>(x: &dyn Trait<P>) {
x.method();
}

fn main() {
println!("{:p}", upcast::<()> as *const ());
println!("{:p}", call::<()> as *const ());
}
@@ -1,5 +1,4 @@
//@ edition: 2021
//@ build-fail

#![feature(impl_trait_in_assoc_type)]

@@ -1,11 +1,11 @@
error[E0733]: recursion in an async block requires boxing
--> $DIR/indirect-recursion-issue-112047.rs:22:9
--> $DIR/indirect-recursion-issue-112047.rs:21:9
|
LL | async move { recur(self).await; }
| ^^^^^^^^^^ ----------------- recursive call here
|
note: which leads to this async fn
--> $DIR/indirect-recursion-issue-112047.rs:14:1
--> $DIR/indirect-recursion-issue-112047.rs:13:1
|
LL | async fn recur(t: impl Recur) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^