
avoid .into() conversion to identical types

Matthias Krüger 2022-12-18 15:01:26 +01:00
parent 35a99eef32
commit 0aa4cde747
11 changed files with 17 additions and 27 deletions
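
Every hunk below is the same mechanical cleanup: a value that already has the target type is passed directly instead of going through a redundant `.into()`. As a minimal, stand-alone illustration of the pattern being removed (not code from this diff; the names are made up), converting a value into its own type is a no-op, and Clippy reports it as `useless_conversion`:

    fn print_message(msg: String) {
        println!("{msg}");
    }

    fn main() {
        let msg = String::from("hello");

        // Redundant: `msg.clone()` is already a `String`, so `.into()` is a
        // `String -> String` conversion that does nothing. Clippy flags this
        // as `clippy::useless_conversion`.
        print_message(msg.clone().into());

        // Equivalent and clearer: pass the value as-is.
        print_message(msg);
    }

Dropping the call changes no behavior; it only removes a dead conversion and makes the actual types at each call site easier to read.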


@@ -239,8 +239,7 @@ pub fn expand_test_or_bench(
             cx.attr_nested_word(sym::cfg, sym::test, attr_sp),
             // #[rustc_test_marker = "test_case_sort_key"]
             cx.attr_name_value_str(sym::rustc_test_marker, test_path_symbol, attr_sp),
-        ]
-        .into(),
+        ],
         // const $ident: test::TestDescAndFn =
         ast::ItemKind::Const(
             ast::Defaultness::Final,


@@ -39,7 +39,7 @@ pub enum Immediate<Prov: Provenance = AllocId> {
 impl<Prov: Provenance> From<Scalar<Prov>> for Immediate<Prov> {
     #[inline(always)]
     fn from(val: Scalar<Prov>) -> Self {
-        Immediate::Scalar(val.into())
+        Immediate::Scalar(val)
     }
 }
@@ -53,7 +53,7 @@ impl<Prov: Provenance> Immediate<Prov> {
     }

     pub fn new_slice(val: Scalar<Prov>, len: u64, cx: &impl HasDataLayout) -> Self {
-        Immediate::ScalarPair(val.into(), Scalar::from_machine_usize(len, cx).into())
+        Immediate::ScalarPair(val, Scalar::from_machine_usize(len, cx))
     }

     pub fn new_dyn_trait(
@@ -61,7 +61,7 @@ impl<Prov: Provenance> Immediate<Prov> {
         vtable: Pointer<Option<Prov>>,
         cx: &impl HasDataLayout,
     ) -> Self {
-        Immediate::ScalarPair(val.into(), Scalar::from_maybe_pointer(vtable, cx))
+        Immediate::ScalarPair(val, Scalar::from_maybe_pointer(vtable, cx))
     }

     #[inline]
@@ -341,10 +341,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     alloc_range(b_offset, b_size),
                     /*read_provenance*/ b.is_ptr(),
                 )?;
-                Some(ImmTy {
-                    imm: Immediate::ScalarPair(a_val.into(), b_val.into()),
-                    layout: mplace.layout,
-                })
+                Some(ImmTy { imm: Immediate::ScalarPair(a_val, b_val), layout: mplace.layout })
             }
             _ => {
                 // Neither a scalar nor scalar pair.


@@ -36,7 +36,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         if let Abi::ScalarPair(..) = dest.layout.abi {
             // We can use the optimized path and avoid `place_field` (which might do
             // `force_allocation`).
-            let pair = Immediate::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
+            let pair = Immediate::ScalarPair(val, Scalar::from_bool(overflowed));
             self.write_immediate(pair, dest)?;
         } else {
             assert!(self.tcx.sess.opts.unstable_opts.randomize_layout);


@@ -141,7 +141,7 @@ impl<Prov: Provenance> MemPlace<Prov> {
         match self.meta {
             MemPlaceMeta::None => Immediate::from(Scalar::from_maybe_pointer(self.ptr, cx)),
             MemPlaceMeta::Meta(meta) => {
-                Immediate::ScalarPair(Scalar::from_maybe_pointer(self.ptr, cx).into(), meta.into())
+                Immediate::ScalarPair(Scalar::from_maybe_pointer(self.ptr, cx), meta)
             }
         }
     }


@@ -1920,7 +1920,7 @@ impl<'tcx> TypeTrace<'tcx> {
     ) -> TypeTrace<'tcx> {
         TypeTrace {
             cause: cause.clone(),
-            values: PolyTraitRefs(ExpectedFound::new(a_is_expected, a.into(), b.into())),
+            values: PolyTraitRefs(ExpectedFound::new(a_is_expected, a, b)),
         }
     }


@@ -88,8 +88,8 @@ pub(super) fn vtable_allocation_provider<'tcx>(
                 let fn_ptr = Pointer::from(fn_alloc_id);
                 Scalar::from_pointer(fn_ptr, &tcx)
             }
-            VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
-            VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
+            VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size),
+            VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size),
             VtblEntry::Vacant => continue,
             VtblEntry::Method(instance) => {
                 // Prepare the fn ptr we write into the vtable.


@@ -356,7 +356,7 @@ impl<'tcx> PlaceBuilder<'tcx> {
         match self {
             PlaceBuilder::Local { local, projection } => PlaceBuilder::Local {
                 local: *local,
-                projection: Vec::from_iter(projection.iter().copied().chain([elem.into()])),
+                projection: Vec::from_iter(projection.iter().copied().chain([elem])),
             },
             PlaceBuilder::Upvar { upvar, projection } => PlaceBuilder::Upvar {
                 upvar: *upvar,


@@ -701,8 +701,8 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
                 BinOp::Mul if const_arg.layout.ty.is_integral() && arg_value == 0 => {
                     if let Rvalue::CheckedBinaryOp(_, _) = rvalue {
                         let val = Immediate::ScalarPair(
-                            const_arg.to_scalar().into(),
-                            Scalar::from_bool(false).into(),
+                            const_arg.to_scalar(),
+                            Scalar::from_bool(false),
                         );
                         this.ecx.write_immediate(val, &dest)
                     } else {


@@ -25,7 +25,7 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
 use rustc_ast::AttrId;
 use rustc_ast::DUMMY_NODE_ID;
-use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, DelimArgs, Extern};
+use rustc_ast::{self as ast, AnonConst, AttrStyle, Const, DelimArgs, Extern};
 use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutability, StrLit};
 use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
 use rustc_ast_pretty::pprust;
@@ -1217,11 +1217,7 @@ impl<'a> Parser<'a> {
             value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
         };
         let blk_span = anon_const.value.span;
-        Ok(self.mk_expr_with_attrs(
-            span.to(blk_span),
-            ExprKind::ConstBlock(anon_const),
-            AttrVec::from(attrs),
-        ))
+        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
     }

     /// Parses mutability (`mut` or nothing).


@@ -83,7 +83,6 @@ impl<'tcx> LanguageItemCollector<'tcx> {
                 .map(|p| p.display().to_string())
                 .collect::<Vec<_>>()
                 .join(", ")
-                .into()
         };
         let first_defined_span = self.tcx.hir().span_if_local(original_def_id);
         let mut orig_crate_name = Empty;
@@ -98,7 +97,6 @@ impl<'tcx> LanguageItemCollector<'tcx> {
                 .map(|p| p.display().to_string())
                 .collect::<Vec<_>>()
                 .join(", ")
-                .into()
         };
         if first_defined_span.is_none() {
             orig_crate_name = self.tcx.crate_name(original_def_id.krate);


@@ -1735,8 +1735,8 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
             values.map(|(_, is_normalized_ty_expected, normalized_ty, expected_ty)| {
                 infer::ValuePairs::Terms(ExpectedFound::new(
                     is_normalized_ty_expected,
-                    normalized_ty.into(),
-                    expected_ty.into(),
+                    normalized_ty,
+                    expected_ty,
                 ))
             }),
             err,