
Auto merge of #75893 - Dylan-DPC:fix/offset-to-u64, r=oli-obk

change offset from u32 to u64

References #71696

r? @oli-obk

(closed the earlier PR because the rebase got messed up)
bors 2020-08-26 13:10:42 +00:00
commit 6ead62235a
11 changed files with 25 additions and 32 deletions
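For orientation, here is a minimal, self-contained sketch of the two MIR projection kinds whose index fields this commit widens (paraphrased from the `ProjectionElem` hunk below; the real enum has more variants and generic parameters, which are elided here):

```rust
/// Simplified sketch of the widened projection kinds; not the actual
/// rustc_middle::mir definition (other variants and generics elided).
#[allow(dead_code)]
enum ProjectionElemSketch {
    /// A constant element index, e.g. the `0` in `array[0]`.
    ConstantIndex {
        /// Index, or (when `from_end` is set) the index counted back from the end.
        offset: u64,
        /// The indexed value must be at least this long; exact for arrays.
        min_length: u64,
        /// Count backwards from the end? Always false when indexing an array.
        from_end: bool,
    },
    /// `slice[from..slice.len() - to]` when `from_end` is true,
    /// otherwise `array[from..to]`.
    Subslice {
        from: u64,
        to: u64,
        /// Whether `to` counts from the start or the end of the array/slice.
        from_end: bool,
    },
}
```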

View file

@@ -1564,10 +1564,10 @@ pub enum ProjectionElem<V, T> {
     /// ```
     ConstantIndex {
         /// index or -index (in Python terms), depending on from_end
-        offset: u32,
+        offset: u64,
         /// The thing being indexed must be at least this long. For arrays this
         /// is always the exact length.
-        min_length: u32,
+        min_length: u64,
         /// Counting backwards from end? This is always false when indexing an
         /// array.
         from_end: bool,
@@ -1578,8 +1578,8 @@ pub enum ProjectionElem<V, T> {
     /// If `from_end` is true `slice[from..slice.len() - to]`.
     /// Otherwise `array[from..to]`.
     Subslice {
-        from: u32,
-        to: u32,
+        from: u64,
+        to: u64,
         /// Whether `to` counts from the start or end of the array/slice.
         /// For `PlaceElem`s this is `true` if and only if the base is a slice.
         /// For `ProjectionKind`, this can also be `true` for arrays.
@@ -1616,7 +1616,7 @@ pub type PlaceElem<'tcx> = ProjectionElem<Local, Ty<'tcx>>;
 // At least on 64 bit systems, `PlaceElem` should not be larger than two pointers.
 #[cfg(target_arch = "x86_64")]
-static_assert_size!(PlaceElem<'_>, 16);
+static_assert_size!(PlaceElem<'_>, 24);

 /// Alias for projections as they appear in `UserTypeProjection`, where we
 /// need neither the `V` parameter for `Index` nor the `T` for `Field`.
@@ -2330,7 +2330,7 @@ impl<'tcx> UserTypeProjections {
         self.map_projections(|pat_ty_proj| pat_ty_proj.index())
     }

-    pub fn subslice(self, from: u32, to: u32) -> Self {
+    pub fn subslice(self, from: u64, to: u64) -> Self {
         self.map_projections(|pat_ty_proj| pat_ty_proj.subslice(from, to))
     }
@@ -2376,7 +2376,7 @@ impl UserTypeProjection {
         self
     }

-    pub(crate) fn subslice(mut self, from: u32, to: u32) -> Self {
+    pub(crate) fn subslice(mut self, from: u64, to: u64) -> Self {
         self.projs.push(ProjectionElem::Subslice { from, to, from_end: true });
         self
     }

View file

@@ -1694,8 +1694,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         desired_action: InitializationRequiringAction,
         place_span: (PlaceRef<'tcx>, Span),
         maybe_uninits: &BitSet<MovePathIndex>,
-        from: u32,
-        to: u32,
+        from: u64,
+        to: u64,
     ) {
         if let Some(mpi) = self.move_path_for_place(place_span.0) {
             let move_paths = &self.move_data.move_paths;

View file

@@ -649,7 +649,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
                 PlaceTy::from_ty(match base_ty.kind {
                     ty::Array(inner, _) => {
                         assert!(!from_end, "array subslices should not use from_end");
-                        tcx.mk_array(inner, (to - from) as u64)
+                        tcx.mk_array(inner, to - from)
                     }
                     ty::Slice(..) => {
                         assert!(from_end, "slice subslices should use from_end");

View file

@@ -480,7 +480,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
             }
         };
         let base_ty = base_place.ty(self.builder.body, self.builder.tcx).ty;
-        let len: u32 = match base_ty.kind {
+        let len: u64 = match base_ty.kind {
             ty::Array(_, size) => {
                 let length = size.eval_usize(self.builder.tcx, self.builder.param_env);
                 length

View file

@@ -549,17 +549,17 @@ where
             ConstantIndex { offset, min_length, from_end } => {
                 let n = base.len(self)?;
-                if n < u64::from(min_length) {
+                if n < min_length {
                     // This can only be reached in ConstProp and non-rustc-MIR.
                     throw_ub!(BoundsCheckFailed { len: min_length.into(), index: n });
                 }
                 let index = if from_end {
                     assert!(0 < offset && offset <= min_length);
-                    n.checked_sub(u64::from(offset)).unwrap()
+                    n.checked_sub(offset).unwrap()
                 } else {
                     assert!(offset < min_length);
-                    u64::from(offset)
+                    offset
                 };
                 self.mplace_index(base, index)?

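The interpreter hunk above only drops the `u64::from` widening conversions; the indexing logic is unchanged. As a worked illustration, a standalone sketch (a hypothetical helper, not interpreter code) that mirrors the bounds check and index computation:

```rust
/// Mirrors the `ConstantIndex` resolution above: given the runtime length `n`
/// of the indexed value, return the concrete element index, or `None` when the
/// value is shorter than `min_length` (the interpreter reports UB in that case).
fn resolve_constant_index(n: u64, offset: u64, min_length: u64, from_end: bool) -> Option<u64> {
    if n < min_length {
        return None;
    }
    Some(if from_end {
        assert!(0 < offset && offset <= min_length);
        n.checked_sub(offset).unwrap() // e.g. n = 5, offset = 1 -> index 4
    } else {
        assert!(offset < min_length);
        offset
    })
}
```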
View file

@@ -295,7 +295,7 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> {
     fn downcast_subpath(&self, _path: Self::Path, _variant: VariantIdx) -> Option<Self::Path> {
         Some(())
     }
-    fn array_subpath(&self, _path: Self::Path, _index: u32, _size: u32) -> Option<Self::Path> {
+    fn array_subpath(&self, _path: Self::Path, _index: u64, _size: u64) -> Option<Self::Path> {
         None
     }
 }

View file

@@ -219,7 +219,7 @@ impl<'a, 'b, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, 'b, 'tcx> {
         })
     }

-    fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path> {
+    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path> {
         dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
             ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                 debug_assert!(size == min_length, "min_length should be exact for arrays");

View file

@@ -3,6 +3,7 @@ use rustc_middle::mir::*;
 use rustc_middle::ty::{Ty, TyCtxt};
 use rustc_target::abi::VariantIdx;
+use std::convert::TryFrom;
 use std::iter::TrustedLen;

 /// Expand `lhs = Rvalue::Aggregate(kind, operands)` into assignments to the fields.
@@ -52,14 +53,11 @@ pub fn expand_aggregate<'tcx>(
         .enumerate()
         .map(move |(i, (op, ty))| {
             let lhs_field = if let AggregateKind::Array(_) = kind {
-                // FIXME(eddyb) `offset` should be u64.
-                let offset = i as u32;
-                assert_eq!(offset as usize, i);
+                let offset = u64::try_from(i).unwrap();
                 tcx.mk_place_elem(
                     lhs,
                     ProjectionElem::ConstantIndex {
                         offset,
-                        // FIXME(eddyb) `min_length` doesn't appear to be used.
                         min_length: offset + 1,
                         from_end: false,
                     },

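The hunk above replaces the manual cast-plus-assert with `u64::try_from`, which keeps the overflow check without spelling the target width twice. A small illustration of the idiom (a hypothetical function, not repository code):

```rust
use std::convert::TryFrom;

/// Convert an enumeration index into a projection offset. `try_from` panics via
/// `unwrap` instead of silently truncating if the value ever failed to fit;
/// for usize -> u64 on supported targets it always fits.
fn index_to_offset(i: usize) -> u64 {
    u64::try_from(i).unwrap()
}

fn main() {
    assert_eq!(index_to_offset(3), 3);
}
```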
View file

@@ -10,8 +10,6 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_target::abi::VariantIdx;
 use std::fmt;

-use std::convert::TryInto;
-
 /// The value of an inserted drop flag.
 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
 pub enum DropFlagState {
@@ -150,7 +148,7 @@ pub trait DropElaborator<'a, 'tcx>: fmt::Debug {
     /// If this returns `None`, elements of `path` will not get a dedicated drop flag.
     ///
     /// This is only relevant for array patterns, which can move out of individual array elements.
-    fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path>;
+    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
 }

 #[derive(Debug)]
@@ -744,9 +742,6 @@ where
         let tcx = self.tcx();
         if let Some(size) = opt_size {
-            let size: u32 = size.try_into().unwrap_or_else(|_| {
-                bug!("move out check isn't implemented for array sizes bigger than u32::MAX");
-            });
             let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size)
                 .map(|i| {
                     (

View file

@@ -609,8 +609,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             PatKind::Array { ref prefix, ref slice, ref suffix }
             | PatKind::Slice { ref prefix, ref slice, ref suffix } => {
-                let from = u32::try_from(prefix.len()).unwrap();
-                let to = u32::try_from(suffix.len()).unwrap();
+                let from = u64::try_from(prefix.len()).unwrap();
+                let to = u64::try_from(suffix.len()).unwrap();
                 for subpattern in prefix {
                     self.visit_primary_bindings(subpattern, pattern_user_ty.clone().index(), f);
                 }

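To make the `from`/`to` values computed above concrete, a small sketch (a hypothetical helper mirroring the pattern-lowering code): for a slice pattern shaped like `[a, b, rest @ .., y]`, the prefix has two bindings and the suffix one, so `rest` covers `slice[2 .. slice.len() - 1]`:

```rust
use std::convert::TryFrom;

/// Mirrors the computation above: for a slice pattern the bound subslice
/// becomes `Subslice { from, to, from_end: true }`.
fn subslice_bounds(prefix_len: usize, suffix_len: usize) -> (u64, u64) {
    let from = u64::try_from(prefix_len).unwrap();
    let to = u64::try_from(suffix_len).unwrap();
    (from, to)
}

fn main() {
    // `[a, b, rest @ .., y]`: two prefix bindings, one suffix binding.
    assert_eq!(subslice_bounds(2, 1), (2, 1));
}
```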
View file

@@ -40,17 +40,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         match_pairs.extend(prefix.iter().enumerate().map(|(idx, subpattern)| {
             let elem =
-                ProjectionElem::ConstantIndex { offset: idx as u32, min_length, from_end: false };
+                ProjectionElem::ConstantIndex { offset: idx as u64, min_length, from_end: false };
             let place = tcx.mk_place_elem(*place, elem);
             MatchPair::new(place, subpattern)
         }));

         if let Some(subslice_pat) = opt_slice {
-            let suffix_len = suffix.len() as u32;
+            let suffix_len = suffix.len() as u64;
             let subslice = tcx.mk_place_elem(
                 *place,
                 ProjectionElem::Subslice {
-                    from: prefix.len() as u32,
+                    from: prefix.len() as u64,
                     to: if exact_size { min_length - suffix_len } else { suffix_len },
                     from_end: !exact_size,
                 },
@@ -59,7 +59,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         }

         match_pairs.extend(suffix.iter().rev().enumerate().map(|(idx, subpattern)| {
-            let end_offset = (idx + 1) as u32;
+            let end_offset = (idx + 1) as u64;
             let elem = ProjectionElem::ConstantIndex {
                 offset: if exact_size { min_length - end_offset } else { end_offset },
                 min_length,