
Auto merge of #108070 - Dylan-DPC:rollup-v6xw7vk, r=Dylan-DPC

Rollup of 7 pull requests

Successful merges:

 - #105300 (rework min_choice algorithm of member constraints)
 - #107163 (Remove some superfluous type parameters from layout.rs.)
 - #107173 (Suggest the correct array length on mismatch)
 - #107411 (Handle discriminant in DataflowConstProp)
 - #107968 (Enable `#[thread_local]` on armv6k-nintendo-3ds)
 - #108032 (Un📦ing the Resolver)
 - #108060 (Revert to using `RtlGenRandom` as a fallback)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2023-02-15 08:48:44 +00:00
commit 999ac5f777
47 changed files with 1101 additions and 565 deletions

View file

@@ -1,11 +1,5 @@
 use super::*;
-use std::{
-    borrow::Borrow,
-    cmp,
-    fmt::Debug,
-    iter,
-    ops::{Bound, Deref},
-};
+use std::{borrow::Borrow, cmp, iter, ops::Bound};
 
 #[cfg(feature = "randomize")]
 use rand::{seq::SliceRandom, SeedableRng};
@ -33,7 +27,7 @@ pub trait LayoutCalculator {
fn delay_bug(&self, txt: &str); fn delay_bug(&self, txt: &str);
fn current_data_layout(&self) -> Self::TargetDataLayoutRef; fn current_data_layout(&self) -> Self::TargetDataLayoutRef;
fn scalar_pair<V: Idx>(&self, a: Scalar, b: Scalar) -> LayoutS<V> { fn scalar_pair(&self, a: Scalar, b: Scalar) -> LayoutS {
let dl = self.current_data_layout(); let dl = self.current_data_layout();
let dl = dl.borrow(); let dl = dl.borrow();
let b_align = b.align(dl); let b_align = b.align(dl);
@ -49,7 +43,7 @@ pub trait LayoutCalculator {
.max_by_key(|niche| niche.available(dl)); .max_by_key(|niche| niche.available(dl));
LayoutS { LayoutS {
variants: Variants::Single { index: V::new(0) }, variants: Variants::Single { index: VariantIdx::new(0) },
fields: FieldsShape::Arbitrary { fields: FieldsShape::Arbitrary {
offsets: vec![Size::ZERO, b_offset], offsets: vec![Size::ZERO, b_offset],
memory_index: vec![0, 1], memory_index: vec![0, 1],
@ -61,13 +55,13 @@ pub trait LayoutCalculator {
} }
} }
fn univariant<'a, V: Idx, F: Deref<Target = &'a LayoutS<V>> + Debug>( fn univariant(
&self, &self,
dl: &TargetDataLayout, dl: &TargetDataLayout,
fields: &[F], fields: &[Layout<'_>],
repr: &ReprOptions, repr: &ReprOptions,
kind: StructKind, kind: StructKind,
) -> Option<LayoutS<V>> { ) -> Option<LayoutS> {
let pack = repr.pack; let pack = repr.pack;
let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align }; let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect(); let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
@ -76,17 +70,17 @@ pub trait LayoutCalculator {
let end = let end =
if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() }; if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
let optimizing = &mut inverse_memory_index[..end]; let optimizing = &mut inverse_memory_index[..end];
let effective_field_align = |f: &F| { let effective_field_align = |layout: Layout<'_>| {
if let Some(pack) = pack { if let Some(pack) = pack {
// return the packed alignment in bytes // return the packed alignment in bytes
f.align.abi.min(pack).bytes() layout.align().abi.min(pack).bytes()
} else { } else {
// returns log2(effective-align). // returns log2(effective-align).
// This is ok since `pack` applies to all fields equally. // This is ok since `pack` applies to all fields equally.
// The calculation assumes that size is an integer multiple of align, except for ZSTs. // The calculation assumes that size is an integer multiple of align, except for ZSTs.
// //
// group [u8; 4] with align-4 or [u8; 6] with align-2 fields // group [u8; 4] with align-4 or [u8; 6] with align-2 fields
f.align.abi.bytes().max(f.size.bytes()).trailing_zeros() as u64 layout.align().abi.bytes().max(layout.size().bytes()).trailing_zeros() as u64
} }
}; };
@ -111,9 +105,9 @@ pub trait LayoutCalculator {
// Place ZSTs first to avoid "interesting offsets", // Place ZSTs first to avoid "interesting offsets",
// especially with only one or two non-ZST fields. // especially with only one or two non-ZST fields.
// Then place largest alignments first, largest niches within an alignment group last // Then place largest alignments first, largest niches within an alignment group last
let f = &fields[x as usize]; let f = fields[x as usize];
let niche_size = f.largest_niche.map_or(0, |n| n.available(dl)); let niche_size = f.largest_niche().map_or(0, |n| n.available(dl));
(!f.is_zst(), cmp::Reverse(effective_field_align(f)), niche_size) (!f.0.is_zst(), cmp::Reverse(effective_field_align(f)), niche_size)
}); });
} }
@ -123,8 +117,8 @@ pub trait LayoutCalculator {
// And put the largest niche in an alignment group at the end // And put the largest niche in an alignment group at the end
// so it can be used as discriminant in jagged enums // so it can be used as discriminant in jagged enums
optimizing.sort_by_key(|&x| { optimizing.sort_by_key(|&x| {
let f = &fields[x as usize]; let f = fields[x as usize];
let niche_size = f.largest_niche.map_or(0, |n| n.available(dl)); let niche_size = f.largest_niche().map_or(0, |n| n.available(dl));
(effective_field_align(f), niche_size) (effective_field_align(f), niche_size)
}); });
} }
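
The two sorts above implement the field-reordering heuristic spelled out in the comments: zero-sized fields come first, larger alignment groups come before smaller ones, and within an alignment group the field with the largest niche goes last so it can serve as the discriminant. A minimal standalone sketch of that ordering, with toy field records and a plain byte alignment standing in for the packed/log2 `effective_field_align` computed above (this is not rustc code):

```rust
use std::cmp::Reverse;

struct Field {
    name: &'static str,
    size: u64,  // bytes; 0 marks a ZST
    align: u64, // bytes, a power of two
    niche: u64, // how many invalid bit-patterns could hold a discriminant
}

// Sort key mirroring the first `sort_by_key` above: ZSTs first, then larger alignment
// first, and within one alignment group the field with the largest niche last.
fn optimize_order(fields: &mut [Field]) {
    fields.sort_by_key(|f| {
        let is_zst = f.size == 0;
        (!is_zst, Reverse(f.align), f.niche)
    });
}

fn main() {
    let mut fields = vec![
        Field { name: "flag", size: 1, align: 1, niche: 254 }, // bool-like: 1 byte, big niche
        Field { name: "marker", size: 0, align: 1, niche: 0 }, // zero-sized marker
        Field { name: "count", size: 4, align: 4, niche: 0 },  // u32-like
    ];
    optimize_order(&mut fields);
    let order: Vec<_> = fields.iter().map(|f| f.name).collect();
    // ZST first, then the 4-byte-aligned field, then the niche-carrying byte at the end.
    assert_eq!(order, ["marker", "count", "flag"]);
}
```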
@ -160,15 +154,15 @@ pub trait LayoutCalculator {
)); ));
} }
if field.is_unsized() { if field.0.is_unsized() {
sized = false; sized = false;
} }
// Invariant: offset < dl.obj_size_bound() <= 1<<61 // Invariant: offset < dl.obj_size_bound() <= 1<<61
let field_align = if let Some(pack) = pack { let field_align = if let Some(pack) = pack {
field.align.min(AbiAndPrefAlign::new(pack)) field.align().min(AbiAndPrefAlign::new(pack))
} else { } else {
field.align field.align()
}; };
offset = offset.align_to(field_align.abi); offset = offset.align_to(field_align.abi);
align = align.max(field_align); align = align.max(field_align);
@ -176,7 +170,7 @@ pub trait LayoutCalculator {
debug!("univariant offset: {:?} field: {:#?}", offset, field); debug!("univariant offset: {:?} field: {:#?}", offset, field);
offsets[i as usize] = offset; offsets[i as usize] = offset;
if let Some(mut niche) = field.largest_niche { if let Some(mut niche) = field.largest_niche() {
let available = niche.available(dl); let available = niche.available(dl);
if available > largest_niche_available { if available > largest_niche_available {
largest_niche_available = available; largest_niche_available = available;
@ -185,7 +179,7 @@ pub trait LayoutCalculator {
} }
} }
offset = offset.checked_add(field.size, dl)?; offset = offset.checked_add(field.size(), dl)?;
} }
if let Some(repr_align) = repr.align { if let Some(repr_align) = repr.align {
align = align.max(AbiAndPrefAlign::new(repr_align)); align = align.max(AbiAndPrefAlign::new(repr_align));
@ -205,24 +199,26 @@ pub trait LayoutCalculator {
// Unpack newtype ABIs and find scalar pairs. // Unpack newtype ABIs and find scalar pairs.
if sized && size.bytes() > 0 { if sized && size.bytes() > 0 {
// All other fields must be ZSTs. // All other fields must be ZSTs.
let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst()); let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.0.is_zst());
match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) { match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
// We have exactly one non-ZST field. // We have exactly one non-ZST field.
(Some((i, field)), None, None) => { (Some((i, field)), None, None) => {
// Field fills the struct and it has a scalar or scalar pair ABI. // Field fills the struct and it has a scalar or scalar pair ABI.
if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size if offsets[i].bytes() == 0
&& align.abi == field.align().abi
&& size == field.size()
{ {
match field.abi { match field.abi() {
// For plain scalars, or vectors of them, we can't unpack // For plain scalars, or vectors of them, we can't unpack
// newtypes for `#[repr(C)]`, as that affects C ABIs. // newtypes for `#[repr(C)]`, as that affects C ABIs.
Abi::Scalar(_) | Abi::Vector { .. } if optimize => { Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
abi = field.abi; abi = field.abi();
} }
// But scalar pairs are Rust-specific and get // But scalar pairs are Rust-specific and get
// treated as aggregates by C ABIs anyway. // treated as aggregates by C ABIs anyway.
Abi::ScalarPair(..) => { Abi::ScalarPair(..) => {
abi = field.abi; abi = field.abi();
} }
_ => {} _ => {}
} }
@ -231,7 +227,7 @@ pub trait LayoutCalculator {
// Two non-ZST fields, and they're both scalars. // Two non-ZST fields, and they're both scalars.
(Some((i, a)), Some((j, b)), None) => { (Some((i, a)), Some((j, b)), None) => {
match (a.abi, b.abi) { match (a.abi(), b.abi()) {
(Abi::Scalar(a), Abi::Scalar(b)) => { (Abi::Scalar(a), Abi::Scalar(b)) => {
// Order by the memory placement, not source order. // Order by the memory placement, not source order.
let ((i, a), (j, b)) = if offsets[i] < offsets[j] { let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
@ -239,7 +235,7 @@ pub trait LayoutCalculator {
} else { } else {
((j, b), (i, a)) ((j, b), (i, a))
}; };
let pair = self.scalar_pair::<V>(a, b); let pair = self.scalar_pair(a, b);
let pair_offsets = match pair.fields { let pair_offsets = match pair.fields {
FieldsShape::Arbitrary { ref offsets, ref memory_index } => { FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
assert_eq!(memory_index, &[0, 1]); assert_eq!(memory_index, &[0, 1]);
@ -264,11 +260,11 @@ pub trait LayoutCalculator {
_ => {} _ => {}
} }
} }
if fields.iter().any(|f| f.abi.is_uninhabited()) { if fields.iter().any(|f| f.abi().is_uninhabited()) {
abi = Abi::Uninhabited; abi = Abi::Uninhabited;
} }
Some(LayoutS { Some(LayoutS {
variants: Variants::Single { index: V::new(0) }, variants: Variants::Single { index: VariantIdx::new(0) },
fields: FieldsShape::Arbitrary { offsets, memory_index }, fields: FieldsShape::Arbitrary { offsets, memory_index },
abi, abi,
largest_niche, largest_niche,
@ -277,11 +273,11 @@ pub trait LayoutCalculator {
}) })
} }
fn layout_of_never_type<V: Idx>(&self) -> LayoutS<V> { fn layout_of_never_type(&self) -> LayoutS {
let dl = self.current_data_layout(); let dl = self.current_data_layout();
let dl = dl.borrow(); let dl = dl.borrow();
LayoutS { LayoutS {
variants: Variants::Single { index: V::new(0) }, variants: Variants::Single { index: VariantIdx::new(0) },
fields: FieldsShape::Primitive, fields: FieldsShape::Primitive,
abi: Abi::Uninhabited, abi: Abi::Uninhabited,
largest_niche: None, largest_niche: None,
@ -290,18 +286,18 @@ pub trait LayoutCalculator {
} }
} }
fn layout_of_struct_or_enum<'a, V: Idx, F: Deref<Target = &'a LayoutS<V>> + Debug>( fn layout_of_struct_or_enum(
&self, &self,
repr: &ReprOptions, repr: &ReprOptions,
variants: &IndexVec<V, Vec<F>>, variants: &IndexVec<VariantIdx, Vec<Layout<'_>>>,
is_enum: bool, is_enum: bool,
is_unsafe_cell: bool, is_unsafe_cell: bool,
scalar_valid_range: (Bound<u128>, Bound<u128>), scalar_valid_range: (Bound<u128>, Bound<u128>),
discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool), discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
discriminants: impl Iterator<Item = (V, i128)>, discriminants: impl Iterator<Item = (VariantIdx, i128)>,
niche_optimize_enum: bool, niche_optimize_enum: bool,
always_sized: bool, always_sized: bool,
) -> Option<LayoutS<V>> { ) -> Option<LayoutS> {
let dl = self.current_data_layout(); let dl = self.current_data_layout();
let dl = dl.borrow(); let dl = dl.borrow();
@ -316,9 +312,9 @@ pub trait LayoutCalculator {
// but *not* an encoding of the discriminant (e.g., a tag value). // but *not* an encoding of the discriminant (e.g., a tag value).
// See issue #49298 for more details on the need to leave space // See issue #49298 for more details on the need to leave space
// for non-ZST uninhabited data (mostly partial initialization). // for non-ZST uninhabited data (mostly partial initialization).
let absent = |fields: &[F]| { let absent = |fields: &[Layout<'_>]| {
let uninhabited = fields.iter().any(|f| f.abi.is_uninhabited()); let uninhabited = fields.iter().any(|f| f.abi().is_uninhabited());
let is_zst = fields.iter().all(|f| f.is_zst()); let is_zst = fields.iter().all(|f| f.0.is_zst());
uninhabited && is_zst uninhabited && is_zst
}; };
let (present_first, present_second) = { let (present_first, present_second) = {
@ -335,7 +331,7 @@ pub trait LayoutCalculator {
} }
// If it's a struct, still compute a layout so that we can still compute the // If it's a struct, still compute a layout so that we can still compute the
// field offsets. // field offsets.
None => V::new(0), None => VariantIdx::new(0),
}; };
let is_struct = !is_enum || let is_struct = !is_enum ||
@ -439,12 +435,12 @@ pub trait LayoutCalculator {
// variant layouts, so we can't store them in the // variant layouts, so we can't store them in the
// overall LayoutS. Store the overall LayoutS // overall LayoutS. Store the overall LayoutS
// and the variant LayoutSs here until then. // and the variant LayoutSs here until then.
struct TmpLayout<V: Idx> { struct TmpLayout {
layout: LayoutS<V>, layout: LayoutS,
variants: IndexVec<V, LayoutS<V>>, variants: IndexVec<VariantIdx, LayoutS>,
} }
let calculate_niche_filling_layout = || -> Option<TmpLayout<V>> { let calculate_niche_filling_layout = || -> Option<TmpLayout> {
if niche_optimize_enum { if niche_optimize_enum {
return None; return None;
} }
@ -464,15 +460,16 @@ pub trait LayoutCalculator {
Some(st) Some(st)
}) })
.collect::<Option<IndexVec<V, _>>>()?; .collect::<Option<IndexVec<VariantIdx, _>>>()?;
let largest_variant_index = variant_layouts let largest_variant_index = variant_layouts
.iter_enumerated() .iter_enumerated()
.max_by_key(|(_i, layout)| layout.size.bytes()) .max_by_key(|(_i, layout)| layout.size.bytes())
.map(|(i, _layout)| i)?; .map(|(i, _layout)| i)?;
let all_indices = (0..=variants.len() - 1).map(V::new); let all_indices = (0..=variants.len() - 1).map(VariantIdx::new);
let needs_disc = |index: V| index != largest_variant_index && !absent(&variants[index]); let needs_disc =
|index: VariantIdx| index != largest_variant_index && !absent(&variants[index]);
let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap().index() let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap().index()
..=all_indices.rev().find(|v| needs_disc(*v)).unwrap().index(); ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap().index();
@ -482,7 +479,7 @@ pub trait LayoutCalculator {
let (field_index, niche, (niche_start, niche_scalar)) = variants[largest_variant_index] let (field_index, niche, (niche_start, niche_scalar)) = variants[largest_variant_index]
.iter() .iter()
.enumerate() .enumerate()
.filter_map(|(j, field)| Some((j, field.largest_niche?))) .filter_map(|(j, field)| Some((j, field.largest_niche()?)))
.max_by_key(|(_, niche)| niche.available(dl)) .max_by_key(|(_, niche)| niche.available(dl))
.and_then(|(j, niche)| Some((j, niche, niche.reserve(dl, count)?)))?; .and_then(|(j, niche)| Some((j, niche, niche.reserve(dl, count)?)))?;
let niche_offset = let niche_offset =
@ -514,7 +511,7 @@ pub trait LayoutCalculator {
match layout.fields { match layout.fields {
FieldsShape::Arbitrary { ref mut offsets, .. } => { FieldsShape::Arbitrary { ref mut offsets, .. } => {
for (j, offset) in offsets.iter_mut().enumerate() { for (j, offset) in offsets.iter_mut().enumerate() {
if !variants[i][j].is_zst() { if !variants[i][j].0.is_zst() {
*offset += this_offset; *offset += this_offset;
} }
} }
@ -572,8 +569,8 @@ pub trait LayoutCalculator {
tag: niche_scalar, tag: niche_scalar,
tag_encoding: TagEncoding::Niche { tag_encoding: TagEncoding::Niche {
untagged_variant: largest_variant_index, untagged_variant: largest_variant_index,
niche_variants: (V::new(*niche_variants.start()) niche_variants: (VariantIdx::new(*niche_variants.start())
..=V::new(*niche_variants.end())), ..=VariantIdx::new(*niche_variants.end())),
niche_start, niche_start,
}, },
tag_field: 0, tag_field: 0,
@ -598,7 +595,7 @@ pub trait LayoutCalculator {
let discr_type = repr.discr_type(); let discr_type = repr.discr_type();
let bits = Integer::from_attr(dl, discr_type).size().bits(); let bits = Integer::from_attr(dl, discr_type).size().bits();
for (i, mut val) in discriminants { for (i, mut val) in discriminants {
if variants[i].iter().any(|f| f.abi.is_uninhabited()) { if variants[i].iter().any(|f| f.abi().is_uninhabited()) {
continue; continue;
} }
if discr_type.is_signed() { if discr_type.is_signed() {
@ -636,7 +633,7 @@ pub trait LayoutCalculator {
if repr.c() { if repr.c() {
for fields in variants { for fields in variants {
for field in fields { for field in fields {
prefix_align = prefix_align.max(field.align.abi); prefix_align = prefix_align.max(field.align().abi);
} }
} }
} }
@ -655,8 +652,8 @@ pub trait LayoutCalculator {
// Find the first field we can't move later // Find the first field we can't move later
// to make room for a larger discriminant. // to make room for a larger discriminant.
for field in st.fields.index_by_increasing_offset().map(|j| &field_layouts[j]) { for field in st.fields.index_by_increasing_offset().map(|j| &field_layouts[j]) {
if !field.is_zst() || field.align.abi.bytes() != 1 { if !field.0.is_zst() || field.align().abi.bytes() != 1 {
start_align = start_align.min(field.align.abi); start_align = start_align.min(field.align().abi);
break; break;
} }
} }
@ -664,7 +661,7 @@ pub trait LayoutCalculator {
align = align.max(st.align); align = align.max(st.align);
Some(st) Some(st)
}) })
.collect::<Option<IndexVec<V, _>>>()?; .collect::<Option<IndexVec<VariantIdx, _>>>()?;
// Align the maximum variant size to the largest alignment. // Align the maximum variant size to the largest alignment.
size = size.align_to(align.abi); size = size.align_to(align.abi);
@ -759,7 +756,7 @@ pub trait LayoutCalculator {
let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else { let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
panic!(); panic!();
}; };
let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst()); let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.0.is_zst());
let (field, offset) = match (fields.next(), fields.next()) { let (field, offset) = match (fields.next(), fields.next()) {
(None, None) => { (None, None) => {
common_prim_initialized_in_all_variants = false; common_prim_initialized_in_all_variants = false;
@ -771,7 +768,7 @@ pub trait LayoutCalculator {
break; break;
} }
}; };
let prim = match field.abi { let prim = match field.abi() {
Abi::Scalar(scalar) => { Abi::Scalar(scalar) => {
common_prim_initialized_in_all_variants &= common_prim_initialized_in_all_variants &=
matches!(scalar, Scalar::Initialized { .. }); matches!(scalar, Scalar::Initialized { .. });
@ -802,7 +799,7 @@ pub trait LayoutCalculator {
// Common prim might be uninit. // Common prim might be uninit.
Scalar::Union { value: prim } Scalar::Union { value: prim }
}; };
let pair = self.scalar_pair::<V>(tag, prim_scalar); let pair = self.scalar_pair(tag, prim_scalar);
let pair_offsets = match pair.fields { let pair_offsets = match pair.fields {
FieldsShape::Arbitrary { ref offsets, ref memory_index } => { FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
assert_eq!(memory_index, &[0, 1]); assert_eq!(memory_index, &[0, 1]);
@ -862,9 +859,8 @@ pub trait LayoutCalculator {
// pick the layout with the larger niche; otherwise, // pick the layout with the larger niche; otherwise,
// pick tagged as it has simpler codegen. // pick tagged as it has simpler codegen.
use cmp::Ordering::*; use cmp::Ordering::*;
let niche_size = |tmp_l: &TmpLayout<V>| { let niche_size =
tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl)) |tmp_l: &TmpLayout| tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl));
};
match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) { match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) {
(Greater, _) => nl, (Greater, _) => nl,
(Equal, Less) => nl, (Equal, Less) => nl,
@ -884,11 +880,11 @@ pub trait LayoutCalculator {
Some(best_layout.layout) Some(best_layout.layout)
} }
fn layout_of_union<'a, V: Idx, F: Deref<Target = &'a LayoutS<V>> + Debug>( fn layout_of_union(
&self, &self,
repr: &ReprOptions, repr: &ReprOptions,
variants: &IndexVec<V, Vec<F>>, variants: &IndexVec<VariantIdx, Vec<Layout<'_>>>,
) -> Option<LayoutS<V>> { ) -> Option<LayoutS> {
let dl = self.current_data_layout(); let dl = self.current_data_layout();
let dl = dl.borrow(); let dl = dl.borrow();
let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align }; let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
@ -900,15 +896,15 @@ pub trait LayoutCalculator {
let optimize = !repr.inhibit_union_abi_opt(); let optimize = !repr.inhibit_union_abi_opt();
let mut size = Size::ZERO; let mut size = Size::ZERO;
let mut abi = Abi::Aggregate { sized: true }; let mut abi = Abi::Aggregate { sized: true };
let index = V::new(0); let index = VariantIdx::new(0);
for field in &variants[index] { for field in &variants[index] {
assert!(field.is_sized()); assert!(field.0.is_sized());
align = align.max(field.align); align = align.max(field.align());
// If all non-ZST fields have the same ABI, forward this ABI // If all non-ZST fields have the same ABI, forward this ABI
if optimize && !field.is_zst() { if optimize && !field.0.is_zst() {
// Discard valid range information and allow undef // Discard valid range information and allow undef
let field_abi = match field.abi { let field_abi = match field.abi() {
Abi::Scalar(x) => Abi::Scalar(x.to_union()), Abi::Scalar(x) => Abi::Scalar(x.to_union()),
Abi::ScalarPair(x, y) => Abi::ScalarPair(x.to_union(), y.to_union()), Abi::ScalarPair(x, y) => Abi::ScalarPair(x.to_union(), y.to_union()),
Abi::Vector { element: x, count } => { Abi::Vector { element: x, count } => {
@ -926,7 +922,7 @@ pub trait LayoutCalculator {
} }
} }
size = cmp::max(size, field.size); size = cmp::max(size, field.size());
} }
if let Some(pack) = repr.pack { if let Some(pack) = repr.pack {

View file

@ -8,6 +8,7 @@ use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub};
use std::str::FromStr; use std::str::FromStr;
use bitflags::bitflags; use bitflags::bitflags;
use rustc_data_structures::intern::Interned;
#[cfg(feature = "nightly")] #[cfg(feature = "nightly")]
use rustc_data_structures::stable_hasher::StableOrd; use rustc_data_structures::stable_hasher::StableOrd;
use rustc_index::vec::{Idx, IndexVec}; use rustc_index::vec::{Idx, IndexVec};
@ -1250,9 +1251,9 @@ impl Abi {
#[derive(PartialEq, Eq, Hash, Clone, Debug)] #[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<V: Idx> { pub enum Variants {
/// Single enum variants, structs/tuples, unions, and all non-ADTs. /// Single enum variants, structs/tuples, unions, and all non-ADTs.
Single { index: V }, Single { index: VariantIdx },
/// Enum-likes with more than one inhabited variant: each variant comes with /// Enum-likes with more than one inhabited variant: each variant comes with
/// a *discriminant* (usually the same as the variant index but the user can /// a *discriminant* (usually the same as the variant index but the user can
@ -1262,15 +1263,15 @@ pub enum Variants<V: Idx> {
/// For enums, the tag is the sole field of the layout. /// For enums, the tag is the sole field of the layout.
Multiple { Multiple {
tag: Scalar, tag: Scalar,
tag_encoding: TagEncoding<V>, tag_encoding: TagEncoding,
tag_field: usize, tag_field: usize,
variants: IndexVec<V, LayoutS<V>>, variants: IndexVec<VariantIdx, LayoutS>,
}, },
} }
#[derive(PartialEq, Eq, Hash, Clone, Debug)] #[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<V: Idx> { pub enum TagEncoding {
/// The tag directly stores the discriminant, but possibly with a smaller layout /// The tag directly stores the discriminant, but possibly with a smaller layout
/// (so converting the tag to the discriminant can require sign extension). /// (so converting the tag to the discriminant can require sign extension).
Direct, Direct,
@ -1285,7 +1286,11 @@ pub enum TagEncoding<V: Idx> {
/// For example, `Option<(usize, &T)>` is represented such that /// For example, `Option<(usize, &T)>` is represented such that
/// `None` has a null pointer for the second tuple field, and /// `None` has a null pointer for the second tuple field, and
/// `Some` is the identity function (with a non-null reference). /// `Some` is the identity function (with a non-null reference).
Niche { untagged_variant: V, niche_variants: RangeInclusive<V>, niche_start: u128 }, Niche {
untagged_variant: VariantIdx,
niche_variants: RangeInclusive<VariantIdx>,
niche_start: u128,
},
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
@ -1372,9 +1377,14 @@ impl Niche {
} }
} }
rustc_index::newtype_index! {
#[derive(HashStable_Generic)]
pub struct VariantIdx {}
}
#[derive(PartialEq, Eq, Hash, Clone)] #[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutS<V: Idx> { pub struct LayoutS {
/// Says where the fields are located within the layout. /// Says where the fields are located within the layout.
pub fields: FieldsShape, pub fields: FieldsShape,
@ -1385,7 +1395,7 @@ pub struct LayoutS<V: Idx> {
/// ///
/// To access all fields of this layout, both `fields` and the fields of the active variant /// To access all fields of this layout, both `fields` and the fields of the active variant
/// must be taken into account. /// must be taken into account.
pub variants: Variants<V>, pub variants: Variants,
/// The `abi` defines how this data is passed between functions, and it defines /// The `abi` defines how this data is passed between functions, and it defines
/// value restrictions via `valid_range`. /// value restrictions via `valid_range`.
@ -1404,13 +1414,13 @@ pub struct LayoutS<V: Idx> {
pub size: Size, pub size: Size,
} }
impl<V: Idx> LayoutS<V> { impl LayoutS {
pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self { pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar); let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar);
let size = scalar.size(cx); let size = scalar.size(cx);
let align = scalar.align(cx); let align = scalar.align(cx);
LayoutS { LayoutS {
variants: Variants::Single { index: V::new(0) }, variants: Variants::Single { index: VariantIdx::new(0) },
fields: FieldsShape::Primitive, fields: FieldsShape::Primitive,
abi: Abi::Scalar(scalar), abi: Abi::Scalar(scalar),
largest_niche, largest_niche,
@ -1420,7 +1430,7 @@ impl<V: Idx> LayoutS<V> {
} }
} }
impl<V: Idx> fmt::Debug for LayoutS<V> { impl fmt::Debug for LayoutS {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// This is how `Layout` used to print before it become // This is how `Layout` used to print before it become
// `Interned<LayoutS>`. We print it like this to avoid having to update // `Interned<LayoutS>`. We print it like this to avoid having to update
@ -1437,6 +1447,43 @@ impl<V: Idx> fmt::Debug for LayoutS<V> {
} }
} }
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable_Generic)]
#[rustc_pass_by_value]
pub struct Layout<'a>(pub Interned<'a, LayoutS>);
impl<'a> fmt::Debug for Layout<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// See comment on `<LayoutS as Debug>::fmt` above.
self.0.0.fmt(f)
}
}
impl<'a> Layout<'a> {
pub fn fields(self) -> &'a FieldsShape {
&self.0.0.fields
}
pub fn variants(self) -> &'a Variants {
&self.0.0.variants
}
pub fn abi(self) -> Abi {
self.0.0.abi
}
pub fn largest_niche(self) -> Option<Niche> {
self.0.0.largest_niche
}
pub fn align(self) -> AbiAndPrefAlign {
self.0.0.align
}
pub fn size(self) -> Size {
self.0.0.size
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind { pub enum PointerKind {
/// Shared reference. `frozen` indicates the absence of any `UnsafeCell`. /// Shared reference. `frozen` indicates the absence of any `UnsafeCell`.
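
The new `Layout<'a>` type above is a `Copy` newtype over interned layout data with forwarding accessors, which is why the rest of this commit turns field accesses such as `field.align` into calls such as `field.align()` and occasionally reaches through `.0` for the underlying `LayoutS`. A minimal standalone sketch of the pattern, where `Interned` is only a stand-in shared reference for `rustc_data_structures::intern::Interned` and the data struct is trimmed to two fields:

```rust
#[derive(Copy, Clone)]
struct Interned<'a, T>(&'a T); // stand-in for the real interning wrapper

struct LayoutData {
    align: u64, // trimmed-down stand-ins for the real `LayoutS` fields
    size: u64,
}

// A cheap-to-copy handle; call sites write `layout.align()` instead of
// reaching through the interned pointer themselves.
#[derive(Copy, Clone)]
struct Layout<'a>(Interned<'a, LayoutData>);

impl<'a> Layout<'a> {
    fn align(self) -> u64 {
        self.0.0.align
    }
    fn size(self) -> u64 {
        self.0.0.size
    }
}

fn main() {
    let data = LayoutData { align: 8, size: 16 };
    let layout = Layout(Interned(&data));
    assert_eq!((layout.align(), layout.size()), (8, 16));
}
```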
@ -1464,7 +1511,7 @@ pub enum InitKind {
UninitMitigated0x01Fill, UninitMitigated0x01Fill,
} }
impl<V: Idx> LayoutS<V> { impl LayoutS {
/// Returns `true` if the layout corresponds to an unsized type. /// Returns `true` if the layout corresponds to an unsized type.
pub fn is_unsized(&self) -> bool { pub fn is_unsized(&self) -> bool {
self.abi.is_unsized() self.abi.is_unsized()

View file

@@ -746,20 +746,33 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         }
         debug!(?choice_regions, "after ub");
 
-        // If we ruled everything out, we're done.
-        if choice_regions.is_empty() {
-            return false;
-        }
-
-        // Otherwise, we need to find the minimum remaining choice, if
-        // any, and take that.
-        debug!("choice_regions remaining are {:#?}", choice_regions);
-        let Some(&min_choice) = choice_regions.iter().find(|&r1| {
-            choice_regions.iter().all(|&r2| {
-                self.universal_region_relations.outlives(r2, *r1)
-            })
-        }) else {
-            debug!("no choice region outlived by all others");
+        // At this point we can pick any member of `choice_regions`, but to avoid potential
+        // non-determinism we will pick the *unique minimum* choice.
+        //
+        // Because universal regions are only partially ordered (i.e, not every two regions are
+        // comparable), we will ignore any region that doesn't compare to all others when picking
+        // the minimum choice.
+        // For example, consider `choice_regions = ['static, 'a, 'b, 'c, 'd, 'e]`, where
+        // `'static: 'a, 'static: 'b, 'a: 'c, 'b: 'c, 'c: 'd, 'c: 'e`.
+        // `['d, 'e]` are ignored because they do not compare - the same goes for `['a, 'b]`.
+        let totally_ordered_subset = choice_regions.iter().copied().filter(|&r1| {
+            choice_regions.iter().all(|&r2| {
+                self.universal_region_relations.outlives(r1, r2)
+                    || self.universal_region_relations.outlives(r2, r1)
+            })
+        });
+        // Now we're left with `['static, 'c]`. Pick `'c` as the minimum!
+        let Some(min_choice) = totally_ordered_subset.reduce(|r1, r2| {
+            let r1_outlives_r2 = self.universal_region_relations.outlives(r1, r2);
+            let r2_outlives_r1 = self.universal_region_relations.outlives(r2, r1);
+            match (r1_outlives_r2, r2_outlives_r1) {
+                (true, true) => r1.min(r2),
+                (true, false) => r2,
+                (false, true) => r1,
+                (false, false) => bug!("incomparable regions in total order"),
+            }
+        }) else {
+            debug!("no unique minimum choice");
             return false;
         };
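
The rewritten comments describe the new algorithm: restrict `choice_regions` to the subset whose members compare with every other choice (a totally ordered subset), then reduce that subset to its unique minimum. A minimal standalone sketch of the same selection, with regions as plain integers and a made-up `outlives` relation mirroring the example in the comment ('static = 0 through 'e = 5); this is not the rustc API:

```rust
fn min_choice(choices: &[u32], outlives: impl Fn(u32, u32) -> bool + Copy) -> Option<u32> {
    // Keep only choices that compare with every other choice: the totally ordered subset.
    let totally_ordered_subset = choices
        .iter()
        .copied()
        .filter(|&r1| choices.iter().all(|&r2| outlives(r1, r2) || outlives(r2, r1)));
    // Reduce the subset to its unique minimum element.
    totally_ordered_subset.reduce(|r1, r2| match (outlives(r1, r2), outlives(r2, r1)) {
        (true, true) => r1.min(r2), // equal in the order: break the tie by index
        (true, false) => r2,        // r1 outlives r2, so r2 is the smaller region
        (false, true) => r1,
        (false, false) => unreachable!("incomparable regions in the totally ordered subset"),
    })
}

fn main() {
    // 'static(0) outlives everything; 'a(1) and 'b(2) outlive 'c(3), 'd(4), 'e(5);
    // 'c outlives 'd and 'e. The relation is reflexive and already transitively closed.
    let outlives = |a: u32, b: u32| {
        a == b || a == 0 || ((a == 1 || a == 2) && b >= 3) || (a == 3 && b >= 4)
    };
    // The totally ordered subset is {'static, 'c}; the unique minimum is 'c.
    assert_eq!(min_choice(&[0, 1, 2, 3, 4, 5], outlives), Some(3));
}
```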

View file

@ -64,6 +64,7 @@ use rustc_errors::{Applicability, DiagnosticBuilder, DiagnosticStyledString};
use rustc_hir as hir; use rustc_hir as hir;
use rustc_hir::def::DefKind; use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::Visitor;
use rustc_hir::lang_items::LangItem; use rustc_hir::lang_items::LangItem;
use rustc_hir::Node; use rustc_hir::Node;
use rustc_middle::dep_graph::DepContext; use rustc_middle::dep_graph::DepContext;
@ -1985,6 +1986,70 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
(ty::Bool, ty::Tuple(list)) => if list.len() == 0 { (ty::Bool, ty::Tuple(list)) => if list.len() == 0 {
self.suggest_let_for_letchains(&mut err, &trace.cause, span); self.suggest_let_for_letchains(&mut err, &trace.cause, span);
} }
(ty::Array(_, _), ty::Array(_, _)) => 'block: {
let hir = self.tcx.hir();
let TypeError::FixedArraySize(sz) = terr else {
break 'block;
};
let tykind = match hir.find_by_def_id(trace.cause.body_id) {
Some(hir::Node::Item(hir::Item {
kind: hir::ItemKind::Fn(_, _, body_id),
..
})) => {
let body = hir.body(*body_id);
struct LetVisitor<'v> {
span: Span,
result: Option<&'v hir::Ty<'v>>,
}
impl<'v> Visitor<'v> for LetVisitor<'v> {
fn visit_stmt(&mut self, s: &'v hir::Stmt<'v>) {
if self.result.is_some() {
return;
}
// Find a local statement where the initializer has
// the same span as the error and the type is specified.
if let hir::Stmt {
kind: hir::StmtKind::Local(hir::Local {
init: Some(hir::Expr {
span: init_span,
..
}),
ty: Some(array_ty),
..
}),
..
} = s
&& init_span == &self.span {
self.result = Some(*array_ty);
}
}
}
let mut visitor = LetVisitor {span, result: None};
visitor.visit_body(body);
visitor.result.map(|r| &r.peel_refs().kind)
}
Some(hir::Node::Item(hir::Item {
kind: hir::ItemKind::Const(ty, _),
..
})) => {
Some(&ty.peel_refs().kind)
}
_ => None
};
if let Some(tykind) = tykind
&& let hir::TyKind::Array(_, length) = tykind
&& let hir::ArrayLen::Body(hir::AnonConst { hir_id, .. }) = length
&& let Some(span) = self.tcx.hir().opt_span(*hir_id)
{
err.span_suggestion(
span,
"consider specifying the actual array length",
sz.found,
Applicability::MaybeIncorrect,
);
}
}
_ => {} _ => {}
} }
} }

View file

@ -1,4 +1,3 @@
pub use crate::passes::BoxedResolver;
use crate::util; use crate::util;
use rustc_ast::token; use rustc_ast::token;

View file

@ -23,9 +23,9 @@ use rustc_parse::{parse_crate_from_file, parse_crate_from_source_str, validate_a
use rustc_passes::{self, hir_stats, layout_test}; use rustc_passes::{self, hir_stats, layout_test};
use rustc_plugin_impl as plugin; use rustc_plugin_impl as plugin;
use rustc_query_impl::{OnDiskCache, Queries as TcxQueries}; use rustc_query_impl::{OnDiskCache, Queries as TcxQueries};
use rustc_resolve::{Resolver, ResolverArenas}; use rustc_resolve::Resolver;
use rustc_session::config::{CrateType, Input, OutputFilenames, OutputType}; use rustc_session::config::{CrateType, Input, OutputFilenames, OutputType};
use rustc_session::cstore::{CrateStoreDyn, MetadataLoader, MetadataLoaderDyn, Untracked}; use rustc_session::cstore::{CrateStoreDyn, MetadataLoader, Untracked};
use rustc_session::output::filename_for_input; use rustc_session::output::filename_for_input;
use rustc_session::search_paths::PathKind; use rustc_session::search_paths::PathKind;
use rustc_session::{Limit, Session}; use rustc_session::{Limit, Session};
@ -37,9 +37,7 @@ use rustc_trait_selection::traits;
use std::any::Any; use std::any::Any;
use std::ffi::OsString; use std::ffi::OsString;
use std::io::{self, BufWriter, Write}; use std::io::{self, BufWriter, Write};
use std::marker::PhantomPinned;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::{Arc, LazyLock}; use std::sync::{Arc, LazyLock};
use std::{env, fs, iter}; use std::{env, fs, iter};
@ -73,84 +71,6 @@ fn count_nodes(krate: &ast::Crate) -> usize {
counter.count counter.count
} }
pub use boxed_resolver::BoxedResolver;
mod boxed_resolver {
use super::*;
pub struct BoxedResolver(Pin<Box<BoxedResolverInner>>);
struct BoxedResolverInner {
session: Lrc<Session>,
resolver_arenas: Option<ResolverArenas<'static>>,
resolver: Option<Resolver<'static>>,
_pin: PhantomPinned,
}
// Note: Drop order is important to prevent dangling references. Resolver must be dropped first,
// then resolver_arenas and session.
impl Drop for BoxedResolverInner {
fn drop(&mut self) {
self.resolver.take();
self.resolver_arenas.take();
}
}
impl BoxedResolver {
pub(super) fn new(
session: Lrc<Session>,
make_resolver: impl for<'a> FnOnce(&'a Session, &'a ResolverArenas<'a>) -> Resolver<'a>,
) -> BoxedResolver {
let mut boxed_resolver = Box::new(BoxedResolverInner {
session,
resolver_arenas: Some(Resolver::arenas()),
resolver: None,
_pin: PhantomPinned,
});
// SAFETY: `make_resolver` takes a resolver arena with an arbitrary lifetime and
// returns a resolver with the same lifetime as the arena. We ensure that the arena
// outlives the resolver in the drop impl and elsewhere so these transmutes are sound.
unsafe {
let resolver = make_resolver(
std::mem::transmute::<&Session, &Session>(&boxed_resolver.session),
std::mem::transmute::<&ResolverArenas<'_>, &ResolverArenas<'_>>(
boxed_resolver.resolver_arenas.as_ref().unwrap(),
),
);
boxed_resolver.resolver = Some(resolver);
BoxedResolver(Pin::new_unchecked(boxed_resolver))
}
}
pub fn access<F: for<'a> FnOnce(&mut Resolver<'a>) -> R, R>(&mut self, f: F) -> R {
// SAFETY: The resolver doesn't need to be pinned.
let mut resolver = unsafe {
self.0.as_mut().map_unchecked_mut(|boxed_resolver| &mut boxed_resolver.resolver)
};
f((&mut *resolver).as_mut().unwrap())
}
pub fn into_outputs(mut self) -> ty::ResolverOutputs {
// SAFETY: The resolver doesn't need to be pinned.
let mut resolver = unsafe {
self.0.as_mut().map_unchecked_mut(|boxed_resolver| &mut boxed_resolver.resolver)
};
resolver.take().unwrap().into_outputs()
}
}
}
pub fn create_resolver(
sess: Lrc<Session>,
metadata_loader: Box<MetadataLoaderDyn>,
krate: &ast::Crate,
crate_name: Symbol,
) -> BoxedResolver {
trace!("create_resolver");
BoxedResolver::new(sess, move |sess, resolver_arenas| {
Resolver::new(sess, krate, crate_name, metadata_loader, resolver_arenas)
})
}
pub fn register_plugins<'a>( pub fn register_plugins<'a>(
sess: &'a Session, sess: &'a Session,
metadata_loader: &'a dyn MetadataLoader, metadata_loader: &'a dyn MetadataLoader,
@ -256,7 +176,7 @@ pub fn configure_and_expand(
lint_store: &LintStore, lint_store: &LintStore,
mut krate: ast::Crate, mut krate: ast::Crate,
crate_name: Symbol, crate_name: Symbol,
resolver: &mut Resolver<'_>, resolver: &mut Resolver<'_, '_>,
) -> Result<ast::Crate> { ) -> Result<ast::Crate> {
trace!("configure_and_expand"); trace!("configure_and_expand");
pre_expansion_lint(sess, lint_store, resolver.registered_tools(), &krate, crate_name); pre_expansion_lint(sess, lint_store, resolver.registered_tools(), &krate, crate_name);

View file

@ -1,6 +1,6 @@
use crate::errors::{FailedWritingFile, RustcErrorFatal, RustcErrorUnexpectedAnnotation}; use crate::errors::{FailedWritingFile, RustcErrorFatal, RustcErrorUnexpectedAnnotation};
use crate::interface::{Compiler, Result}; use crate::interface::{Compiler, Result};
use crate::passes::{self, BoxedResolver}; use crate::passes;
use rustc_ast as ast; use rustc_ast as ast;
use rustc_codegen_ssa::traits::CodegenBackend; use rustc_codegen_ssa::traits::CodegenBackend;
@ -15,6 +15,7 @@ use rustc_middle::arena::Arena;
use rustc_middle::dep_graph::DepGraph; use rustc_middle::dep_graph::DepGraph;
use rustc_middle::ty::{self, GlobalCtxt, TyCtxt}; use rustc_middle::ty::{self, GlobalCtxt, TyCtxt};
use rustc_query_impl::Queries as TcxQueries; use rustc_query_impl::Queries as TcxQueries;
use rustc_resolve::Resolver;
use rustc_session::config::{self, OutputFilenames, OutputType}; use rustc_session::config::{self, OutputFilenames, OutputType};
use rustc_session::{output::find_crate_name, Session}; use rustc_session::{output::find_crate_name, Session};
use rustc_span::symbol::sym; use rustc_span::symbol::sym;
@ -87,7 +88,6 @@ pub struct Queries<'tcx> {
parse: Query<ast::Crate>, parse: Query<ast::Crate>,
crate_name: Query<Symbol>, crate_name: Query<Symbol>,
register_plugins: Query<(ast::Crate, Lrc<LintStore>)>, register_plugins: Query<(ast::Crate, Lrc<LintStore>)>,
expansion: Query<(Lrc<ast::Crate>, BoxedResolver, Lrc<LintStore>)>,
dep_graph: Query<DepGraph>, dep_graph: Query<DepGraph>,
// This just points to what's in `gcx_cell`. // This just points to what's in `gcx_cell`.
gcx: Query<&'tcx GlobalCtxt<'tcx>>, gcx: Query<&'tcx GlobalCtxt<'tcx>>,
@ -106,7 +106,6 @@ impl<'tcx> Queries<'tcx> {
parse: Default::default(), parse: Default::default(),
crate_name: Default::default(), crate_name: Default::default(),
register_plugins: Default::default(), register_plugins: Default::default(),
expansion: Default::default(),
dep_graph: Default::default(), dep_graph: Default::default(),
gcx: Default::default(), gcx: Default::default(),
ongoing_codegen: Default::default(), ongoing_codegen: Default::default(),
@ -168,28 +167,6 @@ impl<'tcx> Queries<'tcx> {
}) })
} }
pub fn expansion(
&self,
) -> Result<QueryResult<'_, (Lrc<ast::Crate>, BoxedResolver, Lrc<LintStore>)>> {
trace!("expansion");
self.expansion.compute(|| {
let crate_name = *self.crate_name()?.borrow();
let (krate, lint_store) = self.register_plugins()?.steal();
let _timer = self.session().timer("configure_and_expand");
let sess = self.session();
let mut resolver = passes::create_resolver(
sess.clone(),
self.codegen_backend().metadata_loader(),
&krate,
crate_name,
);
let krate = resolver.access(|resolver| {
passes::configure_and_expand(sess, &lint_store, krate, crate_name, resolver)
})?;
Ok((Lrc::new(krate), resolver, lint_store))
})
}
fn dep_graph(&self) -> Result<QueryResult<'_, DepGraph>> { fn dep_graph(&self) -> Result<QueryResult<'_, DepGraph>> {
self.dep_graph.compute(|| { self.dep_graph.compute(|| {
let sess = self.session(); let sess = self.session();
@ -209,13 +186,34 @@ impl<'tcx> Queries<'tcx> {
pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'_, &'tcx GlobalCtxt<'tcx>>> { pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'_, &'tcx GlobalCtxt<'tcx>>> {
self.gcx.compute(|| { self.gcx.compute(|| {
let crate_name = *self.crate_name()?.borrow(); let crate_name = *self.crate_name()?.borrow();
let (krate, resolver, lint_store) = self.expansion()?.steal(); let (krate, lint_store) = self.register_plugins()?.steal();
let (krate, resolver_outputs) = {
let _timer = self.session().timer("configure_and_expand");
let sess = self.session();
let arenas = Resolver::arenas();
let mut resolver = Resolver::new(
sess,
&krate,
crate_name,
self.codegen_backend().metadata_loader(),
&arenas,
);
let krate = passes::configure_and_expand(
sess,
&lint_store,
krate,
crate_name,
&mut resolver,
)?;
(Lrc::new(krate), resolver.into_outputs())
};
let ty::ResolverOutputs { let ty::ResolverOutputs {
untracked, untracked,
global_ctxt: untracked_resolutions, global_ctxt: untracked_resolutions,
ast_lowering: untracked_resolver_for_lowering, ast_lowering: untracked_resolver_for_lowering,
} = resolver.into_outputs(); } = resolver_outputs;
let gcx = passes::create_global_ctxt( let gcx = passes::create_global_ctxt(
self.compiler, self.compiler,

View file

@ -8,7 +8,7 @@
macro_rules! arena_types { macro_rules! arena_types {
($macro:path) => ( ($macro:path) => (
$macro!([ $macro!([
[] layout: rustc_target::abi::LayoutS<rustc_target::abi::VariantIdx>, [] layout: rustc_target::abi::LayoutS,
[] fn_abi: rustc_target::abi::call::FnAbi<'tcx, rustc_middle::ty::Ty<'tcx>>, [] fn_abi: rustc_target::abi::call::FnAbi<'tcx, rustc_middle::ty::Ty<'tcx>>,
// AdtDef are interned and compared by address // AdtDef are interned and compared by address
[decode] adt_def: rustc_middle::ty::AdtDefData, [decode] adt_def: rustc_middle::ty::AdtDefData,

View file

@ -1642,6 +1642,14 @@ impl<'tcx> PlaceRef<'tcx> {
} }
} }
/// Returns `true` if this `Place` contains a `Deref` projection.
///
/// If `Place::is_indirect` returns false, the caller knows that the `Place` refers to the
/// same region of memory as its base.
pub fn is_indirect(&self) -> bool {
self.projection.iter().any(|elem| elem.is_indirect())
}
/// If MirPhase >= Derefered and if projection contains Deref, /// If MirPhase >= Derefered and if projection contains Deref,
/// It's guaranteed to be in the first place /// It's guaranteed to be in the first place
pub fn has_deref(&self) -> bool { pub fn has_deref(&self) -> bool {
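
The doc comment added here draws a semantic line: without a `Deref` projection a place always refers to memory owned by its base local, and any `Deref` makes it indirect. A toy model of that check, using simplified place and projection types rather than the rustc ones:

```rust
#[derive(Clone, Copy)]
enum ProjectionElem {
    Deref, // follow a pointer: the place may now refer to memory outside the base local
    Field, // select a field: still within the base local's memory
}

struct Place {
    local: u32,
    projection: Vec<ProjectionElem>,
}

impl Place {
    // Mirrors the new `PlaceRef::is_indirect` above: indirect as soon as any element derefs.
    fn is_indirect(&self) -> bool {
        self.projection.iter().any(|elem| matches!(elem, ProjectionElem::Deref))
    }
}

fn main() {
    // `_1.0` stays within local `_1`; `(*_1).0` may point anywhere `_1` points.
    let direct = Place { local: 1, projection: vec![ProjectionElem::Field] };
    let indirect = Place { local: 1, projection: vec![ProjectionElem::Deref, ProjectionElem::Field] };
    assert_eq!(direct.local, indirect.local); // both are based on local _1
    assert!(!direct.is_indirect());
    assert!(indirect.is_indirect());
}
```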

View file

@ -149,7 +149,7 @@ pub struct CtxtInterners<'tcx> {
const_: InternedSet<'tcx, ConstData<'tcx>>, const_: InternedSet<'tcx, ConstData<'tcx>>,
const_allocation: InternedSet<'tcx, Allocation>, const_allocation: InternedSet<'tcx, Allocation>,
bound_variable_kinds: InternedSet<'tcx, List<ty::BoundVariableKind>>, bound_variable_kinds: InternedSet<'tcx, List<ty::BoundVariableKind>>,
layout: InternedSet<'tcx, LayoutS<VariantIdx>>, layout: InternedSet<'tcx, LayoutS>,
adt_def: InternedSet<'tcx, AdtDefData>, adt_def: InternedSet<'tcx, AdtDefData>,
external_constraints: InternedSet<'tcx, ExternalConstraintsData<'tcx>>, external_constraints: InternedSet<'tcx, ExternalConstraintsData<'tcx>>,
} }
@ -1520,7 +1520,7 @@ direct_interners! {
region: mk_region(RegionKind<'tcx>): Region -> Region<'tcx>, region: mk_region(RegionKind<'tcx>): Region -> Region<'tcx>,
const_: mk_const_internal(ConstData<'tcx>): Const -> Const<'tcx>, const_: mk_const_internal(ConstData<'tcx>): Const -> Const<'tcx>,
const_allocation: intern_const_alloc(Allocation): ConstAllocation -> ConstAllocation<'tcx>, const_allocation: intern_const_alloc(Allocation): ConstAllocation -> ConstAllocation<'tcx>,
layout: intern_layout(LayoutS<VariantIdx>): Layout -> Layout<'tcx>, layout: intern_layout(LayoutS): Layout -> Layout<'tcx>,
adt_def: intern_adt_def(AdtDefData): AdtDef -> AdtDef<'tcx>, adt_def: intern_adt_def(AdtDefData): AdtDef -> AdtDef<'tcx>,
external_constraints: intern_external_constraints(ExternalConstraintsData<'tcx>): ExternalConstraints -> ExternalConstraints<'tcx>, external_constraints: intern_external_constraints(ExternalConstraintsData<'tcx>): ExternalConstraints -> ExternalConstraints<'tcx>,
} }

View file

@ -121,8 +121,10 @@ where
// for now. See discussion on [#61069]. // for now. See discussion on [#61069].
// //
// [#61069]: https://github.com/rust-lang/rust/pull/61069 // [#61069]: https://github.com/rust-lang/rust/pull/61069
if !dropped_place.is_indirect() {
self.trans.gen(dropped_place.local); self.trans.gen(dropped_place.local);
} }
}
TerminatorKind::Abort TerminatorKind::Abort
| TerminatorKind::Assert { .. } | TerminatorKind::Assert { .. }

View file

@ -1,6 +1,7 @@
#![feature(associated_type_defaults)] #![feature(associated_type_defaults)]
#![feature(box_patterns)] #![feature(box_patterns)]
#![feature(exact_size_is_empty)] #![feature(exact_size_is_empty)]
#![feature(let_chains)]
#![feature(min_specialization)] #![feature(min_specialization)]
#![feature(once_cell)] #![feature(once_cell)]
#![feature(stmt_expr_attributes)] #![feature(stmt_expr_attributes)]

View file

@@ -24,7 +24,7 @@
 //! - The bottom state denotes uninitialized memory. Because we are only doing a sound approximation
 //!   of the actual execution, we can also use this state for places where access would be UB.
 //!
-//! - The assignment logic in `State::assign_place_idx` assumes that the places are non-overlapping,
+//! - The assignment logic in `State::insert_place_idx` assumes that the places are non-overlapping,
 //!   or identical. Note that this refers to place expressions, not memory locations.
 //!
 //! - Currently, places that have their reference taken cannot be tracked. Although this would be
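
The module docs above describe the value lattice the analysis runs on: a bottom state for unreachable or uninitialized memory and a top state for unknown values. A minimal flat-lattice sketch of such a domain and its join at control-flow merges, with toy types rather than the ones this pass actually uses:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum FlatSet<T> {
    Bottom,  // no value observed yet (also used where an access would be UB)
    Elem(T), // exactly one known value
    Top,     // more than one possible value
}

impl<T: PartialEq + Copy> FlatSet<T> {
    fn join(self, other: Self) -> Self {
        use FlatSet::*;
        match (self, other) {
            (Bottom, x) | (x, Bottom) => x,
            (Elem(a), Elem(b)) if a == b => Elem(a),
            _ => Top,
        }
    }
}

fn main() {
    use FlatSet::*;
    // Two paths assign the same constant: the join keeps it.
    assert_eq!(Elem(1u8).join(Elem(1)), Elem(1));
    // Two paths disagree: the join loses precision and goes to Top.
    assert_eq!(Elem(1u8).join(Elem(2)), Top);
    // An unreachable path is the identity for join.
    assert_eq!(Bottom.join(Elem(7u8)), Elem(7));
}
```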
@ -35,6 +35,7 @@
use std::fmt::{Debug, Formatter}; use std::fmt::{Debug, Formatter};
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec; use rustc_index::vec::IndexVec;
use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*; use rustc_middle::mir::*;
@ -64,10 +65,8 @@ pub trait ValueAnalysis<'tcx> {
StatementKind::Assign(box (place, rvalue)) => { StatementKind::Assign(box (place, rvalue)) => {
self.handle_assign(*place, rvalue, state); self.handle_assign(*place, rvalue, state);
} }
StatementKind::SetDiscriminant { .. } => { StatementKind::SetDiscriminant { box ref place, .. } => {
// Could treat this as writing a constant to a pseudo-place. state.flood_discr(place.as_ref(), self.map());
// But discriminants are currently not tracked, so we do nothing.
// Related: https://github.com/rust-lang/unsafe-code-guidelines/issues/84
} }
StatementKind::Intrinsic(box intrinsic) => { StatementKind::Intrinsic(box intrinsic) => {
self.handle_intrinsic(intrinsic, state); self.handle_intrinsic(intrinsic, state);
@ -446,26 +445,51 @@ impl<V: Clone + HasTop + HasBottom> State<V> {
} }
pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) { pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
if let Some(root) = map.find(place) {
self.flood_idx_with(root, map, value);
}
}
pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) {
self.flood_with(place, map, V::top())
}
pub fn flood_idx_with(&mut self, place: PlaceIndex, map: &Map, value: V) {
let StateData::Reachable(values) = &mut self.0 else { return }; let StateData::Reachable(values) = &mut self.0 else { return };
map.preorder_invoke(place, &mut |place| { map.for_each_aliasing_place(place, None, &mut |place| {
if let Some(vi) = map.places[place].value_index { if let Some(vi) = map.places[place].value_index {
values[vi] = value.clone(); values[vi] = value.clone();
} }
}); });
} }
pub fn flood_idx(&mut self, place: PlaceIndex, map: &Map) { pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) {
self.flood_idx_with(place, map, V::top()) self.flood_with(place, map, V::top())
}
pub fn flood_discr_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
let StateData::Reachable(values) = &mut self.0 else { return };
map.for_each_aliasing_place(place, Some(TrackElem::Discriminant), &mut |place| {
if let Some(vi) = map.places[place].value_index {
values[vi] = value.clone();
}
});
}
pub fn flood_discr(&mut self, place: PlaceRef<'_>, map: &Map) {
self.flood_discr_with(place, map, V::top())
}
/// Low-level method that assigns to a place.
/// This does nothing if the place is not tracked.
///
/// The target place must have been flooded before calling this method.
pub fn insert_idx(&mut self, target: PlaceIndex, result: ValueOrPlace<V>, map: &Map) {
match result {
ValueOrPlace::Value(value) => self.insert_value_idx(target, value, map),
ValueOrPlace::Place(source) => self.insert_place_idx(target, source, map),
}
}
/// Low-level method that assigns a value to a place.
/// This does nothing if the place is not tracked.
///
/// The target place must have been flooded before calling this method.
pub fn insert_value_idx(&mut self, target: PlaceIndex, value: V, map: &Map) {
let StateData::Reachable(values) = &mut self.0 else { return };
if let Some(value_index) = map.places[target].value_index {
values[value_index] = value;
}
} }
/// Copies `source` to `target`, including all tracked places beneath. /// Copies `source` to `target`, including all tracked places beneath.
@ -473,50 +497,41 @@ impl<V: Clone + HasTop + HasBottom> State<V> {
/// If `target` contains a place that is not contained in `source`, it will be overwritten with /// If `target` contains a place that is not contained in `source`, it will be overwritten with
/// Top. Also, because this will copy all entries one after another, it may only be used for /// Top. Also, because this will copy all entries one after another, it may only be used for
/// places that are non-overlapping or identical. /// places that are non-overlapping or identical.
pub fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) { ///
/// The target place must have been flooded before calling this method.
fn insert_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
let StateData::Reachable(values) = &mut self.0 else { return }; let StateData::Reachable(values) = &mut self.0 else { return };
// If both places are tracked, we copy the value to the target. If the target is tracked, // If both places are tracked, we copy the value to the target.
// but the source is not, we have to invalidate the value in target. If the target is not // If the target is tracked, but the source is not, we do nothing, as invalidation has
// tracked, then we don't have to do anything. // already been performed.
if let Some(target_value) = map.places[target].value_index { if let Some(target_value) = map.places[target].value_index {
if let Some(source_value) = map.places[source].value_index { if let Some(source_value) = map.places[source].value_index {
values[target_value] = values[source_value].clone(); values[target_value] = values[source_value].clone();
} else {
values[target_value] = V::top();
} }
} }
for target_child in map.children(target) { for target_child in map.children(target) {
// Try to find corresponding child and recurse. Reasoning is similar as above. // Try to find corresponding child and recurse. Reasoning is similar as above.
let projection = map.places[target_child].proj_elem.unwrap(); let projection = map.places[target_child].proj_elem.unwrap();
if let Some(source_child) = map.projections.get(&(source, projection)) { if let Some(source_child) = map.projections.get(&(source, projection)) {
self.assign_place_idx(target_child, *source_child, map); self.insert_place_idx(target_child, *source_child, map);
} else {
self.flood_idx(target_child, map);
} }
} }
} }
/// Helper method to interpret `target = result`.
pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace<V>, map: &Map) { pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace<V>, map: &Map) {
self.flood(target, map);
if let Some(target) = map.find(target) { if let Some(target) = map.find(target) {
self.assign_idx(target, result, map); self.insert_idx(target, result, map);
} else {
// We don't track this place nor any projections, assignment can be ignored.
} }
} }
pub fn assign_idx(&mut self, target: PlaceIndex, result: ValueOrPlace<V>, map: &Map) { /// Helper method for assignments to a discriminant.
match result { pub fn assign_discr(&mut self, target: PlaceRef<'_>, result: ValueOrPlace<V>, map: &Map) {
ValueOrPlace::Value(value) => { self.flood_discr(target, map);
// First flood the target place in case we also track any projections (although if let Some(target) = map.find_discr(target) {
// this scenario is currently not well-supported by the API). self.insert_idx(target, result, map);
self.flood_idx(target, map);
let StateData::Reachable(values) = &mut self.0 else { return };
if let Some(value_index) = map.places[target].value_index {
values[value_index] = value;
}
}
ValueOrPlace::Place(source) => self.assign_place_idx(target, source, map),
} }
} }
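
The doc comments above fix a protocol: `assign` first floods (invalidates) the target and every tracked place beneath it, and only then calls the low-level `insert_*` methods, which therefore never need to handle invalidation themselves. A toy sketch of that two-step discipline, keyed by place strings instead of the `Map`/`PlaceIndex` machinery:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Debug)]
enum Value {
    Top,        // unknown
    Const(i64), // known constant
}

struct State {
    values: HashMap<String, Value>, // only *tracked* places have an entry
}

impl State {
    // Step 1: invalidate the target and every tracked place beneath it.
    fn flood(&mut self, place: &str) {
        let prefix = format!("{place}.");
        for (key, value) in self.values.iter_mut() {
            if key.as_str() == place || key.starts_with(&prefix) {
                *value = Value::Top;
            }
        }
    }

    // Step 2: record a value for the target; assumes `flood` already ran for it.
    fn insert(&mut self, place: &str, value: Value) {
        if let Some(slot) = self.values.get_mut(place) {
            *slot = value;
        }
    }

    fn assign(&mut self, place: &str, value: Value) {
        self.flood(place);
        self.insert(place, value);
    }
}

fn main() {
    let mut state = State {
        values: HashMap::from([
            ("x.0".to_string(), Value::Const(1)),
            ("x.1".to_string(), Value::Const(2)),
        ]),
    };
    // Assigning to all of `x` must forget what we knew about its fields,
    // even though `x` itself is not a tracked place.
    state.assign("x", Value::Const(0));
    assert_eq!(state.values["x.0"], Value::Top);
    assert_eq!(state.values["x.1"], Value::Top);
}
```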
@ -525,6 +540,14 @@ impl<V: Clone + HasTop + HasBottom> State<V> {
map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::top()) map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::top())
} }
/// Retrieve the value stored for a place, or ⊤ if it is not tracked.
pub fn get_discr(&self, place: PlaceRef<'_>, map: &Map) -> V {
match map.find_discr(place) {
Some(place) => self.get_idx(place, map),
None => V::top(),
}
}
/// Retrieve the value stored for a place index, or ⊤ if it is not tracked. /// Retrieve the value stored for a place index, or ⊤ if it is not tracked.
pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V { pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V {
match &self.0 { match &self.0 {
@ -581,15 +604,15 @@ impl Map {
/// This is currently the only way to create a [`Map`]. The way in which the tracked places are /// This is currently the only way to create a [`Map`]. The way in which the tracked places are
/// chosen is an implementation detail and may not be relied upon (other than that their type /// chosen is an implementation detail and may not be relied upon (other than that their type
/// passes the filter). /// passes the filter).
#[instrument(skip_all, level = "debug")]
pub fn from_filter<'tcx>( pub fn from_filter<'tcx>(
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
body: &Body<'tcx>, body: &Body<'tcx>,
filter: impl FnMut(Ty<'tcx>) -> bool, filter: impl FnMut(Ty<'tcx>) -> bool,
place_limit: Option<usize>,
) -> Self { ) -> Self {
let mut map = Self::new(); let mut map = Self::new();
let exclude = excluded_locals(body); let exclude = excluded_locals(body);
map.register_with_filter(tcx, body, filter, &exclude); map.register_with_filter(tcx, body, filter, exclude, place_limit);
debug!("registered {} places ({} nodes in total)", map.value_count, map.places.len()); debug!("registered {} places ({} nodes in total)", map.value_count, map.places.len());
map map
} }
@ -600,20 +623,28 @@ impl Map {
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
body: &Body<'tcx>, body: &Body<'tcx>,
mut filter: impl FnMut(Ty<'tcx>) -> bool, mut filter: impl FnMut(Ty<'tcx>) -> bool,
exclude: &IndexVec<Local, bool>, exclude: BitSet<Local>,
place_limit: Option<usize>,
) { ) {
// We use this vector as stack, pushing and popping projections. // We use this vector as stack, pushing and popping projections.
let mut projection = Vec::new(); let mut projection = Vec::new();
for (local, decl) in body.local_decls.iter_enumerated() { for (local, decl) in body.local_decls.iter_enumerated() {
if !exclude[local] { if !exclude.contains(local) {
self.register_with_filter_rec(tcx, local, &mut projection, decl.ty, &mut filter); self.register_with_filter_rec(
tcx,
local,
&mut projection,
decl.ty,
&mut filter,
place_limit,
);
} }
} }
} }
/// Potentially register the (local, projection) place and its fields, recursively. /// Potentially register the (local, projection) place and its fields, recursively.
/// ///
/// Invariant: The projection must only contain fields. /// Invariant: The projection must only contain trackable elements.
fn register_with_filter_rec<'tcx>( fn register_with_filter_rec<'tcx>(
&mut self, &mut self,
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
@ -621,27 +652,56 @@ impl Map {
projection: &mut Vec<PlaceElem<'tcx>>, projection: &mut Vec<PlaceElem<'tcx>>,
ty: Ty<'tcx>, ty: Ty<'tcx>,
filter: &mut impl FnMut(Ty<'tcx>) -> bool, filter: &mut impl FnMut(Ty<'tcx>) -> bool,
place_limit: Option<usize>,
) { ) {
// Note: The framework supports only scalars for now. if let Some(place_limit) = place_limit && self.value_count >= place_limit {
if filter(ty) && ty.is_scalar() { return
}
// We know that the projection only contains trackable elements. // We know that the projection only contains trackable elements.
let place = self.make_place(local, projection).unwrap(); let place = self.make_place(local, projection).unwrap();
// Allocate a value slot if it doesn't have one. // Allocate a value slot if it doesn't have one, and the user requested one.
if self.places[place].value_index.is_none() { if self.places[place].value_index.is_none() && filter(ty) {
self.places[place].value_index = Some(self.value_count.into()); self.places[place].value_index = Some(self.value_count.into());
self.value_count += 1; self.value_count += 1;
} }
if ty.is_enum() {
let discr_ty = ty.discriminant_ty(tcx);
if filter(discr_ty) {
let discr = *self
.projections
.entry((place, TrackElem::Discriminant))
.or_insert_with(|| {
// Prepend new child to the linked list.
let next = self.places.push(PlaceInfo::new(Some(TrackElem::Discriminant)));
self.places[next].next_sibling = self.places[place].first_child;
self.places[place].first_child = Some(next);
next
});
// Allocate a value slot if it doesn't have one.
if self.places[discr].value_index.is_none() {
self.places[discr].value_index = Some(self.value_count.into());
self.value_count += 1;
}
}
} }
// Recurse with all fields of this place. // Recurse with all fields of this place.
iter_fields(ty, tcx, |variant, field, ty| { iter_fields(ty, tcx, |variant, field, ty| {
if variant.is_some() { if let Some(variant) = variant {
// Downcasts are currently not supported. projection.push(PlaceElem::Downcast(None, variant));
let _ = self.make_place(local, projection);
projection.push(PlaceElem::Field(field, ty));
self.register_with_filter_rec(tcx, local, projection, ty, filter, place_limit);
projection.pop();
projection.pop();
return; return;
} }
projection.push(PlaceElem::Field(field, ty)); projection.push(PlaceElem::Field(field, ty));
self.register_with_filter_rec(tcx, local, projection, ty, filter); self.register_with_filter_rec(tcx, local, projection, ty, filter, place_limit);
projection.pop(); projection.pop();
}); });
} }
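The `first_child`/`next_sibling` fields used above store the tree of tracked places as a left-child, right-sibling structure, so prepending a freshly pushed node is O(1) and child iteration just walks the sibling chain. A standalone sketch of that representation (illustrative names, plain `Vec` instead of `IndexVec`):

// Illustrative left-child/right-sibling tree, the same shape as the PlaceInfo
// nodes: each node stores its first child and the next sibling in its
// parent's child list.
#[derive(Default)]
struct Node {
    first_child: Option<usize>,
    next_sibling: Option<usize>,
}

struct Tree {
    nodes: Vec<Node>,
}

impl Tree {
    // Prepend a new child to `parent`'s child list and return its index.
    fn add_child(&mut self, parent: usize) -> usize {
        let next = self.nodes.len();
        self.nodes.push(Node::default());
        self.nodes[next].next_sibling = self.nodes[parent].first_child;
        self.nodes[parent].first_child = Some(next);
        next
    }

    // Iterate the children by walking the sibling links.
    fn children(&self, parent: usize) -> Vec<usize> {
        let mut out = Vec::new();
        let mut cur = self.nodes[parent].first_child;
        while let Some(idx) = cur {
            out.push(idx);
            cur = self.nodes[idx].next_sibling;
        }
        out
    }
}

fn main() {
    let mut tree = Tree { nodes: vec![Node::default()] };
    let a = tree.add_child(0);
    let b = tree.add_child(0);
    // Children come back most-recently-added first: [b, a].
    println!("{:?} (a = {a}, b = {b})", tree.children(0));
}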
@ -684,23 +744,105 @@ impl Map {
} }
/// Locates the given place, if it exists in the tree. /// Locates the given place, if it exists in the tree.
pub fn find(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> { pub fn find_extra(
&self,
place: PlaceRef<'_>,
extra: impl IntoIterator<Item = TrackElem>,
) -> Option<PlaceIndex> {
let mut index = *self.locals.get(place.local)?.as_ref()?; let mut index = *self.locals.get(place.local)?.as_ref()?;
for &elem in place.projection { for &elem in place.projection {
index = self.apply(index, elem.try_into().ok()?)?; index = self.apply(index, elem.try_into().ok()?)?;
} }
for elem in extra {
index = self.apply(index, elem)?;
}
Some(index) Some(index)
} }
/// Locates the given place, if it exists in the tree.
pub fn find(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
self.find_extra(place, [])
}
/// Locates the given place and applies `Discriminant`, if it exists in the tree.
pub fn find_discr(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
self.find_extra(place, [TrackElem::Discriminant])
}
/// Iterate over all direct children. /// Iterate over all direct children.
pub fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ { pub fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ {
Children::new(self, parent) Children::new(self, parent)
} }
/// Invoke a function on the given place and all places that may alias it.
///
/// In particular, when the given place has a variant downcast, we invoke the function on all
/// the other variants.
///
/// `tail_elem` allows us to support discriminants that are not a place in MIR, but that we track
/// as such.
pub fn for_each_aliasing_place(
&self,
place: PlaceRef<'_>,
tail_elem: Option<TrackElem>,
f: &mut impl FnMut(PlaceIndex),
) {
if place.is_indirect() {
// We do not track indirect places.
return;
}
let Some(&Some(mut index)) = self.locals.get(place.local) else {
// The local is not tracked at all, so it does not alias anything.
return;
};
let elems = place
.projection
.iter()
.map(|&elem| elem.try_into())
.chain(tail_elem.map(Ok).into_iter());
for elem in elems {
// A field aliases the parent place.
f(index);
let Ok(elem) = elem else { return };
let sub = self.apply(index, elem);
if let TrackElem::Variant(..) | TrackElem::Discriminant = elem {
// Enum variant fields and enum discriminants alias each other.
self.for_each_variant_sibling(index, sub, f);
}
if let Some(sub) = sub {
index = sub
} else {
return;
}
}
self.preorder_invoke(index, f);
}
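To make the doc comment above concrete, consider a hypothetical `enum E { A(u8, u8), B(u8) }` and a tracked local `x: E`. Invalidating an assignment to `(x as A).0` visits `x` and `(x as A)` (every ancestor aliases the written place), then `discriminant(x)` and the whole `(x as B)` subtree (the discriminant and the other variants alias any downcast), and finally `(x as A).0` together with its descendants; the sibling field `(x as A).1` is not visited, since assigning one field of a variant says nothing about its neighbours.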
/// Invoke the given function on all the descendants of the given place, except one branch.
fn for_each_variant_sibling(
&self,
parent: PlaceIndex,
preserved_child: Option<PlaceIndex>,
f: &mut impl FnMut(PlaceIndex),
) {
for sibling in self.children(parent) {
let elem = self.places[sibling].proj_elem;
// Only invalidate variants and discriminant. Fields (for generators) are not
// invalidated by assignment to a variant.
if let Some(TrackElem::Variant(..) | TrackElem::Discriminant) = elem
// Only invalidate the other variants, the current one is fine.
&& Some(sibling) != preserved_child
{
self.preorder_invoke(sibling, f);
}
}
}
/// Invoke a function on the given place and all descendants. /// Invoke a function on the given place and all descendants.
pub fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) { fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) {
f(root); f(root);
for child in self.children(root) { for child in self.children(root) {
self.preorder_invoke(child, f); self.preorder_invoke(child, f);
@ -759,6 +901,7 @@ impl<'a> Iterator for Children<'a> {
} }
/// Used as the result of an operand or r-value. /// Used as the result of an operand or r-value.
#[derive(Debug)]
pub enum ValueOrPlace<V> { pub enum ValueOrPlace<V> {
Value(V), Value(V),
Place(PlaceIndex), Place(PlaceIndex),
@ -776,6 +919,8 @@ impl<V: HasTop> ValueOrPlace<V> {
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum TrackElem { pub enum TrackElem {
Field(Field), Field(Field),
Variant(VariantIdx),
Discriminant,
} }
impl<V, T> TryFrom<ProjectionElem<V, T>> for TrackElem { impl<V, T> TryFrom<ProjectionElem<V, T>> for TrackElem {
@ -784,6 +929,7 @@ impl<V, T> TryFrom<ProjectionElem<V, T>> for TrackElem {
fn try_from(value: ProjectionElem<V, T>) -> Result<Self, Self::Error> { fn try_from(value: ProjectionElem<V, T>) -> Result<Self, Self::Error> {
match value { match value {
ProjectionElem::Field(field, _) => Ok(TrackElem::Field(field)), ProjectionElem::Field(field, _) => Ok(TrackElem::Field(field)),
ProjectionElem::Downcast(_, idx) => Ok(TrackElem::Variant(idx)),
_ => Err(()), _ => Err(()),
} }
} }
@ -824,26 +970,27 @@ pub fn iter_fields<'tcx>(
} }
/// Returns all locals with projections that have their reference or address taken. /// Returns all locals with projections that have their reference or address taken.
pub fn excluded_locals(body: &Body<'_>) -> IndexVec<Local, bool> { pub fn excluded_locals(body: &Body<'_>) -> BitSet<Local> {
struct Collector { struct Collector {
result: IndexVec<Local, bool>, result: BitSet<Local>,
} }
impl<'tcx> Visitor<'tcx> for Collector { impl<'tcx> Visitor<'tcx> for Collector {
fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) { fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) {
if context.is_borrow() if (context.is_borrow()
|| context.is_address_of() || context.is_address_of()
|| context.is_drop() || context.is_drop()
|| context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput) || context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput))
&& !place.is_indirect()
{ {
// A pointer to a place could be used to access other places with the same local, // A pointer to a place could be used to access other places with the same local,
// hence we have to exclude the local completely. // hence we have to exclude the local completely.
self.result[place.local] = true; self.result.insert(place.local);
} }
} }
} }
let mut collector = Collector { result: IndexVec::from_elem(false, &body.local_decls) }; let mut collector = Collector { result: BitSet::new_empty(body.local_decls.len()) };
collector.visit_body(body); collector.visit_body(body);
collector.result collector.result
} }
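Switching from `IndexVec<Local, bool>` to `BitSet<Local>` keeps the same dense, index-keyed semantics while storing one bit per local and gaining set operations such as `union` (used by SROA below). A simplified standalone sketch of what such a bitset does, not the `rustc_index` implementation:

// Simplified dense bitset keyed by small indices, mirroring the operations the
// diff relies on: new_empty, insert, contains, and union.
struct BitSet {
    words: Vec<u64>,
}

impl BitSet {
    fn new_empty(len: usize) -> Self {
        BitSet { words: vec![0; (len + 63) / 64] }
    }

    fn insert(&mut self, idx: usize) {
        self.words[idx / 64] |= 1u64 << (idx % 64);
    }

    fn contains(&self, idx: usize) -> bool {
        self.words[idx / 64] & (1u64 << (idx % 64)) != 0
    }

    fn union(&mut self, other: &BitSet) {
        for (w, o) in self.words.iter_mut().zip(&other.words) {
            *w |= *o;
        }
    }
}

fn main() {
    let mut excluded = BitSet::new_empty(128);
    excluded.insert(3);

    let mut dead = BitSet::new_empty(128);
    dead.insert(70);

    excluded.union(&dead);
    println!("{} {}", excluded.contains(3), excluded.contains(70)); // true true
}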
@ -899,6 +1046,12 @@ fn debug_with_context_rec<V: Debug + Eq>(
for child in map.children(place) { for child in map.children(place) {
let info_elem = map.places[child].proj_elem.unwrap(); let info_elem = map.places[child].proj_elem.unwrap();
let child_place_str = match info_elem { let child_place_str = match info_elem {
TrackElem::Discriminant => {
format!("discriminant({})", place_str)
}
TrackElem::Variant(idx) => {
format!("({} as {:?})", place_str, idx)
}
TrackElem::Field(field) => { TrackElem::Field(field) => {
if place_str.starts_with('*') { if place_str.starts_with('*') {
format!("({}).{}", place_str, field.index()) format!("({}).{}", place_str, field.index())

View file

@ -13,6 +13,7 @@ use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, V
use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects}; use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects};
use rustc_span::DUMMY_SP; use rustc_span::DUMMY_SP;
use rustc_target::abi::Align; use rustc_target::abi::Align;
use rustc_target::abi::VariantIdx;
use crate::MirPass; use crate::MirPass;
@ -30,14 +31,12 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp {
#[instrument(skip_all, level = "debug")] #[instrument(skip_all, level = "debug")]
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
debug!(def_id = ?body.source.def_id());
if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT { if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
debug!("aborted dataflow const prop due too many basic blocks"); debug!("aborted dataflow const prop due too many basic blocks");
return; return;
} }
// Decide which places to track during the analysis.
let map = Map::from_filter(tcx, body, Ty::is_scalar);
// We want to have a somewhat linear runtime w.r.t. the number of statements/terminators. // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators.
// Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function
// applications, where `h` is the height of the lattice. Because the height of our lattice // applications, where `h` is the height of the lattice. Because the height of our lattice
@ -46,10 +45,10 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp {
// `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of
// map nodes is strongly correlated to the number of tracked places, this becomes more or // map nodes is strongly correlated to the number of tracked places, this becomes more or
// less `O(n)` if we place a constant limit on the number of tracked places. // less `O(n)` if we place a constant limit on the number of tracked places.
if tcx.sess.mir_opt_level() < 4 && map.tracked_places() > PLACE_LIMIT { let place_limit = if tcx.sess.mir_opt_level() < 4 { Some(PLACE_LIMIT) } else { None };
debug!("aborted dataflow const prop due to too many tracked places");
return; // Decide which places to track during the analysis.
} let map = Map::from_filter(tcx, body, Ty::is_scalar, place_limit);
// Perform the actual dataflow analysis. // Perform the actual dataflow analysis.
let analysis = ConstAnalysis::new(tcx, body, map); let analysis = ConstAnalysis::new(tcx, body, map);
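Reading the two halves of the comment together, a rough (illustrative) estimate: each tracked place contributes a constant amount to the lattice height `h`, so the fixpoint loop performs on the order of `tracked_places * n` transfer-function applications, and each application can touch up to `num_nodes` map entries, for roughly `num_nodes * tracked_places * n` work in total. Capping the tracked places via `place_limit` turns both non-`n` factors into constants, which is what keeps the pass approximately linear in the number of statements.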
@ -63,14 +62,31 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp {
} }
} }
struct ConstAnalysis<'tcx> { struct ConstAnalysis<'a, 'tcx> {
map: Map, map: Map,
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
local_decls: &'a LocalDecls<'tcx>,
ecx: InterpCx<'tcx, 'tcx, DummyMachine>, ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
} }
impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> { impl<'tcx> ConstAnalysis<'_, 'tcx> {
fn eval_discriminant(
&self,
enum_ty: Ty<'tcx>,
variant_index: VariantIdx,
) -> Option<ScalarTy<'tcx>> {
if !enum_ty.is_enum() {
return None;
}
let discr = enum_ty.discriminant_for_variant(self.tcx, variant_index)?;
let discr_layout = self.tcx.layout_of(self.param_env.and(discr.ty)).ok()?;
let discr_value = Scalar::try_from_uint(discr.val, discr_layout.size)?;
Some(ScalarTy(discr_value, discr.ty))
}
}
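For intuition about what `eval_discriminant` returns: the discriminant associated with a variant is just a constant of the enum's discriminant type, which the analysis can then track like any other scalar. A plain-Rust illustration (standalone, not rustc code):

enum Speed {
    Slow = 10,
    Fast = 42,
}

fn main() {
    // Reading the discriminant yields the constant associated with the variant.
    println!("{} {}", Speed::Slow as isize, Speed::Fast as isize); // 10 42
}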
impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
type Value = FlatSet<ScalarTy<'tcx>>; type Value = FlatSet<ScalarTy<'tcx>>;
const NAME: &'static str = "ConstAnalysis"; const NAME: &'static str = "ConstAnalysis";
@ -79,6 +95,25 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
&self.map &self.map
} }
fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
match statement.kind {
StatementKind::SetDiscriminant { box ref place, variant_index } => {
state.flood_discr(place.as_ref(), &self.map);
if self.map.find_discr(place.as_ref()).is_some() {
let enum_ty = place.ty(self.local_decls, self.tcx).ty;
if let Some(discr) = self.eval_discriminant(enum_ty, variant_index) {
state.assign_discr(
place.as_ref(),
ValueOrPlace::Value(FlatSet::Elem(discr)),
&self.map,
);
}
}
}
_ => self.super_statement(statement, state),
}
}
fn handle_assign( fn handle_assign(
&self, &self,
target: Place<'tcx>, target: Place<'tcx>,
@ -87,36 +122,47 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
) { ) {
match rvalue { match rvalue {
Rvalue::Aggregate(kind, operands) => { Rvalue::Aggregate(kind, operands) => {
let target = self.map().find(target.as_ref()); state.flood_with(target.as_ref(), self.map(), FlatSet::Bottom);
if let Some(target) = target { if let Some(target_idx) = self.map().find(target.as_ref()) {
state.flood_idx_with(target, self.map(), FlatSet::Bottom); let (variant_target, variant_index) = match **kind {
let field_based = match **kind { AggregateKind::Tuple | AggregateKind::Closure(..) => {
AggregateKind::Tuple | AggregateKind::Closure(..) => true, (Some(target_idx), None)
AggregateKind::Adt(def_id, ..) => {
matches!(self.tcx.def_kind(def_id), DefKind::Struct)
} }
_ => false, AggregateKind::Adt(def_id, variant_index, ..) => {
match self.tcx.def_kind(def_id) {
DefKind::Struct => (Some(target_idx), None),
DefKind::Enum => (Some(target_idx), Some(variant_index)),
_ => (None, None),
}
}
_ => (None, None),
}; };
if field_based { if let Some(target) = variant_target {
for (field_index, operand) in operands.iter().enumerate() { for (field_index, operand) in operands.iter().enumerate() {
if let Some(field) = self if let Some(field) = self
.map() .map()
.apply(target, TrackElem::Field(Field::from_usize(field_index))) .apply(target, TrackElem::Field(Field::from_usize(field_index)))
{ {
let result = self.handle_operand(operand, state); let result = self.handle_operand(operand, state);
state.assign_idx(field, result, self.map()); state.insert_idx(field, result, self.map());
} }
} }
} }
if let Some(variant_index) = variant_index
&& let Some(discr_idx) = self.map().apply(target_idx, TrackElem::Discriminant)
{
let enum_ty = target.ty(self.local_decls, self.tcx).ty;
if let Some(discr_val) = self.eval_discriminant(enum_ty, variant_index) {
state.insert_value_idx(discr_idx, FlatSet::Elem(discr_val), &self.map);
}
}
} }
} }
Rvalue::CheckedBinaryOp(op, box (left, right)) => { Rvalue::CheckedBinaryOp(op, box (left, right)) => {
// Flood everything now, so we can use `insert_value_idx` directly later.
state.flood(target.as_ref(), self.map());
let target = self.map().find(target.as_ref()); let target = self.map().find(target.as_ref());
if let Some(target) = target {
// We should not track any projections other than
// what is overwritten below, but just in case...
state.flood_idx(target, self.map());
}
let value_target = target let value_target = target
.and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into()))); .and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into())));
@ -127,7 +173,8 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
let (val, overflow) = self.binary_op(state, *op, left, right); let (val, overflow) = self.binary_op(state, *op, left, right);
if let Some(value_target) = value_target { if let Some(value_target) = value_target {
state.assign_idx(value_target, ValueOrPlace::Value(val), self.map()); // We have flooded `target` earlier.
state.insert_value_idx(value_target, val, self.map());
} }
if let Some(overflow_target) = overflow_target { if let Some(overflow_target) = overflow_target {
let overflow = match overflow { let overflow = match overflow {
@ -142,11 +189,8 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
} }
FlatSet::Bottom => FlatSet::Bottom, FlatSet::Bottom => FlatSet::Bottom,
}; };
state.assign_idx( // We have flooded `target` earlier.
overflow_target, state.insert_value_idx(overflow_target, overflow, self.map());
ValueOrPlace::Value(overflow),
self.map(),
);
} }
} }
} }
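The `CheckedBinaryOp` handling above mirrors how MIR models checked arithmetic: the rvalue produces a pair whose field 0 is the wrapped result and whose field 1 is the overflow flag, and the two fields are tracked separately. A surface-Rust analogue (illustrative only):

fn main() {
    // A checked binary operation yields (result, overflowed); the analysis
    // tracks the two tuple fields as separate values.
    let (val, overflow) = i32::MAX.overflowing_add(1);
    println!("{val} {overflow}"); // -2147483648 true
}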
@ -195,6 +239,9 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom), FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
FlatSet::Top => ValueOrPlace::Value(FlatSet::Top), FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
}, },
Rvalue::Discriminant(place) => {
ValueOrPlace::Value(state.get_discr(place.as_ref(), self.map()))
}
_ => self.super_rvalue(rvalue, state), _ => self.super_rvalue(rvalue, state),
} }
} }
@ -268,12 +315,13 @@ impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> {
} }
} }
impl<'tcx> ConstAnalysis<'tcx> { impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, map: Map) -> Self { pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map) -> Self {
let param_env = tcx.param_env(body.source.def_id()); let param_env = tcx.param_env(body.source.def_id());
Self { Self {
map, map,
tcx, tcx,
local_decls: &body.local_decls,
ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine), ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
param_env: param_env, param_env: param_env,
} }
@ -466,6 +514,21 @@ impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
_ => (), _ => (),
} }
} }
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
match rvalue {
Rvalue::Discriminant(place) => {
match self.state.get_discr(place.as_ref(), self.visitor.map) {
FlatSet::Top => (),
FlatSet::Elem(value) => {
self.visitor.before_effect.insert((location, *place), value);
}
FlatSet::Bottom => (),
}
}
_ => self.super_rvalue(rvalue, location),
}
}
} }
struct DummyMachine; struct DummyMachine;

View file

@ -1,5 +1,5 @@
use crate::MirPass; use crate::MirPass;
use rustc_index::bit_set::BitSet; use rustc_index::bit_set::{BitSet, GrowableBitSet};
use rustc_index::vec::IndexVec; use rustc_index::vec::IndexVec;
use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::patch::MirPatch;
use rustc_middle::mir::visit::*; use rustc_middle::mir::visit::*;
@ -26,10 +26,12 @@ impl<'tcx> MirPass<'tcx> for ScalarReplacementOfAggregates {
debug!(?replacements); debug!(?replacements);
let all_dead_locals = replace_flattened_locals(tcx, body, replacements); let all_dead_locals = replace_flattened_locals(tcx, body, replacements);
if !all_dead_locals.is_empty() { if !all_dead_locals.is_empty() {
for local in excluded.indices() { excluded.union(&all_dead_locals);
excluded[local] |= all_dead_locals.contains(local); excluded = {
} let mut growable = GrowableBitSet::from(excluded);
excluded.raw.resize(body.local_decls.len(), false); growable.ensure(body.local_decls.len());
growable.into()
};
} else { } else {
break; break;
} }
@ -44,11 +46,11 @@ impl<'tcx> MirPass<'tcx> for ScalarReplacementOfAggregates {
/// - the local is a union or an enum; /// - the local is a union or an enum;
/// - the local's address is taken, and thus the relative addresses of the fields are observable to /// - the local's address is taken, and thus the relative addresses of the fields are observable to
/// client code. /// client code.
fn escaping_locals(excluded: &IndexVec<Local, bool>, body: &Body<'_>) -> BitSet<Local> { fn escaping_locals(excluded: &BitSet<Local>, body: &Body<'_>) -> BitSet<Local> {
let mut set = BitSet::new_empty(body.local_decls.len()); let mut set = BitSet::new_empty(body.local_decls.len());
set.insert_range(RETURN_PLACE..=Local::from_usize(body.arg_count)); set.insert_range(RETURN_PLACE..=Local::from_usize(body.arg_count));
for (local, decl) in body.local_decls().iter_enumerated() { for (local, decl) in body.local_decls().iter_enumerated() {
if decl.ty.is_union() || decl.ty.is_enum() || excluded[local] { if decl.ty.is_union() || decl.ty.is_enum() || excluded.contains(local) {
set.insert(local); set.insert(local);
} }
} }
@ -172,7 +174,7 @@ fn replace_flattened_locals<'tcx>(
body: &mut Body<'tcx>, body: &mut Body<'tcx>,
replacements: ReplacementMap<'tcx>, replacements: ReplacementMap<'tcx>,
) -> BitSet<Local> { ) -> BitSet<Local> {
let mut all_dead_locals = BitSet::new_empty(body.local_decls.len()); let mut all_dead_locals = BitSet::new_empty(replacements.fragments.len());
for (local, replacements) in replacements.fragments.iter_enumerated() { for (local, replacements) in replacements.fragments.iter_enumerated() {
if replacements.is_some() { if replacements.is_some() {
all_dead_locals.insert(local); all_dead_locals.insert(local);

View file

@ -65,7 +65,7 @@ impl<'a, Id: Into<DefId>> ToNameBinding<'a> for (Res, ty::Visibility<Id>, Span,
} }
} }
impl<'a> Resolver<'a> { impl<'a, 'tcx> Resolver<'a, 'tcx> {
/// Defines `name` in namespace `ns` of module `parent` to be `def` if it is not yet defined; /// Defines `name` in namespace `ns` of module `parent` to be `def` if it is not yet defined;
/// otherwise, reports an error. /// otherwise, reports an error.
pub(crate) fn define<T>(&mut self, parent: Module<'a>, ident: Ident, ns: Namespace, def: T) pub(crate) fn define<T>(&mut self, parent: Module<'a>, ident: Ident, ns: Namespace, def: T)
@ -214,18 +214,18 @@ impl<'a> Resolver<'a> {
} }
} }
struct BuildReducedGraphVisitor<'a, 'b> { struct BuildReducedGraphVisitor<'a, 'b, 'tcx> {
r: &'b mut Resolver<'a>, r: &'b mut Resolver<'a, 'tcx>,
parent_scope: ParentScope<'a>, parent_scope: ParentScope<'a>,
} }
impl<'a> AsMut<Resolver<'a>> for BuildReducedGraphVisitor<'a, '_> { impl<'a, 'tcx> AsMut<Resolver<'a, 'tcx>> for BuildReducedGraphVisitor<'a, '_, 'tcx> {
fn as_mut(&mut self) -> &mut Resolver<'a> { fn as_mut(&mut self) -> &mut Resolver<'a, 'tcx> {
self.r self.r
} }
} }
impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility { fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility {
self.try_resolve_visibility(vis, true).unwrap_or_else(|err| { self.try_resolve_visibility(vis, true).unwrap_or_else(|err| {
self.r.report_vis_error(err); self.r.report_vis_error(err);
@ -1315,7 +1315,7 @@ macro_rules! method {
}; };
} }
impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { impl<'a, 'b, 'tcx> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b, 'tcx> {
method!(visit_expr: ast::Expr, ast::ExprKind::MacCall, walk_expr); method!(visit_expr: ast::Expr, ast::ExprKind::MacCall, walk_expr);
method!(visit_pat: ast::Pat, ast::PatKind::MacCall, walk_pat); method!(visit_pat: ast::Pat, ast::PatKind::MacCall, walk_pat);
method!(visit_ty: ast::Ty, ast::TyKind::MacCall, walk_ty); method!(visit_ty: ast::Ty, ast::TyKind::MacCall, walk_ty);
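The resolver changes across the following files all apply one mechanical pattern: `Resolver` gains a second `'tcx` lifetime for data borrowed from the compiler session/context, and every visitor that borrows the resolver threads the extra parameter through its own generics. A standalone sketch of that pattern with illustrative names (not the actual resolver types):

// Illustrative only: adding a second lifetime to a resolver-like struct and
// threading it through the visitors that borrow it.
struct Session {
    edition: u32,
}

struct Resolver<'a, 'tcx> {
    session: &'tcx Session,
    names: Vec<&'a str>,
}

struct NameVisitor<'a, 'b, 'tcx> {
    r: &'b mut Resolver<'a, 'tcx>,
}

impl<'a, 'b, 'tcx> NameVisitor<'a, 'b, 'tcx> {
    fn visit(&mut self, name: &'a str) {
        self.r.names.push(name);
    }
}

fn main() {
    let session = Session { edition: 2021 };
    let mut resolver = Resolver { session: &session, names: Vec::new() };

    let mut visitor = NameVisitor { r: &mut resolver };
    visitor.visit("foo");
    visitor.visit("bar");

    println!("edition {}: {:?}", resolver.session.edition, resolver.names);
}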

View file

@ -49,8 +49,8 @@ impl<'a> UnusedImport<'a> {
} }
} }
struct UnusedImportCheckVisitor<'a, 'b> { struct UnusedImportCheckVisitor<'a, 'b, 'tcx> {
r: &'a mut Resolver<'b>, r: &'a mut Resolver<'b, 'tcx>,
/// All the (so far) unused imports, grouped by path list /// All the (so far) unused imports, grouped by path list
unused_imports: FxIndexMap<ast::NodeId, UnusedImport<'a>>, unused_imports: FxIndexMap<ast::NodeId, UnusedImport<'a>>,
base_use_tree: Option<&'a ast::UseTree>, base_use_tree: Option<&'a ast::UseTree>,
@ -58,7 +58,7 @@ struct UnusedImportCheckVisitor<'a, 'b> {
item_span: Span, item_span: Span,
} }
impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> { impl<'a, 'b, 'tcx> UnusedImportCheckVisitor<'a, 'b, 'tcx> {
// We have information about whether `use` (import) items are actually // We have information about whether `use` (import) items are actually
// used now. If an import is not used at all, we signal a lint error. // used now. If an import is not used at all, we signal a lint error.
fn check_import(&mut self, id: ast::NodeId) { fn check_import(&mut self, id: ast::NodeId) {
@ -94,7 +94,7 @@ impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> {
} }
} }
impl<'a, 'b> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b> { impl<'a, 'b, 'tcx> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b, 'tcx> {
fn visit_item(&mut self, item: &'a ast::Item) { fn visit_item(&mut self, item: &'a ast::Item) {
self.item_span = item.span_with_attributes(); self.item_span = item.span_with_attributes();
@ -222,7 +222,7 @@ fn calc_unused_spans(
} }
} }
impl Resolver<'_> { impl Resolver<'_, '_> {
pub(crate) fn check_unused(&mut self, krate: &ast::Crate) { pub(crate) fn check_unused(&mut self, krate: &ast::Crate) {
for import in self.potentially_unused_imports.iter() { for import in self.potentially_unused_imports.iter() {
match import.kind { match import.kind {

View file

@ -9,7 +9,7 @@ use rustc_span::symbol::sym;
use rustc_span::Span; use rustc_span::Span;
pub(crate) fn collect_definitions( pub(crate) fn collect_definitions(
resolver: &mut Resolver<'_>, resolver: &mut Resolver<'_, '_>,
fragment: &AstFragment, fragment: &AstFragment,
expansion: LocalExpnId, expansion: LocalExpnId,
) { ) {
@ -18,14 +18,14 @@ pub(crate) fn collect_definitions(
} }
/// Creates `DefId`s for nodes in the AST. /// Creates `DefId`s for nodes in the AST.
struct DefCollector<'a, 'b> { struct DefCollector<'a, 'b, 'tcx> {
resolver: &'a mut Resolver<'b>, resolver: &'a mut Resolver<'b, 'tcx>,
parent_def: LocalDefId, parent_def: LocalDefId,
impl_trait_context: ImplTraitContext, impl_trait_context: ImplTraitContext,
expansion: LocalExpnId, expansion: LocalExpnId,
} }
impl<'a, 'b> DefCollector<'a, 'b> { impl<'a, 'b, 'tcx> DefCollector<'a, 'b, 'tcx> {
fn create_def(&mut self, node_id: NodeId, data: DefPathData, span: Span) -> LocalDefId { fn create_def(&mut self, node_id: NodeId, data: DefPathData, span: Span) -> LocalDefId {
let parent_def = self.parent_def; let parent_def = self.parent_def;
debug!("create_def(node_id={:?}, data={:?}, parent_def={:?})", node_id, data, parent_def); debug!("create_def(node_id={:?}, data={:?}, parent_def={:?})", node_id, data, parent_def);
@ -81,7 +81,7 @@ impl<'a, 'b> DefCollector<'a, 'b> {
} }
} }
impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> { impl<'a, 'b, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'b, 'tcx> {
fn visit_item(&mut self, i: &'a Item) { fn visit_item(&mut self, i: &'a Item) {
debug!("visit_item: {:?}", i); debug!("visit_item: {:?}", i);

View file

@ -114,7 +114,7 @@ fn reduce_impl_span_to_impl_keyword(sm: &SourceMap, impl_span: Span) -> Span {
sm.span_until_whitespace(impl_span) sm.span_until_whitespace(impl_span)
} }
impl<'a> Resolver<'a> { impl<'a, 'tcx> Resolver<'a, 'tcx> {
pub(crate) fn report_errors(&mut self, krate: &Crate) { pub(crate) fn report_errors(&mut self, krate: &Crate) {
self.report_with_use_injections(krate); self.report_with_use_injections(krate);
@ -1883,7 +1883,7 @@ impl<'a> Resolver<'a> {
} }
} }
impl<'a, 'b> ImportResolver<'a, 'b> { impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
/// Adds suggestions for a path that cannot be resolved. /// Adds suggestions for a path that cannot be resolved.
pub(crate) fn make_path_suggestion( pub(crate) fn make_path_suggestion(
&mut self, &mut self,

View file

@ -29,8 +29,8 @@ impl ParentId<'_> {
} }
} }
pub(crate) struct EffectiveVisibilitiesVisitor<'r, 'a> { pub(crate) struct EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
r: &'r mut Resolver<'a>, r: &'r mut Resolver<'a, 'tcx>,
def_effective_visibilities: EffectiveVisibilities, def_effective_visibilities: EffectiveVisibilities,
/// While walking import chains we need to track effective visibilities per-binding, and def id /// While walking import chains we need to track effective visibilities per-binding, and def id
/// keys in `Resolver::effective_visibilities` are not enough for that, because multiple /// keys in `Resolver::effective_visibilities` are not enough for that, because multiple
@ -41,7 +41,7 @@ pub(crate) struct EffectiveVisibilitiesVisitor<'r, 'a> {
changed: bool, changed: bool,
} }
impl Resolver<'_> { impl Resolver<'_, '_> {
fn nearest_normal_mod(&mut self, def_id: LocalDefId) -> LocalDefId { fn nearest_normal_mod(&mut self, def_id: LocalDefId) -> LocalDefId {
self.get_nearest_non_block_module(def_id.to_def_id()).nearest_parent_mod().expect_local() self.get_nearest_non_block_module(def_id.to_def_id()).nearest_parent_mod().expect_local()
} }
@ -67,18 +67,21 @@ impl Resolver<'_> {
} }
} }
impl<'a, 'b> IntoDefIdTree for &'b mut Resolver<'a> { impl<'a, 'b, 'tcx> IntoDefIdTree for &'b mut Resolver<'a, 'tcx> {
type Tree = &'b Resolver<'a>; type Tree = &'b Resolver<'a, 'tcx>;
fn tree(self) -> Self::Tree { fn tree(self) -> Self::Tree {
self self
} }
} }
impl<'r, 'a> EffectiveVisibilitiesVisitor<'r, 'a> { impl<'r, 'a, 'tcx> EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
/// Fills the `Resolver::effective_visibilities` table with public & exported items /// Fills the `Resolver::effective_visibilities` table with public & exported items
/// For now, this doesn't resolve macros (FIXME) and cannot resolve Impl, as we /// For now, this doesn't resolve macros (FIXME) and cannot resolve Impl, as we
/// need access to a TyCtxt for that. /// need access to a TyCtxt for that.
pub(crate) fn compute_effective_visibilities<'c>(r: &'r mut Resolver<'a>, krate: &'c Crate) { pub(crate) fn compute_effective_visibilities<'c>(
r: &'r mut Resolver<'a, 'tcx>,
krate: &'c Crate,
) {
let mut visitor = EffectiveVisibilitiesVisitor { let mut visitor = EffectiveVisibilitiesVisitor {
r, r,
def_effective_visibilities: Default::default(), def_effective_visibilities: Default::default(),
@ -192,7 +195,7 @@ impl<'r, 'a> EffectiveVisibilitiesVisitor<'r, 'a> {
} }
} }
impl<'r, 'ast> Visitor<'ast> for EffectiveVisibilitiesVisitor<'ast, 'r> { impl<'r, 'ast, 'tcx> Visitor<'ast> for EffectiveVisibilitiesVisitor<'ast, 'r, 'tcx> {
fn visit_item(&mut self, item: &'ast ast::Item) { fn visit_item(&mut self, item: &'ast ast::Item) {
let def_id = self.r.local_def_id(item.id); let def_id = self.r.local_def_id(item.id);
// Update effective visibilities of nested items. // Update effective visibilities of nested items.

View file

@ -28,7 +28,7 @@ use RibKind::*;
type Visibility = ty::Visibility<LocalDefId>; type Visibility = ty::Visibility<LocalDefId>;
impl<'a> Resolver<'a> { impl<'a, 'tcx> Resolver<'a, 'tcx> {
/// A generic scope visitor. /// A generic scope visitor.
/// Visits scopes in order to resolve some identifier in them or perform other actions. /// Visits scopes in order to resolve some identifier in them or perform other actions.
/// If the callback returns `Some` result, we stop visiting scopes and return it. /// If the callback returns `Some` result, we stop visiting scopes and return it.

View file

@ -225,7 +225,7 @@ fn pub_use_of_private_extern_crate_hack(import: &Import<'_>, binding: &NameBindi
} }
} }
impl<'a> Resolver<'a> { impl<'a, 'tcx> Resolver<'a, 'tcx> {
/// Given a binding and an import that resolves to it, /// Given a binding and an import that resolves to it,
/// return the corresponding binding defined by the import. /// return the corresponding binding defined by the import.
pub(crate) fn import( pub(crate) fn import(
@ -333,7 +333,7 @@ impl<'a> Resolver<'a> {
// If the resolution becomes a success, define it in the module's glob importers. // If the resolution becomes a success, define it in the module's glob importers.
fn update_resolution<T, F>(&mut self, module: Module<'a>, key: BindingKey, f: F) -> T fn update_resolution<T, F>(&mut self, module: Module<'a>, key: BindingKey, f: F) -> T
where where
F: FnOnce(&mut Resolver<'a>, &mut NameResolution<'a>) -> T, F: FnOnce(&mut Resolver<'a, 'tcx>, &mut NameResolution<'a>) -> T,
{ {
// Ensure that `resolution` isn't borrowed when defining in the module's glob importers, // Ensure that `resolution` isn't borrowed when defining in the module's glob importers,
// during which the resolution might end up getting re-defined via a glob cycle. // during which the resolution might end up getting re-defined via a glob cycle.
@ -405,11 +405,11 @@ struct UnresolvedImportError {
candidates: Option<Vec<ImportSuggestion>>, candidates: Option<Vec<ImportSuggestion>>,
} }
pub(crate) struct ImportResolver<'a, 'b> { pub(crate) struct ImportResolver<'a, 'b, 'tcx> {
pub r: &'a mut Resolver<'b>, pub r: &'a mut Resolver<'b, 'tcx>,
} }
impl<'a, 'b> ImportResolver<'a, 'b> { impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
// Import resolution // Import resolution
// //
// This is a fixed-point algorithm. We resolve imports until our efforts // This is a fixed-point algorithm. We resolve imports until our efforts

View file

@ -505,7 +505,7 @@ enum MaybeExported<'a> {
} }
impl MaybeExported<'_> { impl MaybeExported<'_> {
fn eval(self, r: &Resolver<'_>) -> bool { fn eval(self, r: &Resolver<'_, '_>) -> bool {
let def_id = match self { let def_id = match self {
MaybeExported::Ok(node_id) => Some(r.local_def_id(node_id)), MaybeExported::Ok(node_id) => Some(r.local_def_id(node_id)),
MaybeExported::Impl(Some(trait_def_id)) | MaybeExported::ImplItem(Ok(trait_def_id)) => { MaybeExported::Impl(Some(trait_def_id)) | MaybeExported::ImplItem(Ok(trait_def_id)) => {
@ -584,8 +584,8 @@ struct DiagnosticMetadata<'ast> {
current_elision_failures: Vec<MissingLifetime>, current_elision_failures: Vec<MissingLifetime>,
} }
struct LateResolutionVisitor<'a, 'b, 'ast> { struct LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
r: &'b mut Resolver<'a>, r: &'b mut Resolver<'a, 'tcx>,
/// The module that represents the current item scope. /// The module that represents the current item scope.
parent_scope: ParentScope<'a>, parent_scope: ParentScope<'a>,
@ -628,7 +628,7 @@ struct LateResolutionVisitor<'a, 'b, 'ast> {
} }
/// Walks the whole crate in DFS order, visiting each item, resolving names as it goes. /// Walks the whole crate in DFS order, visiting each item, resolving names as it goes.
impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> { impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
fn visit_attribute(&mut self, _: &'ast Attribute) { fn visit_attribute(&mut self, _: &'ast Attribute) {
// We do not want to resolve expressions that appear in attributes, // We do not want to resolve expressions that appear in attributes,
// as they do not correspond to actual code. // as they do not correspond to actual code.
@ -1199,8 +1199,8 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> {
} }
} }
impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
fn new(resolver: &'b mut Resolver<'a>) -> LateResolutionVisitor<'a, 'b, 'ast> { fn new(resolver: &'b mut Resolver<'a, 'tcx>) -> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
// During late resolution we only track the module component of the parent scope, // During late resolution we only track the module component of the parent scope,
// although it may be useful to track other components as well for diagnostics. // although it may be useful to track other components as well for diagnostics.
let graph_root = resolver.graph_root; let graph_root = resolver.graph_root;
@ -2029,13 +2029,13 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
/// List all the lifetimes that appear in the provided type. /// List all the lifetimes that appear in the provided type.
fn find_lifetime_for_self(&self, ty: &'ast Ty) -> Set1<LifetimeRes> { fn find_lifetime_for_self(&self, ty: &'ast Ty) -> Set1<LifetimeRes> {
struct SelfVisitor<'r, 'a> { struct SelfVisitor<'r, 'a, 'tcx> {
r: &'r Resolver<'a>, r: &'r Resolver<'a, 'tcx>,
impl_self: Option<Res>, impl_self: Option<Res>,
lifetime: Set1<LifetimeRes>, lifetime: Set1<LifetimeRes>,
} }
impl SelfVisitor<'_, '_> { impl SelfVisitor<'_, '_, '_> {
// Look for `self: &'a Self` - also desugared from `&'a self`, // Look for `self: &'a Self` - also desugared from `&'a self`,
// and if that matches, use it for elision and return early. // and if that matches, use it for elision and return early.
fn is_self_ty(&self, ty: &Ty) -> bool { fn is_self_ty(&self, ty: &Ty) -> bool {
@ -2053,7 +2053,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
} }
} }
impl<'a> Visitor<'a> for SelfVisitor<'_, '_> { impl<'a> Visitor<'a> for SelfVisitor<'_, '_, '_> {
fn visit_ty(&mut self, ty: &'a Ty) { fn visit_ty(&mut self, ty: &'a Ty) {
trace!("SelfVisitor considering ty={:?}", ty); trace!("SelfVisitor considering ty={:?}", ty);
if let TyKind::Ref(lt, ref mt) = ty.kind && self.is_self_ty(&mt.ty) { if let TyKind::Ref(lt, ref mt) = ty.kind && self.is_self_ty(&mt.ty) {
@ -4288,13 +4288,13 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
} }
} }
struct LifetimeCountVisitor<'a, 'b> { struct LifetimeCountVisitor<'a, 'b, 'tcx> {
r: &'b mut Resolver<'a>, r: &'b mut Resolver<'a, 'tcx>,
} }
/// Walks the whole crate in DFS order, visiting each item, counting the declared number of /// Walks the whole crate in DFS order, visiting each item, counting the declared number of
/// lifetime generic parameters. /// lifetime generic parameters.
impl<'ast> Visitor<'ast> for LifetimeCountVisitor<'_, '_> { impl<'ast> Visitor<'ast> for LifetimeCountVisitor<'_, '_, '_> {
fn visit_item(&mut self, item: &'ast Item) { fn visit_item(&mut self, item: &'ast Item) {
match &item.kind { match &item.kind {
ItemKind::TyAlias(box TyAlias { ref generics, .. }) ItemKind::TyAlias(box TyAlias { ref generics, .. })
@ -4328,7 +4328,7 @@ impl<'ast> Visitor<'ast> for LifetimeCountVisitor<'_, '_> {
} }
} }
impl<'a> Resolver<'a> { impl<'a, 'tcx> Resolver<'a, 'tcx> {
pub(crate) fn late_resolve_crate(&mut self, krate: &Crate) { pub(crate) fn late_resolve_crate(&mut self, krate: &Crate) {
visit::walk_crate(&mut LifetimeCountVisitor { r: self }, krate); visit::walk_crate(&mut LifetimeCountVisitor { r: self }, krate);
let mut late_resolution_visitor = LateResolutionVisitor::new(self); let mut late_resolution_visitor = LateResolutionVisitor::new(self);

View file

@ -166,7 +166,7 @@ impl TypoCandidate {
} }
} }
impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> { impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
fn def_span(&self, def_id: DefId) -> Option<Span> { fn def_span(&self, def_id: DefId) -> Option<Span> {
match def_id.krate { match def_id.krate {
LOCAL_CRATE => self.r.opt_span(def_id), LOCAL_CRATE => self.r.opt_span(def_id),
@ -318,7 +318,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
span: Span, span: Span,
source: PathSource<'_>, source: PathSource<'_>,
res: Option<Res>, res: Option<Res>,
) -> (DiagnosticBuilder<'a, ErrorGuaranteed>, Vec<ImportSuggestion>) { ) -> (DiagnosticBuilder<'tcx, ErrorGuaranteed>, Vec<ImportSuggestion>) {
debug!(?res, ?source); debug!(?res, ?source);
let base_error = self.make_base_error(path, span, source, res); let base_error = self.make_base_error(path, span, source, res);
let code = source.error_code(res.is_some()); let code = source.error_code(res.is_some());

View file

@ -147,7 +147,7 @@ struct ParentScope<'a> {
impl<'a> ParentScope<'a> { impl<'a> ParentScope<'a> {
/// Creates a parent scope with the passed argument used as the module scope component, /// Creates a parent scope with the passed argument used as the module scope component,
/// and other scope components set to default empty values. /// and other scope components set to default empty values.
fn module(module: Module<'a>, resolver: &Resolver<'a>) -> ParentScope<'a> { fn module(module: Module<'a>, resolver: &Resolver<'a, '_>) -> ParentScope<'a> {
ParentScope { ParentScope {
module, module,
expansion: LocalExpnId::ROOT, expansion: LocalExpnId::ROOT,
@ -528,9 +528,9 @@ impl<'a> ModuleData<'a> {
} }
} }
fn for_each_child<R, F>(&'a self, resolver: &mut R, mut f: F) fn for_each_child<'tcx, R, F>(&'a self, resolver: &mut R, mut f: F)
where where
R: AsMut<Resolver<'a>>, R: AsMut<Resolver<'a, 'tcx>>,
F: FnMut(&mut R, Ident, Namespace, &'a NameBinding<'a>), F: FnMut(&mut R, Ident, Namespace, &'a NameBinding<'a>),
{ {
for (key, name_resolution) in resolver.as_mut().resolutions(self).borrow().iter() { for (key, name_resolution) in resolver.as_mut().resolutions(self).borrow().iter() {
@ -541,9 +541,9 @@ impl<'a> ModuleData<'a> {
} }
/// This modifies `self` in place. The traits will be stored in `self.traits`. /// This modifies `self` in place. The traits will be stored in `self.traits`.
fn ensure_traits<R>(&'a self, resolver: &mut R) fn ensure_traits<'tcx, R>(&'a self, resolver: &mut R)
where where
R: AsMut<Resolver<'a>>, R: AsMut<Resolver<'a, 'tcx>>,
{ {
let mut traits = self.traits.borrow_mut(); let mut traits = self.traits.borrow_mut();
if traits.is_none() { if traits.is_none() {
@ -864,8 +864,8 @@ struct MacroData {
/// The main resolver class. /// The main resolver class.
/// ///
/// This is the visitor that walks the whole crate. /// This is the visitor that walks the whole crate.
pub struct Resolver<'a> { pub struct Resolver<'a, 'tcx> {
session: &'a Session, session: &'tcx Session,
/// Item with a given `LocalDefId` was defined during macro expansion with ID `ExpnId`. /// Item with a given `LocalDefId` was defined during macro expansion with ID `ExpnId`.
expn_that_defined: FxHashMap<LocalDefId, ExpnId>, expn_that_defined: FxHashMap<LocalDefId, ExpnId>,
@ -949,7 +949,7 @@ pub struct Resolver<'a> {
/// Ambiguity errors are delayed for deduplication. /// Ambiguity errors are delayed for deduplication.
ambiguity_errors: Vec<AmbiguityError<'a>>, ambiguity_errors: Vec<AmbiguityError<'a>>,
/// `use` injections are delayed for better placement and deduplication. /// `use` injections are delayed for better placement and deduplication.
use_injections: Vec<UseError<'a>>, use_injections: Vec<UseError<'tcx>>,
/// Crate-local macro expanded `macro_export` referred to by a module-relative path. /// Crate-local macro expanded `macro_export` referred to by a module-relative path.
macro_expanded_macro_export_errors: BTreeSet<(Span, Span)>, macro_expanded_macro_export_errors: BTreeSet<(Span, Span)>,
@ -1111,8 +1111,8 @@ impl<'a> ResolverArenas<'a> {
} }
} }
impl<'a> AsMut<Resolver<'a>> for Resolver<'a> { impl<'a, 'tcx> AsMut<Resolver<'a, 'tcx>> for Resolver<'a, 'tcx> {
fn as_mut(&mut self) -> &mut Resolver<'a> { fn as_mut(&mut self) -> &mut Resolver<'a, 'tcx> {
self self
} }
} }
@ -1134,14 +1134,14 @@ impl DefIdTree for ResolverTree<'_> {
} }
} }
impl<'a, 'b> DefIdTree for &'a Resolver<'b> { impl<'a, 'b, 'tcx> DefIdTree for &'a Resolver<'b, 'tcx> {
#[inline] #[inline]
fn opt_parent(self, id: DefId) -> Option<DefId> { fn opt_parent(self, id: DefId) -> Option<DefId> {
ResolverTree(&self.untracked).opt_parent(id) ResolverTree(&self.untracked).opt_parent(id)
} }
} }
impl<'a> Resolver<'a> { impl<'tcx> Resolver<'_, 'tcx> {
fn opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> { fn opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
self.node_id_to_def_id.get(&node).copied() self.node_id_to_def_id.get(&node).copied()
} }
@ -1200,14 +1200,14 @@ impl<'a> Resolver<'a> {
} }
} }
impl<'a> Resolver<'a> { impl<'a, 'tcx> Resolver<'a, 'tcx> {
pub fn new( pub fn new(
session: &'a Session, session: &'tcx Session,
krate: &Crate, krate: &Crate,
crate_name: Symbol, crate_name: Symbol,
metadata_loader: Box<MetadataLoaderDyn>, metadata_loader: Box<MetadataLoaderDyn>,
arenas: &'a ResolverArenas<'a>, arenas: &'a ResolverArenas<'a>,
) -> Resolver<'a> { ) -> Resolver<'a, 'tcx> {
let root_def_id = CRATE_DEF_ID.to_def_id(); let root_def_id = CRATE_DEF_ID.to_def_id();
let mut module_map = FxHashMap::default(); let mut module_map = FxHashMap::default();
let graph_root = arenas.new_module( let graph_root = arenas.new_module(

View file

@ -160,7 +160,7 @@ fn soft_custom_inner_attributes_gate(path: &ast::Path, invoc: &Invocation) -> bo
false false
} }
impl<'a> ResolverExpand for Resolver<'a> { impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
fn next_node_id(&mut self) -> NodeId { fn next_node_id(&mut self) -> NodeId {
self.next_node_id() self.next_node_id()
} }
@ -467,7 +467,7 @@ impl<'a> ResolverExpand for Resolver<'a> {
} }
} }
impl<'a> Resolver<'a> { impl<'a, 'tcx> Resolver<'a, 'tcx> {
/// Resolve macro path with error reporting and recovery. /// Resolve macro path with error reporting and recovery.
/// Uses dummy syntax extensions for unresolved macros or macros with unexpected resolutions /// Uses dummy syntax extensions for unresolved macros or macros with unexpected resolutions
/// for better error recovery. /// for better error recovery.

View file

@ -3,10 +3,8 @@ pub use Primitive::*;
use crate::json::{Json, ToJson}; use crate::json::{Json, ToJson};
use std::fmt;
use std::ops::Deref; use std::ops::Deref;
use rustc_data_structures::intern::Interned;
use rustc_macros::HashStable_Generic; use rustc_macros::HashStable_Generic;
pub mod call; pub mod call;
@ -19,48 +17,6 @@ impl ToJson for Endian {
} }
} }
rustc_index::newtype_index! {
#[derive(HashStable_Generic)]
pub struct VariantIdx {}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable_Generic)]
#[rustc_pass_by_value]
pub struct Layout<'a>(pub Interned<'a, LayoutS<VariantIdx>>);
impl<'a> fmt::Debug for Layout<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// See comment on `<LayoutS as Debug>::fmt` above.
self.0.0.fmt(f)
}
}
impl<'a> Layout<'a> {
pub fn fields(self) -> &'a FieldsShape {
&self.0.0.fields
}
pub fn variants(self) -> &'a Variants<VariantIdx> {
&self.0.0.variants
}
pub fn abi(self) -> Abi {
self.0.0.abi
}
pub fn largest_niche(self) -> Option<Niche> {
self.0.0.largest_niche
}
pub fn align(self) -> AbiAndPrefAlign {
self.0.0.align
}
pub fn size(self) -> Size {
self.0.0.size
}
}
/// The layout of a type, alongside the type itself. /// The layout of a type, alongside the type itself.
/// Provides various type traversal APIs (e.g., recursing into fields). /// Provides various type traversal APIs (e.g., recursing into fields).
/// ///
@ -75,8 +31,8 @@ pub struct TyAndLayout<'a, Ty> {
} }
impl<'a, Ty> Deref for TyAndLayout<'a, Ty> { impl<'a, Ty> Deref for TyAndLayout<'a, Ty> {
type Target = &'a LayoutS<VariantIdx>; type Target = &'a LayoutS;
fn deref(&self) -> &&'a LayoutS<VariantIdx> { fn deref(&self) -> &&'a LayoutS {
&self.layout.0.0 &self.layout.0.0
} }
} }
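The `Layout<'a>` wrapper removed from this file is a cheap `Copy` handle around interned `LayoutS` data whose accessors all forward to the shared payload, and `TyAndLayout` derefs to the same data. A minimal standalone sketch of that handle-plus-forwarding pattern (types simplified, no real interner):

// Illustrative sketch: a Copy handle that borrows shared (interned) layout
// data and forwards accessors to it, like Layout<'a> and TyAndLayout do.
use std::ops::Deref;

struct LayoutS {
    size: u64,
    align: u64,
}

#[derive(Copy, Clone)]
struct Layout<'a>(&'a LayoutS);

impl<'a> Layout<'a> {
    fn size(self) -> u64 {
        self.0.size
    }
    fn align(self) -> u64 {
        self.0.align
    }
}

struct TyAndLayout<'a> {
    ty: &'static str,
    layout: Layout<'a>,
}

impl<'a> Deref for TyAndLayout<'a> {
    type Target = LayoutS;
    fn deref(&self) -> &LayoutS {
        self.layout.0
    }
}

fn main() {
    let interned = LayoutS { size: 8, align: 8 };
    let layout = Layout(&interned);
    let tl = TyAndLayout { ty: "u64", layout };
    // Both the handle and the wrapper read the same shared data.
    println!("{} {} {} {}", tl.ty, layout.size(), layout.align(), tl.size);
}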

View file

@ -29,8 +29,7 @@ pub fn target() -> Target {
pre_link_args, pre_link_args,
exe_suffix: ".elf".into(), exe_suffix: ".elf".into(),
no_default_libraries: false, no_default_libraries: false,
// There are some issues in debug builds with this enabled in certain programs. has_thread_local: true,
has_thread_local: false,
..Default::default() ..Default::default()
}, },
} }

View file

@ -78,10 +78,10 @@ fn invert_mapping(map: &[u32]) -> Vec<u32> {
fn univariant_uninterned<'tcx>( fn univariant_uninterned<'tcx>(
cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, cx: &LayoutCx<'tcx, TyCtxt<'tcx>>,
ty: Ty<'tcx>, ty: Ty<'tcx>,
fields: &[TyAndLayout<'_>], fields: &[Layout<'_>],
repr: &ReprOptions, repr: &ReprOptions,
kind: StructKind, kind: StructKind,
) -> Result<LayoutS<VariantIdx>, LayoutError<'tcx>> { ) -> Result<LayoutS, LayoutError<'tcx>> {
let dl = cx.data_layout(); let dl = cx.data_layout();
let pack = repr.pack; let pack = repr.pack;
if pack.is_some() && repr.align.is_some() { if pack.is_some() && repr.align.is_some() {
@ -106,7 +106,7 @@ fn layout_of_uncached<'tcx>(
}; };
let scalar = |value: Primitive| tcx.intern_layout(LayoutS::scalar(cx, scalar_unit(value))); let scalar = |value: Primitive| tcx.intern_layout(LayoutS::scalar(cx, scalar_unit(value)));
let univariant = |fields: &[TyAndLayout<'_>], repr: &ReprOptions, kind| { let univariant = |fields: &[Layout<'_>], repr: &ReprOptions, kind| {
Ok(tcx.intern_layout(univariant_uninterned(cx, ty, fields, repr, kind)?)) Ok(tcx.intern_layout(univariant_uninterned(cx, ty, fields, repr, kind)?))
}; };
debug_assert!(!ty.has_non_region_infer()); debug_assert!(!ty.has_non_region_infer());
@ -273,7 +273,7 @@ fn layout_of_uncached<'tcx>(
ty::Closure(_, ref substs) => { ty::Closure(_, ref substs) => {
let tys = substs.as_closure().upvar_tys(); let tys = substs.as_closure().upvar_tys();
univariant( univariant(
&tys.map(|ty| cx.layout_of(ty)).collect::<Result<Vec<_>, _>>()?, &tys.map(|ty| Ok(cx.layout_of(ty)?.layout)).collect::<Result<Vec<_>, _>>()?,
&ReprOptions::default(), &ReprOptions::default(),
StructKind::AlwaysSized, StructKind::AlwaysSized,
)? )?
@ -284,7 +284,7 @@ fn layout_of_uncached<'tcx>(
if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
univariant( univariant(
&tys.iter().map(|k| cx.layout_of(k)).collect::<Result<Vec<_>, _>>()?, &tys.iter().map(|k| Ok(cx.layout_of(k)?.layout)).collect::<Result<Vec<_>, _>>()?,
&ReprOptions::default(), &ReprOptions::default(),
kind, kind,
)? )?
@ -413,7 +413,7 @@ fn layout_of_uncached<'tcx>(
.map(|v| { .map(|v| {
v.fields v.fields
.iter() .iter()
.map(|field| cx.layout_of(field.ty(tcx, substs))) .map(|field| Ok(cx.layout_of(field.ty(tcx, substs))?.layout))
.collect::<Result<Vec<_>, _>>() .collect::<Result<Vec<_>, _>>()
}) })
.collect::<Result<IndexVec<VariantIdx, _>, _>>()?; .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
@ -631,23 +631,21 @@ fn generator_layout<'tcx>(
// `info.variant_fields` already accounts for the reserved variants, so no need to add them. // `info.variant_fields` already accounts for the reserved variants, so no need to add them.
let max_discr = (info.variant_fields.len() - 1) as u128; let max_discr = (info.variant_fields.len() - 1) as u128;
let discr_int = Integer::fit_unsigned(max_discr); let discr_int = Integer::fit_unsigned(max_discr);
let discr_int_ty = discr_int.to_ty(tcx, false);
let tag = Scalar::Initialized { let tag = Scalar::Initialized {
value: Primitive::Int(discr_int, false), value: Primitive::Int(discr_int, false),
valid_range: WrappingRange { start: 0, end: max_discr }, valid_range: WrappingRange { start: 0, end: max_discr },
}; };
let tag_layout = cx.tcx.intern_layout(LayoutS::scalar(cx, tag)); let tag_layout = cx.tcx.intern_layout(LayoutS::scalar(cx, tag));
let tag_layout = TyAndLayout { ty: discr_int_ty, layout: tag_layout };
let promoted_layouts = ineligible_locals let promoted_layouts = ineligible_locals
.iter() .iter()
.map(|local| subst_field(info.field_tys[local].ty)) .map(|local| subst_field(info.field_tys[local].ty))
.map(|ty| tcx.mk_maybe_uninit(ty)) .map(|ty| tcx.mk_maybe_uninit(ty))
.map(|ty| cx.layout_of(ty)); .map(|ty| Ok(cx.layout_of(ty)?.layout));
let prefix_layouts = substs let prefix_layouts = substs
.as_generator() .as_generator()
.prefix_tys() .prefix_tys()
.map(|ty| cx.layout_of(ty)) .map(|ty| Ok(cx.layout_of(ty)?.layout))
.chain(iter::once(Ok(tag_layout))) .chain(iter::once(Ok(tag_layout)))
.chain(promoted_layouts) .chain(promoted_layouts)
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
@ -716,7 +714,9 @@ fn generator_layout<'tcx>(
let mut variant = univariant_uninterned( let mut variant = univariant_uninterned(
cx, cx,
ty, ty,
&variant_only_tys.map(|ty| cx.layout_of(ty)).collect::<Result<Vec<_>, _>>()?, &variant_only_tys
.map(|ty| Ok(cx.layout_of(ty)?.layout))
.collect::<Result<Vec<_>, _>>()?,
&ReprOptions::default(), &ReprOptions::default(),
StructKind::Prefixed(prefix_size, prefix_align.abi), StructKind::Prefixed(prefix_size, prefix_align.abi),
)?; )?;

View file

@ -295,8 +295,6 @@ pub fn nt_success(status: NTSTATUS) -> bool {
status >= 0 status >= 0
} }
// "RNG\0"
pub const BCRYPT_RNG_ALGORITHM: &[u16] = &[b'R' as u16, b'N' as u16, b'G' as u16, 0];
pub const BCRYPT_USE_SYSTEM_PREFERRED_RNG: DWORD = 0x00000002; pub const BCRYPT_USE_SYSTEM_PREFERRED_RNG: DWORD = 0x00000002;
#[repr(C)] #[repr(C)]
@ -834,6 +832,10 @@ if #[cfg(not(target_vendor = "uwp"))] {
#[link(name = "advapi32")] #[link(name = "advapi32")]
extern "system" { extern "system" {
// Forbidden when targeting UWP
#[link_name = "SystemFunction036"]
pub fn RtlGenRandom(RandomBuffer: *mut u8, RandomBufferLength: ULONG) -> BOOLEAN;
// Allowed but unused by UWP // Allowed but unused by UWP
pub fn OpenProcessToken( pub fn OpenProcessToken(
ProcessHandle: HANDLE, ProcessHandle: HANDLE,
@ -1258,13 +1260,6 @@ extern "system" {
cbBuffer: ULONG, cbBuffer: ULONG,
dwFlags: ULONG, dwFlags: ULONG,
) -> NTSTATUS; ) -> NTSTATUS;
pub fn BCryptOpenAlgorithmProvider(
phalgorithm: *mut BCRYPT_ALG_HANDLE,
pszAlgId: LPCWSTR,
pszimplementation: LPCWSTR,
dwflags: ULONG,
) -> NTSTATUS;
pub fn BCryptCloseAlgorithmProvider(hAlgorithm: BCRYPT_ALG_HANDLE, dwFlags: ULONG) -> NTSTATUS;
} }
// Functions that aren't available on every version of Windows that we support, // Functions that aren't available on every version of Windows that we support,

View file

@ -1,106 +1,39 @@
//! # Random key generation use crate::io;
//!
//! This module wraps the RNG provided by the OS. There are a few different
//! ways to interface with the OS RNG so it's worth exploring each of the options.
//! Note that at the time of writing these all go through the (undocumented)
//! `bcryptPrimitives.dll` but they use different routes to get there.
//!
//! Originally we were using [`RtlGenRandom`], however that function is
//! deprecated and warns it "may be altered or unavailable in subsequent versions".
//!
//! So we switched to [`BCryptGenRandom`] with the `BCRYPT_USE_SYSTEM_PREFERRED_RNG`
//! flag to query and find the system configured RNG. However, this change caused a small
//! but significant number of users to experience panics caused by a failure of
//! this function. See [#94098].
//!
//! The current version falls back to using `BCryptOpenAlgorithmProvider` if
//! `BCRYPT_USE_SYSTEM_PREFERRED_RNG` fails for any reason.
//!
//! [#94098]: https://github.com/rust-lang/rust/issues/94098
//! [`RtlGenRandom`]: https://docs.microsoft.com/en-us/windows/win32/api/ntsecapi/nf-ntsecapi-rtlgenrandom
//! [`BCryptGenRandom`]: https://docs.microsoft.com/en-us/windows/win32/api/bcrypt/nf-bcrypt-bcryptgenrandom
use crate::mem; use crate::mem;
use crate::ptr; use crate::ptr;
use crate::sys::c; use crate::sys::c;
/// Generates high quality secure random keys for use by [`HashMap`].
///
/// This is used to seed the default [`RandomState`].
///
/// [`HashMap`]: crate::collections::HashMap
/// [`RandomState`]: crate::collections::hash_map::RandomState
pub fn hashmap_random_keys() -> (u64, u64) { pub fn hashmap_random_keys() -> (u64, u64) {
Rng::SYSTEM.gen_random_keys().unwrap_or_else(fallback_rng) let mut v = (0, 0);
} let ret = unsafe {
c::BCryptGenRandom(
struct Rng { ptr::null_mut(),
algorithm: c::BCRYPT_ALG_HANDLE, &mut v as *mut _ as *mut u8,
flags: u32, mem::size_of_val(&v) as c::ULONG,
} c::BCRYPT_USE_SYSTEM_PREFERRED_RNG,
impl Rng {
const SYSTEM: Self = unsafe { Self::new(ptr::null_mut(), c::BCRYPT_USE_SYSTEM_PREFERRED_RNG) };
/// Create the RNG from an existing algorithm handle.
///
/// # Safety
///
/// The handle must either be null or a valid algorithm handle.
const unsafe fn new(algorithm: c::BCRYPT_ALG_HANDLE, flags: u32) -> Self {
Self { algorithm, flags }
}
/// Open a handle to the RNG algorithm.
fn open() -> Result<Self, c::NTSTATUS> {
use crate::sync::atomic::AtomicPtr;
use crate::sync::atomic::Ordering::{Acquire, Release};
// An atomic is used so we don't need to reopen the handle every time.
static HANDLE: AtomicPtr<crate::ffi::c_void> = AtomicPtr::new(ptr::null_mut());
let mut handle = HANDLE.load(Acquire);
if handle.is_null() {
let status = unsafe {
c::BCryptOpenAlgorithmProvider(
&mut handle,
c::BCRYPT_RNG_ALGORITHM.as_ptr(),
ptr::null(),
0,
) )
}; };
if c::nt_success(status) { if c::nt_success(ret) { v } else { fallback_rng() }
// If another thread opens a handle first then use that handle instead.
let result = HANDLE.compare_exchange(ptr::null_mut(), handle, Release, Acquire);
if let Err(previous_handle) = result {
// Close our handle and return the previous one.
unsafe { c::BCryptCloseAlgorithmProvider(handle, 0) };
handle = previous_handle;
}
Ok(unsafe { Self::new(handle, 0) })
} else {
Err(status)
}
} else {
Ok(unsafe { Self::new(handle, 0) })
}
} }
fn gen_random_keys(self) -> Result<(u64, u64), c::NTSTATUS> { /// Generate random numbers using the fallback RNG function (RtlGenRandom)
let mut v = (0, 0); ///
let status = unsafe { /// This is necessary because of a failure to load the SysWOW64 variant of the
let size = mem::size_of_val(&v).try_into().unwrap(); /// bcryptprimitives.dll library from code that lives in bcrypt.dll
c::BCryptGenRandom(self.algorithm, ptr::addr_of_mut!(v).cast(), size, self.flags) /// See <https://bugzilla.mozilla.org/show_bug.cgi?id=1788004#c9>
}; #[cfg(not(target_vendor = "uwp"))]
if c::nt_success(status) { Ok(v) } else { Err(status) }
}
}
/// Generate random numbers using the fallback RNG function
#[inline(never)] #[inline(never)]
fn fallback_rng(rng_status: c::NTSTATUS) -> (u64, u64) { fn fallback_rng() -> (u64, u64) {
match Rng::open().and_then(|rng| rng.gen_random_keys()) { let mut v = (0, 0);
Ok(keys) => keys, let ret =
Err(status) => { unsafe { c::RtlGenRandom(&mut v as *mut _ as *mut u8, mem::size_of_val(&v) as c::ULONG) };
panic!("RNG broken: {rng_status:#x}, fallback RNG broken: {status:#x}")
} if ret != 0 { v } else { panic!("fallback RNG broken: {}", io::Error::last_os_error()) }
} }
/// We can't use RtlGenRandom with UWP, so there is no fallback
#[cfg(target_vendor = "uwp")]
#[inline(never)]
fn fallback_rng() -> (u64, u64) {
panic!("fallback RNG broken: RtlGenRandom() not supported on UWP");
} }
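For reference, here is a standalone sketch (not std's internal code) of the pattern the new rand.rs above implements: ask the system-preferred RNG via BCryptGenRandom first, and only fall back to RtlGenRandom (exported as SystemFunction036 from advapi32) if that call fails. Windows-only; the extern declarations are hand-written to mirror the ones added in the c.rs hunk above, and the function name random_keys is made up for the example.

use std::ffi::c_void;
use std::mem;
use std::ptr;

type NTSTATUS = i32;
type ULONG = u32;
type BOOLEAN = u8;

// Flag value taken from the c.rs hunk above.
const BCRYPT_USE_SYSTEM_PREFERRED_RNG: ULONG = 0x0000_0002;

#[link(name = "bcrypt")]
extern "system" {
    fn BCryptGenRandom(
        hAlgorithm: *mut c_void,
        pbBuffer: *mut u8,
        cbBuffer: ULONG,
        dwFlags: ULONG,
    ) -> NTSTATUS;
}

#[link(name = "advapi32")]
extern "system" {
    #[link_name = "SystemFunction036"]
    fn RtlGenRandom(RandomBuffer: *mut u8, RandomBufferLength: ULONG) -> BOOLEAN;
}

fn random_keys() -> (u64, u64) {
    let mut v = (0u64, 0u64);
    let len = mem::size_of_val(&v) as ULONG;
    // NTSTATUS success codes are non-negative (same check as c::nt_success).
    let status = unsafe {
        BCryptGenRandom(
            ptr::null_mut(),
            &mut v as *mut _ as *mut u8,
            len,
            BCRYPT_USE_SYSTEM_PREFERRED_RNG,
        )
    };
    if status >= 0 {
        return v;
    }
    // Fallback path restored by this commit: RtlGenRandom returns non-zero on success.
    let ok = unsafe { RtlGenRandom(&mut v as *mut _ as *mut u8, len) };
    assert!(ok != 0, "both RNG sources failed");
    v
}

fn main() {
    println!("{:?}", random_keys());
}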

View file

@ -10,7 +10,7 @@ use rustc_middle::ty::layout::LayoutError;
use rustc_middle::ty::{self, Adt, TyCtxt};
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Symbol};
-use rustc_target::abi::{LayoutS, Primitive, TagEncoding, VariantIdx, Variants};
+use rustc_target::abi::{LayoutS, Primitive, TagEncoding, Variants};
use std::cmp::Ordering;
use std::fmt;
use std::rc::Rc;
@ -1833,7 +1833,7 @@ fn document_non_exhaustive(w: &mut Buffer, item: &clean::Item) {
}
fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) {
-    fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS<VariantIdx>, tag_size: u64) {
+    fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS, tag_size: u64) {
        if layout.abi.is_unsized() {
            write!(w, "(unsized)");
        } else {

View file

@ -0,0 +1,26 @@
- // MIR for `mutate_discriminant` before DataflowConstProp
+ // MIR for `mutate_discriminant` after DataflowConstProp
fn mutate_discriminant() -> u8 {
let mut _0: u8; // return place in scope 0 at $DIR/enum.rs:+0:29: +0:31
let mut _1: std::option::Option<NonZeroUsize>; // in scope 0 at $SRC_DIR/core/src/intrinsics/mir.rs:LL:COL
let mut _2: isize; // in scope 0 at $SRC_DIR/core/src/intrinsics/mir.rs:LL:COL
bb0: {
discriminant(_1) = 1; // scope 0 at $DIR/enum.rs:+4:13: +4:34
(((_1 as variant#1).0: NonZeroUsize).0: usize) = const 0_usize; // scope 0 at $DIR/enum.rs:+6:13: +6:64
_2 = discriminant(_1); // scope 0 at $SRC_DIR/core/src/intrinsics/mir.rs:LL:COL
switchInt(_2) -> [0: bb1, otherwise: bb2]; // scope 0 at $DIR/enum.rs:+9:13: +12:14
}
bb1: {
_0 = const 1_u8; // scope 0 at $DIR/enum.rs:+15:13: +15:20
return; // scope 0 at $DIR/enum.rs:+16:13: +16:21
}
bb2: {
_0 = const 2_u8; // scope 0 at $DIR/enum.rs:+19:13: +19:20
unreachable; // scope 0 at $DIR/enum.rs:+20:13: +20:26
}
}

View file

@ -1,13 +1,52 @@
// unit-test: DataflowConstProp
-// Not trackable, because variants could be aliased.
+#![feature(custom_mir, core_intrinsics, rustc_attrs)]
+
+use std::intrinsics::mir::*;
enum E {
    V1(i32),
    V2(i32)
}
-// EMIT_MIR enum.main.DataflowConstProp.diff
-fn main() {
+// EMIT_MIR enum.simple.DataflowConstProp.diff
+fn simple() {
    let e = E::V1(0);
    let x = match e { E::V1(x) => x, E::V2(x) => x };
}
#[rustc_layout_scalar_valid_range_start(1)]
#[rustc_nonnull_optimization_guaranteed]
struct NonZeroUsize(usize);
// EMIT_MIR enum.mutate_discriminant.DataflowConstProp.diff
#[custom_mir(dialect = "runtime", phase = "post-cleanup")]
fn mutate_discriminant() -> u8 {
mir!(
let x: Option<NonZeroUsize>;
{
SetDiscriminant(x, 1);
// This assignment overwrites the niche in which the discriminant is stored.
place!(Field(Field(Variant(x, 1), 0), 0)) = 0_usize;
// So we cannot know the value of this discriminant.
let a = Discriminant(x);
match a {
0 => bb1,
_ => bad,
}
}
bb1 = {
RET = 1;
Return()
}
bad = {
RET = 2;
Unreachable()
}
)
}
fn main() {
simple();
mutate_discriminant();
}
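For context, a rough safe-Rust analogue (hypothetical, not part of the test suite) of what the custom MIR above exercises: Option<NonZeroUsize> stores its discriminant in the niche (the value 0), so overwriting the payload bytes with 0 silently flips the observed variant, which is exactly why the dataflow pass must forget the discriminant after such a write. This relies on the documented guarantee that Option<NonZeroUsize> has the same size and alignment as usize.

use std::num::NonZeroUsize;

fn main() {
    let mut x: Option<NonZeroUsize> = NonZeroUsize::new(1);
    assert!(x.is_some());
    unsafe {
        // Writing 0 into the payload overwrites the niche that encodes the
        // discriminant, turning `Some(1)` into `None` without any
        // `SetDiscriminant` ever running.
        *(&mut x as *mut Option<NonZeroUsize> as *mut usize) = 0;
    }
    assert!(x.is_none());
}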

View file

@ -1,8 +1,8 @@
-- // MIR for `main` before DataflowConstProp
-+ // MIR for `main` after DataflowConstProp
+- // MIR for `simple` before DataflowConstProp
++ // MIR for `simple` after DataflowConstProp
- fn main() -> () {
+ fn simple() -> () {
- let mut _0: (); // return place in scope 0 at $DIR/enum.rs:+0:11: +0:11
+ let mut _0: (); // return place in scope 0 at $DIR/enum.rs:+0:13: +0:13
let _1: E; // in scope 0 at $DIR/enum.rs:+1:9: +1:10
let mut _3: isize; // in scope 0 at $DIR/enum.rs:+2:23: +2:31
scope 1 {
@ -25,8 +25,10 @@
StorageLive(_1); // scope 0 at $DIR/enum.rs:+1:9: +1:10
_1 = E::V1(const 0_i32); // scope 0 at $DIR/enum.rs:+1:13: +1:21
StorageLive(_2); // scope 1 at $DIR/enum.rs:+2:9: +2:10
- _3 = discriminant(_1); // scope 1 at $DIR/enum.rs:+2:19: +2:20
- switchInt(move _3) -> [0: bb3, 1: bb1, otherwise: bb2]; // scope 1 at $DIR/enum.rs:+2:13: +2:20
+ - _3 = discriminant(_1); // scope 1 at $DIR/enum.rs:+2:19: +2:20
+ - switchInt(move _3) -> [0: bb3, 1: bb1, otherwise: bb2]; // scope 1 at $DIR/enum.rs:+2:13: +2:20
+ + _3 = const 0_isize; // scope 1 at $DIR/enum.rs:+2:19: +2:20
+ + switchInt(const 0_isize) -> [0: bb3, 1: bb1, otherwise: bb2]; // scope 1 at $DIR/enum.rs:+2:13: +2:20
}
bb1: {
@ -50,7 +52,7 @@
}
bb4: {
- _0 = const (); // scope 0 at $DIR/enum.rs:+0:11: +3:2
+ _0 = const (); // scope 0 at $DIR/enum.rs:+0:13: +3:2
StorageDead(_2); // scope 1 at $DIR/enum.rs:+3:1: +3:2
StorageDead(_1); // scope 0 at $DIR/enum.rs:+3:1: +3:2
return; // scope 0 at $DIR/enum.rs:+3:2: +3:2

View file

@ -0,0 +1,10 @@
// Regression test for #63033.
// check-pass
// edition: 2018
async fn test1(_: &'static u8, _: &'_ u8, _: &'_ u8) {}
async fn test2<'s>(_: &'s u8, _: &'_ &'s u8, _: &'_ &'s u8) {}
fn main() {}

View file

@ -0,0 +1,4 @@
const NUMBERS: [u8; 3] = [10, 20];
//~^ ERROR mismatched types
//~^^ HELP consider specifying the actual array length
fn main() {}

View file

@ -0,0 +1,11 @@
error[E0308]: mismatched types
--> $DIR/array-literal-len-mismatch.rs:1:26
|
LL | const NUMBERS: [u8; 3] = [10, 20];
| - ^^^^^^^^ expected an array with a fixed size of 3 elements, found one with 2 elements
| |
| help: consider specifying the actual array length: `2`
error: aborting due to previous error
For more information about this error, try `rustc --explain E0308`.
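Applying the suggestion from the output above (an illustrative fix, not part of the test suite), the constant compiles once the length matches the initializer:

// Length adjusted to `2` as the new help message suggests.
const NUMBERS: [u8; 2] = [10, 20];

fn main() {
    assert_eq!(NUMBERS.len(), 2);
}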

View file

@ -2,13 +2,17 @@ error[E0308]: mismatched types
--> $DIR/const-array-oob-arith.rs:5:45
|
LL | const BLUB: [i32; (ARR[0] - 40) as usize] = [5];
- | ^^^ expected an array with a fixed size of 2 elements, found one with 1 element
+ | ---------------------- ^^^ expected an array with a fixed size of 2 elements, found one with 1 element
+ | |
+ | help: consider specifying the actual array length: `1`
error[E0308]: mismatched types
--> $DIR/const-array-oob-arith.rs:8:44
|
LL | const BOO: [i32; (ARR[0] - 41) as usize] = [5, 99];
- | ^^^^^^^ expected an array with a fixed size of 1 element, found one with 2 elements
+ | ---------------------- ^^^^^^^ expected an array with a fixed size of 1 element, found one with 2 elements
+ | |
+ | help: consider specifying the actual array length: `2`
error: aborting due to 2 previous errors

View file

@ -0,0 +1,12 @@
fn returns_arr() -> [u8; 2] {
[1, 2]
}
fn main() {
let wrong: [u8; 3] = [10, 20];
//~^ ERROR mismatched types
//~^^ HELP consider specifying the actual array length
let wrong: [u8; 3] = returns_arr();
//~^ ERROR mismatched types
//~^^ HELP consider specifying the actual array length
}

View file

@ -0,0 +1,21 @@
error[E0308]: mismatched types
--> $DIR/array-len-mismatch.rs:6:26
|
LL | let wrong: [u8; 3] = [10, 20];
| ------- ^^^^^^^^ expected an array with a fixed size of 3 elements, found one with 2 elements
| | |
| | help: consider specifying the actual array length: `2`
| expected due to this
error[E0308]: mismatched types
--> $DIR/array-len-mismatch.rs:9:26
|
LL | let wrong: [u8; 3] = returns_arr();
| ------- ^^^^^^^^^^^^^ expected an array with a fixed size of 3 elements, found one with 2 elements
| | |
| | help: consider specifying the actual array length: `2`
| expected due to this
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0308`.

View file

@ -0,0 +1,43 @@
// ... continued from ./min-choice.rs
// check-fail
trait Cap<'a> {}
impl<T> Cap<'_> for T {}
fn type_test<'a, T: 'a>() -> &'a u8 { &0 }
// Make sure we don't pick `'b`.
fn test_b<'a, 'b, 'c, T>() -> impl Cap<'a> + Cap<'b> + Cap<'c>
where
'a: 'b,
'a: 'c,
T: 'b,
{
type_test::<'_, T>() // This should pass if we pick 'b.
//~^ ERROR the parameter type `T` may not live long enough
}
// Make sure we don't pick `'c`.
fn test_c<'a, 'b, 'c, T>() -> impl Cap<'a> + Cap<'b> + Cap<'c>
where
'a: 'b,
'a: 'c,
T: 'c,
{
type_test::<'_, T>() // This should pass if we pick 'c.
//~^ ERROR the parameter type `T` may not live long enough
}
// We need to pick min_choice from `['b, 'c]`, but it's ambiguous which one to pick because
// they're incomparable.
fn test_ambiguous<'a, 'b, 'c>(s: &'a u8) -> impl Cap<'b> + Cap<'c>
where
'a: 'b,
'a: 'c,
{
s
//~^ ERROR captures lifetime that does not appear in bounds
}
fn main() {}

View file

@ -0,0 +1,40 @@
error[E0309]: the parameter type `T` may not live long enough
--> $DIR/min-choice-reject-ambiguous.rs:17:5
|
LL | type_test::<'_, T>() // This should pass if we pick 'b.
| ^^^^^^^^^^^^^^^^^^ ...so that the type `T` will meet its required lifetime bounds
|
help: consider adding an explicit lifetime bound...
|
LL | T: 'b + 'a,
| ++++
error[E0309]: the parameter type `T` may not live long enough
--> $DIR/min-choice-reject-ambiguous.rs:28:5
|
LL | type_test::<'_, T>() // This should pass if we pick 'c.
| ^^^^^^^^^^^^^^^^^^ ...so that the type `T` will meet its required lifetime bounds
|
help: consider adding an explicit lifetime bound...
|
LL | T: 'c + 'a,
| ++++
error[E0700]: hidden type for `impl Cap<'b> + Cap<'c>` captures lifetime that does not appear in bounds
--> $DIR/min-choice-reject-ambiguous.rs:39:5
|
LL | fn test_ambiguous<'a, 'b, 'c>(s: &'a u8) -> impl Cap<'b> + Cap<'c>
| -- hidden type `&'a u8` captures the lifetime `'a` as defined here
...
LL | s
| ^
|
help: to declare that `impl Cap<'b> + Cap<'c>` captures `'a`, you can add an explicit `'a` lifetime bound
|
LL | fn test_ambiguous<'a, 'b, 'c>(s: &'a u8) -> impl Cap<'b> + Cap<'c> + 'a
| ++++
error: aborting due to 3 previous errors
Some errors have detailed explanations: E0309, E0700.
For more information about an error, try `rustc --explain E0309`.
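For illustration, a hypothetical fixed version of test_b (not part of the test suite): adding the `'a` bound that the E0309 help above suggests lets the member-constraint solver settle on `'a`, the minimum unambiguous choice, so the body type-checks (compare test_static in ./min-choice.rs below).

trait Cap<'a> {}
impl<T> Cap<'_> for T {}

fn type_test<'a, T: 'a>() -> &'a u8 { &0 }

// Same as `test_b`, but with the suggested extra `'a` bound on `T`.
fn test_b_fixed<'a, 'b, 'c, T>() -> impl Cap<'a> + Cap<'b> + Cap<'c>
where
    'a: 'b,
    'a: 'c,
    T: 'b + 'a,
{
    type_test::<'_, T>()
}

fn main() {}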

View file

@ -0,0 +1,34 @@
// Assuming that the hidden type in these tests is `&'_#15r u8`,
// we have a member constraint: `'_#15r member ['static, 'a, 'b, 'c]`.
//
// Make sure we pick up the minimum non-ambiguous region among them.
// We will have to exclude `['b, 'c]` because they're incomparable,
// and then we should pick `'a` because we know `'static: 'a`.
// check-pass
trait Cap<'a> {}
impl<T> Cap<'_> for T {}
fn type_test<'a, T: 'a>() -> &'a u8 { &0 }
// Basic test: make sure we don't bail out because 'b and 'c are incomparable.
fn basic<'a, 'b, 'c>() -> impl Cap<'a> + Cap<'b> + Cap<'c>
where
'a: 'b,
'a: 'c,
{
&0
}
// Make sure we don't pick `'static`.
fn test_static<'a, 'b, 'c, T>() -> impl Cap<'a> + Cap<'b> + Cap<'c>
where
'a: 'b,
'a: 'c,
T: 'a,
{
type_test::<'_, T>() // This will fail if we pick 'static
}
fn main() {}

View file

@ -0,0 +1,33 @@
// Nested impl-traits can impose different member constraints on the same region variable.
// check-fail
trait Cap<'a> {}
impl<T> Cap<'_> for T {}
// Assuming the hidden type is `[&'_#15r u8; 1]`, we have two distinct member constraints:
// - '_#15r member ['static, 'a, 'b] // from outer impl-trait
// - '_#15r member ['static, 'a, 'b] // from inner impl-trait
// To satisfy both we can choose 'a or 'b, so it's a failure due to ambiguity.
fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>
where
's: 'a,
's: 'b,
{
[a]
//~^ E0700
//~| E0700
}
// Same as the above but with late-bound regions.
fn fail_late_bound<'s, 'a, 'b>(
a: &'s u8,
_: &'a &'s u8,
_: &'b &'s u8,
) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b>> {
[a]
//~^ E0700
//~| E0700
}
fn main() {}

View file

@ -0,0 +1,75 @@
error[E0700]: hidden type for `impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>` captures lifetime that does not appear in bounds
--> $DIR/nested-impl-trait-fail.rs:17:5
|
LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>
| -- hidden type `[&'s u8; 1]` captures the lifetime `'s` as defined here
...
LL | [a]
| ^^^
|
help: to declare that `impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>` captures `'s`, you can add an explicit `'s` lifetime bound
|
LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b>> + 's
| ++++
help: to declare that `impl Cap<'a> + Cap<'b>` captures `'s`, you can add an explicit `'s` lifetime bound
|
LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b> + 's>
| ++++
error[E0700]: hidden type for `impl Cap<'a> + Cap<'b>` captures lifetime that does not appear in bounds
--> $DIR/nested-impl-trait-fail.rs:17:5
|
LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>
| -- hidden type `&'s u8` captures the lifetime `'s` as defined here
...
LL | [a]
| ^^^
|
help: to declare that `impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>` captures `'s`, you can add an explicit `'s` lifetime bound
|
LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b>> + 's
| ++++
help: to declare that `impl Cap<'a> + Cap<'b>` captures `'s`, you can add an explicit `'s` lifetime bound
|
LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b> + 's>
| ++++
error[E0700]: hidden type for `impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>` captures lifetime that does not appear in bounds
--> $DIR/nested-impl-trait-fail.rs:28:5
|
LL | fn fail_late_bound<'s, 'a, 'b>(
| -- hidden type `[&'s u8; 1]` captures the lifetime `'s` as defined here
...
LL | [a]
| ^^^
|
help: to declare that `impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>` captures `'s`, you can add an explicit `'s` lifetime bound
|
LL | ) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b>> + 's {
| ++++
help: to declare that `impl Cap<'a> + Cap<'b>` captures `'s`, you can add an explicit `'s` lifetime bound
|
LL | ) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b> + 's> {
| ++++
error[E0700]: hidden type for `impl Cap<'a> + Cap<'b>` captures lifetime that does not appear in bounds
--> $DIR/nested-impl-trait-fail.rs:28:5
|
LL | fn fail_late_bound<'s, 'a, 'b>(
| -- hidden type `&'s u8` captures the lifetime `'s` as defined here
...
LL | [a]
| ^^^
|
help: to declare that `impl IntoIterator<Item = impl Cap<'a> + Cap<'b>>` captures `'s`, you can add an explicit `'s` lifetime bound
|
LL | ) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b>> + 's {
| ++++
help: to declare that `impl Cap<'a> + Cap<'b>` captures `'s`, you can add an explicit `'s` lifetime bound
|
LL | ) -> impl IntoIterator<Item = impl Cap<'a> + Cap<'b> + 's> {
| ++++
error: aborting due to 4 previous errors
For more information about this error, try `rustc --explain E0700`.

View file

@ -0,0 +1,29 @@
// Nested impl-traits can impose different member constraints on the same region variable.
// check-pass
trait Cap<'a> {}
impl<T> Cap<'_> for T {}
// Assuming the hidden type is `[&'_#15r u8; 1]`, we have two distinct member constraints:
// - '_#15r member ['static, 'a, 'b] // from outer impl-trait
// - '_#15r member ['static, 'a] // from inner impl-trait
// To satisfy both we can only choose 'a.
fn pass_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator<Item = impl Cap<'a>> + Cap<'b>
where
's: 'a,
's: 'b,
{
[a]
}
// Same as the above but with late-bound regions.
fn pass_late_bound<'s, 'a, 'b>(
a: &'s u8,
_: &'a &'s u8,
_: &'b &'s u8,
) -> impl IntoIterator<Item = impl Cap<'a>> + Cap<'b> {
[a]
}
fn main() {}