Store field indices in DeconstructedPat to avoid virtual wildcards
parent c1e68860d0
commit 6ae9fa31f0

4 changed files with 102 additions and 84 deletions
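The one-line summary is terse, so here is a minimal, self-contained sketch of the idea behind the change. The names `SketchPat`, `IndexedSketchPat` and `expand_fields` are illustrative stand-ins, not the compiler's actual types: instead of storing one subpattern per field and padding the unmentioned fields with freshly allocated wildcard patterns, a pattern keeps only the subpatterns the user actually wrote, each tagged with its field index, and consumers expand that sparse list into a dense row on demand.

// Minimal sketch (assumed, simplified types -- not rustc's API).
#[derive(Debug, Clone)]
enum SketchPat {
    Wild,                          // `_`
    Lit(i32),                      // a literal subpattern, e.g. `0`
    Struct(Vec<IndexedSketchPat>), // only the mentioned fields, each with its index
}

#[derive(Debug, Clone)]
struct IndexedSketchPat {
    idx: usize,
    pat: SketchPat,
}

/// Expand the sparse field list of a struct pattern into a dense row of `arity`
/// entries, filling unmentioned fields with wildcards. This mirrors what the
/// `specialize` and `Debug` changes in the diff below do with `IndexedPat`.
fn expand_fields(fields: &[IndexedSketchPat], arity: usize) -> Vec<SketchPat> {
    let mut row = vec![SketchPat::Wild; arity];
    for ipat in fields {
        row[ipat.idx] = ipat.pat.clone();
    }
    row
}

fn main() {
    // `SomeStruct { field2: 0, .. }` on a 4-field struct: only field 2 is stored.
    let pat = SketchPat::Struct(vec![IndexedSketchPat { idx: 2, pat: SketchPat::Lit(0) }]);
    if let SketchPat::Struct(fields) = &pat {
        // Dense row: [Wild, Wild, Lit(0), Wild]
        println!("{:?}", expand_fields(fields, 4));
    }
}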
@@ -917,7 +917,9 @@ fn report_arm_reachability<'p, 'tcx>(
 fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool {
     match pat.ctor() {
         Constructor::Wildcard => true,
-        Constructor::Struct | Constructor::Ref => pat.iter_fields().all(|pat| pat_is_catchall(pat)),
+        Constructor::Struct | Constructor::Ref => {
+            pat.iter_fields().all(|ipat| pat_is_catchall(&ipat.pat))
+        }
         _ => false,
     }
 }
@@ -20,12 +20,18 @@ impl PatId {
     }
 }
 
+/// A pattern with an index denoting which field it corresponds to.
+pub struct IndexedPat<Cx: TypeCx> {
+    pub idx: usize,
+    pub pat: DeconstructedPat<Cx>,
+}
+
 /// Values and patterns can be represented as a constructor applied to some fields. This represents
 /// a pattern in this form. A `DeconstructedPat` will almost always come from user input; the only
 /// exception are some `Wildcard`s introduced during pattern lowering.
 pub struct DeconstructedPat<Cx: TypeCx> {
     ctor: Constructor<Cx>,
-    fields: Vec<DeconstructedPat<Cx>>,
+    fields: Vec<IndexedPat<Cx>>,
     /// The number of fields in this pattern. E.g. if the pattern is `SomeStruct { field12: true, ..
     /// }` this would be the total number of fields of the struct.
     /// This is also the same as `self.ctor.arity(self.ty)`.
@@ -39,20 +45,9 @@ pub struct DeconstructedPat<Cx: TypeCx> {
 }
 
 impl<Cx: TypeCx> DeconstructedPat<Cx> {
-    pub fn wildcard(ty: Cx::Ty) -> Self {
-        DeconstructedPat {
-            ctor: Wildcard,
-            fields: Vec::new(),
-            arity: 0,
-            ty,
-            data: None,
-            uid: PatId::new(),
-        }
-    }
-
     pub fn new(
         ctor: Constructor<Cx>,
-        fields: Vec<DeconstructedPat<Cx>>,
+        fields: Vec<IndexedPat<Cx>>,
         arity: usize,
         ty: Cx::Ty,
         data: Cx::PatData,
@@ -60,6 +55,10 @@ impl<Cx: TypeCx> DeconstructedPat<Cx> {
         DeconstructedPat { ctor, fields, arity, ty, data: Some(data), uid: PatId::new() }
     }
 
+    pub fn at_index(self, idx: usize) -> IndexedPat<Cx> {
+        IndexedPat { idx, pat: self }
+    }
+
     pub(crate) fn is_or_pat(&self) -> bool {
         matches!(self.ctor, Or)
     }
@@ -75,8 +74,11 @@ impl<Cx: TypeCx> DeconstructedPat<Cx> {
     pub fn data(&self) -> Option<&Cx::PatData> {
         self.data.as_ref()
     }
+    pub fn arity(&self) -> usize {
+        self.arity
+    }
 
-    pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a DeconstructedPat<Cx>> {
+    pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a IndexedPat<Cx>> {
         self.fields.iter()
     }
 
@@ -85,37 +87,41 @@ impl<Cx: TypeCx> DeconstructedPat<Cx> {
     pub(crate) fn specialize<'a>(
         &'a self,
         other_ctor: &Constructor<Cx>,
-        ctor_arity: usize,
+        other_ctor_arity: usize,
     ) -> SmallVec<[PatOrWild<'a, Cx>; 2]> {
-        let wildcard_sub_tys = || (0..ctor_arity).map(|_| PatOrWild::Wild).collect();
-        match (&self.ctor, other_ctor) {
-            // Return a wildcard for each field of `other_ctor`.
-            (Wildcard, _) => wildcard_sub_tys(),
-            // Skip this column.
-            (_, PrivateUninhabited) => smallvec![],
-            // The only non-trivial case: two slices of different arity. `other_slice` is
-            // guaranteed to have a larger arity, so we fill the middle part with enough
-            // wildcards to reach the length of the new, larger slice.
-            (
-                &Slice(self_slice @ Slice { kind: SliceKind::VarLen(prefix, suffix), .. }),
-                &Slice(other_slice),
-            ) if self_slice.arity() != other_slice.arity() => {
-                // Start with a slice of wildcards of the appropriate length.
-                let mut fields: SmallVec<[_; 2]> = wildcard_sub_tys();
-                // Fill in the fields from both ends.
-                let new_arity = fields.len();
-                for i in 0..prefix {
-                    fields[i] = PatOrWild::Pat(&self.fields[i]);
-                }
-                for i in 0..suffix {
-                    fields[new_arity - 1 - i] =
-                        PatOrWild::Pat(&self.fields[self.fields.len() - 1 - i]);
-                }
-                fields
-            }
-            _ => self.fields.iter().map(PatOrWild::Pat).collect(),
-        }
-    }
+        if matches!(other_ctor, PrivateUninhabited) {
+            // Skip this column.
+            return smallvec![];
+        }
+
+        // Start with a slice of wildcards of the appropriate length.
+        let mut fields: SmallVec<[_; 2]> = (0..other_ctor_arity).map(|_| PatOrWild::Wild).collect();
+        // Fill `fields` with our fields. The arities are known to be compatible.
+        match self.ctor {
+            // The only non-trivial case: two slices of different arity. `other_ctor` is guaranteed
+            // to have a larger arity, so we adjust the indices of the patterns in the suffix so
+            // that they are correctly positioned in the larger slice.
+            Slice(Slice { kind: SliceKind::VarLen(prefix, _), .. })
+                if self.arity != other_ctor_arity =>
+            {
+                for ipat in &self.fields {
+                    let new_idx = if ipat.idx < prefix {
+                        ipat.idx
+                    } else {
+                        // Adjust the indices in the suffix.
+                        ipat.idx + other_ctor_arity - self.arity
+                    };
+                    fields[new_idx] = PatOrWild::Pat(&ipat.pat);
+                }
+            }
+            _ => {
+                for ipat in &self.fields {
+                    fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
+                }
+            }
+        }
+        fields
+    }
 
     /// Walk top-down and call `it` in each place where a pattern occurs
     /// starting with the root pattern `walk` is called on. If `it` returns
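The variable-length slice arm above only shifts the indices of suffix subpatterns by `other_ctor_arity - self.arity`; prefix indices stay put. A tiny standalone check with made-up numbers (not taken from the diff) illustrates the remapping:

// Assumed example: a pattern `[a, b, .., y, z]` has self.arity = 4 (prefix = 2),
// with stored indices 0..=3. Specialized against a slice constructor of arity 7,
// prefix indices are unchanged and suffix indices shift to the end of the row.
fn main() {
    let (prefix, self_arity, other_ctor_arity) = (2usize, 4usize, 7usize);
    let new_indices: Vec<usize> = (0..self_arity)
        .map(|idx| if idx < prefix { idx } else { idx + other_ctor_arity - self_arity })
        .collect();
    assert_eq!(new_indices, vec![0, 1, 5, 6]); // a, b land at 0, 1; y, z land at 5, 6
    println!("{new_indices:?}");
}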
@@ -126,7 +132,7 @@ impl<Cx: TypeCx> DeconstructedPat<Cx> {
         }
 
         for p in self.iter_fields() {
-            p.walk(it)
+            p.pat.walk(it)
         }
     }
 }
@@ -146,6 +152,11 @@ impl<Cx: TypeCx> fmt::Debug for DeconstructedPat<Cx> {
         };
         let mut start_or_comma = || start_or_continue(", ");
 
+        let mut fields: Vec<_> = (0..self.arity).map(|_| PatOrWild::Wild).collect();
+        for ipat in self.iter_fields() {
+            fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
+        }
+
         match pat.ctor() {
             Struct | Variant(_) | UnionField => {
                 Cx::write_variant_name(f, pat)?;
@@ -153,7 +164,7 @@ impl<Cx: TypeCx> fmt::Debug for DeconstructedPat<Cx> {
                 // get the names of the fields. Instead we just display everything as a tuple
                 // struct, which should be good enough.
                 write!(f, "(")?;
-                for p in pat.iter_fields() {
+                for p in fields {
                     write!(f, "{}", start_or_comma())?;
                     write!(f, "{p:?}")?;
                 }
@@ -163,25 +174,23 @@ impl<Cx: TypeCx> fmt::Debug for DeconstructedPat<Cx> {
             // be careful to detect strings here. However a string literal pattern will never
             // be reported as a non-exhaustiveness witness, so we can ignore this issue.
             Ref => {
-                let subpattern = pat.iter_fields().next().unwrap();
-                write!(f, "&{:?}", subpattern)
+                write!(f, "&{:?}", &fields[0])
             }
             Slice(slice) => {
-                let mut subpatterns = pat.iter_fields();
                 write!(f, "[")?;
                 match slice.kind {
                     SliceKind::FixedLen(_) => {
-                        for p in subpatterns {
+                        for p in fields {
                             write!(f, "{}{:?}", start_or_comma(), p)?;
                         }
                     }
                     SliceKind::VarLen(prefix_len, _) => {
-                        for p in subpatterns.by_ref().take(prefix_len) {
+                        for p in &fields[..prefix_len] {
                             write!(f, "{}{:?}", start_or_comma(), p)?;
                         }
                         write!(f, "{}", start_or_comma())?;
                         write!(f, "..")?;
-                        for p in subpatterns {
+                        for p in &fields[prefix_len..] {
                             write!(f, "{}{:?}", start_or_comma(), p)?;
                         }
                     }
|
@ -196,7 +205,7 @@ impl<Cx: TypeCx> fmt::Debug for DeconstructedPat<Cx> {
|
||||||
Str(value) => write!(f, "{value:?}"),
|
Str(value) => write!(f, "{value:?}"),
|
||||||
Opaque(..) => write!(f, "<constant pattern>"),
|
Opaque(..) => write!(f, "<constant pattern>"),
|
||||||
Or => {
|
Or => {
|
||||||
for pat in pat.iter_fields() {
|
for pat in fields {
|
||||||
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
|
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@@ -254,9 +263,10 @@ impl<'p, Cx: TypeCx> PatOrWild<'p, Cx> {
     /// Expand this (possibly-nested) or-pattern into its alternatives.
     pub(crate) fn flatten_or_pat(self) -> SmallVec<[Self; 1]> {
         match self {
-            PatOrWild::Pat(pat) if pat.is_or_pat() => {
-                pat.iter_fields().flat_map(|p| PatOrWild::Pat(p).flatten_or_pat()).collect()
-            }
+            PatOrWild::Pat(pat) if pat.is_or_pat() => pat
+                .iter_fields()
+                .flat_map(|ipat| PatOrWild::Pat(&ipat.pat).flatten_or_pat())
+                .collect(),
             _ => smallvec![self],
         }
     }
@@ -446,7 +446,7 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
         let ty = cx.reveal_opaque_ty(pat.ty);
         let ctor;
         let arity;
-        let mut fields: Vec<_>;
+        let fields: Vec<_>;
         match &pat.kind {
             PatKind::AscribeUserType { subpattern, .. }
             | PatKind::InlineConstant { subpattern, .. } => return self.lower_pat(subpattern),
@@ -457,7 +457,7 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
                 arity = 0;
             }
             PatKind::Deref { subpattern } => {
-                fields = vec![self.lower_pat(subpattern)];
+                fields = vec![self.lower_pat(subpattern).at_index(0)];
                 arity = 1;
                 ctor = match ty.kind() {
                     // This is a box pattern.
@@ -471,16 +471,12 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
                     ty::Tuple(fs) => {
                         ctor = Struct;
                         arity = fs.len();
-                        fields = fs
+                        fields = subpatterns
                             .iter()
-                            .map(|ty| cx.reveal_opaque_ty(ty))
-                            .map(|ty| DeconstructedPat::wildcard(ty))
+                            .map(|ipat| self.lower_pat(&ipat.pattern).at_index(ipat.field.index()))
                             .collect();
-                        for pat in subpatterns {
-                            fields[pat.field.index()] = self.lower_pat(&pat.pattern);
-                        }
                     }
-                    ty::Adt(adt, args) if adt.is_box() => {
+                    ty::Adt(adt, _) if adt.is_box() => {
                         // The only legal patterns of type `Box` (outside `std`) are `_` and box
                         // patterns. If we're here we can assume this is a box pattern.
                         // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
@@ -494,13 +490,12 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
                         // solution when we introduce generalized deref patterns. Also need to
                        // prevent mixing of those two options.
                        let pattern = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
-                        let pat = if let Some(pat) = pattern {
-                            self.lower_pat(&pat.pattern)
+                        if let Some(pat) = pattern {
+                            fields = vec![self.lower_pat(&pat.pattern).at_index(0)];
                         } else {
-                            DeconstructedPat::wildcard(self.reveal_opaque_ty(args.type_at(0)))
-                        };
+                            fields = vec![];
+                        }
                         ctor = Struct;
-                        fields = vec![pat];
                         arity = 1;
                     }
                     ty::Adt(adt, _) => {
|
@ -513,13 +508,10 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
|
||||||
let variant =
|
let variant =
|
||||||
&adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
&adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
||||||
arity = variant.fields.len();
|
arity = variant.fields.len();
|
||||||
fields = cx
|
fields = subpatterns
|
||||||
.variant_sub_tys(ty, variant)
|
.iter()
|
||||||
.map(|(_, ty)| DeconstructedPat::wildcard(ty))
|
.map(|ipat| self.lower_pat(&ipat.pattern).at_index(ipat.field.index()))
|
||||||
.collect();
|
.collect();
|
||||||
for pat in subpatterns {
|
|
||||||
fields[pat.field.index()] = self.lower_pat(&pat.pattern);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
_ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, ty),
|
_ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, ty),
|
||||||
}
|
}
|
||||||
|
@@ -586,7 +578,7 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
                 let ty = self.reveal_opaque_ty(*t);
                 let subpattern = DeconstructedPat::new(Str(*value), Vec::new(), 0, ty, pat);
                 ctor = Ref;
-                fields = vec![subpattern];
+                fields = vec![subpattern.at_index(0)];
                 arity = 1;
             }
             // All constants that can be structurally matched have already been expanded
@@ -651,13 +643,24 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
                     SliceKind::FixedLen(prefix.len() + suffix.len())
                 };
                 ctor = Slice(Slice::new(array_len, kind));
-                fields = prefix.iter().chain(suffix.iter()).map(|p| self.lower_pat(&*p)).collect();
+                fields = prefix
+                    .iter()
+                    .chain(suffix.iter())
+                    .map(|p| self.lower_pat(&*p))
+                    .enumerate()
+                    .map(|(i, p)| p.at_index(i))
+                    .collect();
                 arity = kind.arity();
             }
             PatKind::Or { .. } => {
                 ctor = Or;
                 let pats = expand_or_pat(pat);
-                fields = pats.into_iter().map(|p| self.lower_pat(p)).collect();
+                fields = pats
+                    .into_iter()
+                    .map(|p| self.lower_pat(p))
+                    .enumerate()
+                    .map(|(i, p)| p.at_index(i))
+                    .collect();
                 arity = fields.len();
             }
             PatKind::Never => {
|
@ -1006,15 +1006,17 @@ impl<'p, Cx: TypeCx> PatStack<'p, Cx> {
|
||||||
ctor_arity: usize,
|
ctor_arity: usize,
|
||||||
ctor_is_relevant: bool,
|
ctor_is_relevant: bool,
|
||||||
) -> Result<PatStack<'p, Cx>, Cx::Error> {
|
) -> Result<PatStack<'p, Cx>, Cx::Error> {
|
||||||
// We pop the head pattern and push the new fields extracted from the arguments of
|
let head_pat = self.head();
|
||||||
// `self.head()`.
|
if head_pat.as_pat().is_some_and(|pat| pat.arity() > ctor_arity) {
|
||||||
let mut new_pats = self.head().specialize(ctor, ctor_arity);
|
// Arity can be smaller in case of variable-length slices, but mustn't be larger.
|
||||||
if new_pats.len() != ctor_arity {
|
|
||||||
return Err(cx.bug(format_args!(
|
return Err(cx.bug(format_args!(
|
||||||
"uncaught type error: pattern {:?} has inconsistent arity (expected arity {ctor_arity})",
|
"uncaught type error: pattern {:?} has inconsistent arity (expected arity <= {ctor_arity})",
|
||||||
self.head().as_pat().unwrap()
|
head_pat.as_pat().unwrap()
|
||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
|
// We pop the head pattern and push the new fields extracted from the arguments of
|
||||||
|
// `self.head()`.
|
||||||
|
let mut new_pats = head_pat.specialize(ctor, ctor_arity);
|
||||||
new_pats.extend_from_slice(&self.pats[1..]);
|
new_pats.extend_from_slice(&self.pats[1..]);
|
||||||
// `ctor` is relevant for this row if it is the actual constructor of this row, or if the
|
// `ctor` is relevant for this row if it is the actual constructor of this row, or if the
|
||||||
// row has a wildcard and `ctor` is relevant for wildcards.
|
// row has a wildcard and `ctor` is relevant for wildcards.
|
||||||
|
@@ -1706,7 +1708,8 @@ fn collect_pattern_usefulness<'p, Cx: TypeCx>(
     ) -> bool {
         if useful_subpatterns.contains(&pat.uid) {
             true
-        } else if pat.is_or_pat() && pat.iter_fields().any(|f| pat_is_useful(useful_subpatterns, f))
+        } else if pat.is_or_pat()
+            && pat.iter_fields().any(|f| pat_is_useful(useful_subpatterns, &f.pat))
         {
            // We always expand or patterns in the matrix, so we will never see the actual
            // or-pattern (the one with constructor `Or`) in the column. As such, it will not be