Add a crate-custom test harness

parent e4487ad391, commit d697dd44d1
7 changed files with 582 additions and 0 deletions

@@ -4440,6 +4440,8 @@ dependencies = [
  "rustc_target",
  "smallvec",
  "tracing",
+ "tracing-subscriber",
+ "tracing-tree",
 ]

 [[package]]

@@ -22,6 +22,10 @@ smallvec = { version = "1.8.1", features = ["union"] }
 tracing = "0.1"
 # tidy-alphabetical-end

+[dev-dependencies]
+tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "ansi"] }
+tracing-tree = "0.2.0"
+
 [features]
 default = ["rustc"]
 rustc = [
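
These dev-dependencies back the `init_tracing()` helper added in `tests/common/mod.rs` below. As a side note (not stated in the commit): `tracing_subscriber::EnvFilter::from_default_env()` reads the standard `RUST_LOG` environment variable, so setting e.g. `RUST_LOG=debug` while running the new tests enables the hierarchical trace output.
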
@@ -49,6 +49,12 @@ pub mod index {
         }
     }

+    impl<V> FromIterator<V> for IdxContainer<usize, V> {
+        fn from_iter<T: IntoIterator<Item = V>>(iter: T) -> Self {
+            Self(iter.into_iter().enumerate().collect())
+        }
+    }
+
     #[derive(Debug)]
     pub struct IdxSet<T>(pub rustc_hash::FxHashSet<T>);
     impl<T: Idx> IdxSet<T> {
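
The new `FromIterator` impl lets an iterator be `.collect()`ed into the non-`rustc` `IdxContainer`, presumably so that `Ty::ctor_set()` in the test harness below can build its `variants` list with a plain `.collect()`. A minimal usage sketch, illustrative only and not part of the commit (it assumes `IdxContainer` is the newtype over a `usize`-keyed hash map that the `Self(...)` constructor above suggests):

    // Illustrative only: collecting keys each element by its position,
    // mirroring `iter.into_iter().enumerate().collect()` above.
    let container: IdxContainer<usize, char> = ['a', 'b', 'c'].into_iter().collect();
    assert_eq!(container.0.get(&2), Some(&'c'));
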
compiler/rustc_pattern_analysis/tests/common/mod.rs (new file, 315 lines)
@@ -0,0 +1,315 @@
use rustc_pattern_analysis::{
    constructor::{
        Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, RangeEnd, VariantVisibility,
    },
    usefulness::{PlaceValidity, UsefulnessReport},
    Captures, MatchArm, PatCx, PrivateUninhabitedField,
};

/// Sets up `tracing` for easier debugging. Tries to look like the `rustc` setup.
pub fn init_tracing() {
    use tracing_subscriber::layer::SubscriberExt;
    use tracing_subscriber::util::SubscriberInitExt;
    use tracing_subscriber::Layer;
    let _ = tracing_tree::HierarchicalLayer::default()
        .with_writer(std::io::stderr)
        .with_indent_lines(true)
        .with_ansi(true)
        .with_targets(true)
        .with_indent_amount(2)
        .with_subscriber(
            tracing_subscriber::Registry::default()
                .with(tracing_subscriber::EnvFilter::from_default_env()),
        )
        .try_init();
}

/// A simple set of types.
#[allow(dead_code)]
#[derive(Debug, Copy, Clone)]
pub enum Ty {
    /// Booleans
    Bool,
    /// 8-bit unsigned integers
    U8,
    /// Tuples.
    Tuple(&'static [Ty]),
    /// A struct with `arity` fields of type `ty`.
    BigStruct { arity: usize, ty: &'static Ty },
    /// An enum with `arity` variants of type `ty`.
    BigEnum { arity: usize, ty: &'static Ty },
}

/// The important logic.
impl Ty {
    pub fn sub_tys(&self, ctor: &Constructor<Cx>) -> Vec<Self> {
        use Constructor::*;
        match (ctor, *self) {
            (Struct, Ty::Tuple(tys)) => tys.iter().copied().collect(),
            (Struct, Ty::BigStruct { arity, ty }) => (0..arity).map(|_| *ty).collect(),
            (Variant(_), Ty::BigEnum { ty, .. }) => vec![*ty],
            (Bool(..) | IntRange(..) | NonExhaustive | Missing | Wildcard, _) => vec![],
            _ => panic!("Unexpected ctor {ctor:?} for type {self:?}"),
        }
    }

    pub fn ctor_set(&self) -> ConstructorSet<Cx> {
        match *self {
            Ty::Bool => ConstructorSet::Bool,
            Ty::U8 => ConstructorSet::Integers {
                range_1: IntRange::from_range(
                    MaybeInfiniteInt::new_finite_uint(0),
                    MaybeInfiniteInt::new_finite_uint(255),
                    RangeEnd::Included,
                ),
                range_2: None,
            },
            Ty::Tuple(..) | Ty::BigStruct { .. } => ConstructorSet::Struct { empty: false },
            Ty::BigEnum { arity, .. } => ConstructorSet::Variants {
                variants: (0..arity).map(|_| VariantVisibility::Visible).collect(),
                non_exhaustive: false,
            },
        }
    }

    pub fn write_variant_name(
        &self,
        f: &mut std::fmt::Formatter<'_>,
        ctor: &Constructor<Cx>,
    ) -> std::fmt::Result {
        match (*self, ctor) {
            (Ty::Tuple(..), _) => Ok(()),
            (Ty::BigStruct { .. }, _) => write!(f, "BigStruct"),
            (Ty::BigEnum { .. }, Constructor::Variant(i)) => write!(f, "BigEnum::Variant{i}"),
            _ => write!(f, "{:?}::{:?}", self, ctor),
        }
    }
}

/// Compute usefulness in our simple context (and set up tracing for easier debugging).
pub fn compute_match_usefulness<'p>(
    arms: &[MatchArm<'p, Cx>],
    ty: Ty,
    scrut_validity: PlaceValidity,
    complexity_limit: Option<usize>,
) -> Result<UsefulnessReport<'p, Cx>, ()> {
    init_tracing();
    rustc_pattern_analysis::usefulness::compute_match_usefulness(
        &Cx,
        arms,
        ty,
        scrut_validity,
        complexity_limit,
    )
}

#[derive(Debug)]
pub struct Cx;

/// The context for pattern analysis. Forwards anything interesting to `Ty` methods.
impl PatCx for Cx {
    type Ty = Ty;
    type Error = ();
    type VariantIdx = usize;
    type StrLit = ();
    type ArmData = ();
    type PatData = ();

    fn is_exhaustive_patterns_feature_on(&self) -> bool {
        false
    }

    fn is_min_exhaustive_patterns_feature_on(&self) -> bool {
        false
    }

    fn ctor_arity(&self, ctor: &Constructor<Self>, ty: &Self::Ty) -> usize {
        ty.sub_tys(ctor).len()
    }

    fn ctor_sub_tys<'a>(
        &'a self,
        ctor: &'a Constructor<Self>,
        ty: &'a Self::Ty,
    ) -> impl Iterator<Item = (Self::Ty, PrivateUninhabitedField)> + ExactSizeIterator + Captures<'a>
    {
        ty.sub_tys(ctor).into_iter().map(|ty| (ty, PrivateUninhabitedField(false)))
    }

    fn ctors_for_ty(&self, ty: &Self::Ty) -> Result<ConstructorSet<Self>, Self::Error> {
        Ok(ty.ctor_set())
    }

    fn write_variant_name(
        f: &mut std::fmt::Formatter<'_>,
        ctor: &Constructor<Self>,
        ty: &Self::Ty,
    ) -> std::fmt::Result {
        ty.write_variant_name(f, ctor)
    }

    fn bug(&self, fmt: std::fmt::Arguments<'_>) -> Self::Error {
        panic!("{}", fmt)
    }

    /// Abort when reaching the complexity limit. This is what we'll check in tests.
    fn complexity_exceeded(&self) -> Result<(), Self::Error> {
        Err(())
    }
}

/// Construct a single pattern; see `pats!()`.
#[allow(unused_macros)]
macro_rules! pat {
    ($($rest:tt)*) => {{
        let mut vec = pats!($($rest)*);
        vec.pop().unwrap()
    }};
}

/// A macro to construct patterns. Called like `pats!(type_expr; pattern, pattern, ..)` and returns
/// a `Vec<DeconstructedPat>`. A pattern can be nested and looks like `Constructor(pat, pat)` or
/// `Constructor { .i: pat, .j: pat }`, where `Constructor` is `Struct`, `Variant.i` (with index
/// `i`), as well as booleans and integer ranges.
///
/// The general structure of the macro is a tt-muncher with several stages identified with
/// `@something(args)`. The args are a key-value list (the keys ensure we don't mix the arguments
/// around) which is passed down and modified as needed. We then parse token-trees from
/// left-to-right. Non-trivial recursion happens when we parse the arguments to a pattern: we
/// recurse to parse the tokens inside `{..}`/`(..)`, and then we continue parsing anything that
/// follows.
macro_rules! pats {
    // Entrypoint
    // Parse `type; ..`
    ($ty:expr; $($rest:tt)*) => {{
        #[allow(unused_imports)]
        use rustc_pattern_analysis::{
            constructor::{Constructor, IntRange, MaybeInfiniteInt, RangeEnd},
            pat::DeconstructedPat,
        };
        let ty = $ty;
        // The heart of the macro is designed to push `IndexedPat`s into a `Vec`, so we work around
        // that.
        let sub_tys = ::std::iter::repeat(&ty);
        let mut vec = Vec::new();
        pats!(@ctor(vec:vec, sub_tys:sub_tys, idx:0) $($rest)*);
        vec.into_iter().map(|ipat| ipat.pat).collect::<Vec<_>>()
    }};

    // Parse `constructor ..`

    (@ctor($($args:tt)*) true $($rest:tt)*) => {{
        let ctor = Constructor::Bool(true);
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) false $($rest:tt)*) => {{
        let ctor = Constructor::Bool(false);
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) Struct $($rest:tt)*) => {{
        let ctor = Constructor::Struct;
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) ( $($fields:tt)* ) $($rest:tt)*) => {{
        let ctor = Constructor::Struct; // tuples
        pats!(@pat($($args)*, ctor:ctor) ( $($fields)* ) $($rest)*)
    }};
    (@ctor($($args:tt)*) Variant.$variant:ident $($rest:tt)*) => {{
        let ctor = Constructor::Variant($variant);
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) Variant.$variant:literal $($rest:tt)*) => {{
        let ctor = Constructor::Variant($variant);
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) _ $($rest:tt)*) => {{
        let ctor = Constructor::Wildcard;
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};

    // Integers and int ranges
    (@ctor($($args:tt)*) $($start:literal)?..$end:literal $($rest:tt)*) => {{
        let ctor = Constructor::IntRange(IntRange::from_range(
            pats!(@rangeboundary- $($start)?),
            pats!(@rangeboundary+ $end),
            RangeEnd::Excluded,
        ));
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) $($start:literal)?.. $($rest:tt)*) => {{
        let ctor = Constructor::IntRange(IntRange::from_range(
            pats!(@rangeboundary- $($start)?),
            pats!(@rangeboundary+),
            RangeEnd::Excluded,
        ));
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) $($start:literal)?..=$end:literal $($rest:tt)*) => {{
        let ctor = Constructor::IntRange(IntRange::from_range(
            pats!(@rangeboundary- $($start)?),
            pats!(@rangeboundary+ $end),
            RangeEnd::Included,
        ));
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) $int:literal $($rest:tt)*) => {{
        let ctor = Constructor::IntRange(IntRange::from_range(
            pats!(@rangeboundary- $int),
            pats!(@rangeboundary+ $int),
            RangeEnd::Included,
        ));
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    // Utility to manage range boundaries.
    (@rangeboundary $sign:tt $int:literal) => { MaybeInfiniteInt::new_finite_uint($int) };
    (@rangeboundary -) => { MaybeInfiniteInt::NegInfinity };
    (@rangeboundary +) => { MaybeInfiniteInt::PosInfinity };

    // Parse subfields: `(..)` or `{..}`

    // Constructor with no fields, e.g. `bool` or `Variant.1`.
    (@pat($($args:tt)*) $(,)?) => {
        pats!(@pat($($args)*) {})
    };
    (@pat($($args:tt)*) , $($rest:tt)*) => {
        pats!(@pat($($args)*) {}, $($rest)*)
    };
    // `(..)` and `{..}` are treated the same.
    (@pat($($args:tt)*) ( $($subpat:tt)* ) $($rest:tt)*) => {{
        pats!(@pat($($args)*) { $($subpat)* } $($rest)*)
    }};
    (@pat(vec:$vec:expr, sub_tys:$sub_tys:expr, idx:$idx:expr, ctor:$ctor:expr) { $($fields:tt)* } $($rest:tt)*) => {{
        let sub_tys = $sub_tys;
        let index = $idx;
        // Silly dance to work with both a vec and `iter::repeat()`.
        let ty = *(&sub_tys).clone().into_iter().nth(index).unwrap();
        let ctor = $ctor;
        let ctor_sub_tys = &ty.sub_tys(&ctor);
        #[allow(unused_mut)]
        let mut fields = Vec::new();
        // Parse subpatterns (note the leading comma).
        pats!(@fields(idx:0, vec:fields, sub_tys:ctor_sub_tys) ,$($fields)*);
        let arity = ctor_sub_tys.len();
        let pat = DeconstructedPat::new(ctor, fields, arity, ty, ()).at_index(index);
        $vec.push(pat);

        // Continue parsing further patterns.
        pats!(@fields(idx:index+1, vec:$vec, sub_tys:sub_tys) $($rest)*);
    }};

    // Parse fields one by one.

    // No fields left.
    (@fields($($args:tt)*) $(,)?) => {};
    // `.i: pat` sets the current index to `i`.
    (@fields(idx:$_idx:expr, $($args:tt)*) , .$idx:literal : $($rest:tt)*) => {{
        pats!(@ctor($($args)*, idx:$idx) $($rest)*);
    }};
    (@fields(idx:$_idx:expr, $($args:tt)*) , .$idx:ident : $($rest:tt)*) => {{
        pats!(@ctor($($args)*, idx:$idx) $($rest)*);
    }};
    // Field without an explicit index; we use the current index which gets incremented above.
    (@fields(idx:$idx:expr, $($args:tt)*) , $($rest:tt)*) => {{
        pats!(@ctor($($args)*, idx:$idx) $($rest)*);
    }};
}
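
To make the macro's calling convention concrete, here is a small usage sketch in the same style as the test files below (illustrative only, not part of the commit):

    // Two patterns over a (bool, u8) tuple: `(..)` groups the tuple's fields
    // and `0..255` is a half-open integer range, as in the tests below.
    let ty = Ty::Tuple(&[Ty::Bool, Ty::U8]);
    let patterns = pats!(ty;
        (true, _),
        (_, 0..255),
    );
    // `patterns` is a `Vec<DeconstructedPat<Cx>>`; each entry gets wrapped in a
    // `MatchArm` and passed to `compute_match_usefulness` by the tests.
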
compiler/rustc_pattern_analysis/tests/complexity.rs (new file, 109 lines)
@@ -0,0 +1,109 @@
//! Test the pattern complexity limit.
use common::*;
use rustc_pattern_analysis::{pat::DeconstructedPat, usefulness::PlaceValidity, MatchArm};

#[macro_use]
mod common;

/// Analyze a match made of these patterns. Ignore the report; we only care whether we exceeded the
/// limit or not.
fn check(patterns: &[DeconstructedPat<Cx>], complexity_limit: usize) -> Result<(), ()> {
    let ty = *patterns[0].ty();
    let arms: Vec<_> =
        patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
    compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, Some(complexity_limit))
        .map(|_report| ())
}

/// Asserts that analyzing this match takes exactly `complexity` steps.
#[track_caller]
fn assert_complexity(patterns: Vec<DeconstructedPat<Cx>>, complexity: usize) {
    assert!(check(&patterns, complexity).is_ok());
    assert!(check(&patterns, complexity - 1).is_err());
}

/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
///     BigStruct { field01: true, .. } => {}
///     BigStruct { field02: true, .. } => {}
///     BigStruct { field03: true, .. } => {}
///     BigStruct { field04: true, .. } => {}
///     ...
///     _ => {}
/// }
/// ```
fn diagonal_match(arity: usize) -> Vec<DeconstructedPat<Cx>> {
    let struct_ty = Ty::BigStruct { arity, ty: &Ty::Bool };
    let mut patterns = vec![];
    for i in 0..arity {
        patterns.push(pat!(struct_ty; Struct { .i: true }));
    }
    patterns.push(pat!(struct_ty; _));
    patterns
}

/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
///     BigStruct { field01: true, .. } => {}
///     BigStruct { field02: true, .. } => {}
///     BigStruct { field03: true, .. } => {}
///     BigStruct { field04: true, .. } => {}
///     ...
///     BigStruct { field01: false, .. } => {}
///     BigStruct { field02: false, .. } => {}
///     BigStruct { field03: false, .. } => {}
///     BigStruct { field04: false, .. } => {}
///     ...
///     _ => {}
/// }
/// ```
fn diagonal_exponential_match(arity: usize) -> Vec<DeconstructedPat<Cx>> {
    let struct_ty = Ty::BigStruct { arity, ty: &Ty::Bool };
    let mut patterns = vec![];
    for i in 0..arity {
        patterns.push(pat!(struct_ty; Struct { .i: true }));
    }
    for i in 0..arity {
        patterns.push(pat!(struct_ty; Struct { .i: false }));
    }
    patterns.push(pat!(struct_ty; _));
    patterns
}

#[test]
fn test_diagonal_struct_match() {
    // These cases are nicely linear: we check `arity` patterns with exactly one `true`, matching
    // in 2 branches each, and a final pattern with all `false`, matching only the `_` branch.
    assert_complexity(diagonal_match(20), 41);
    assert_complexity(diagonal_match(30), 61);
    // This case goes exponential.
    assert!(check(&diagonal_exponential_match(10), 10000).is_err());
}

/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
///     BigEnum::Variant1(_) => {}
///     BigEnum::Variant2(_) => {}
///     BigEnum::Variant3(_) => {}
///     ...
///     _ => {}
/// }
/// ```
fn big_enum(arity: usize) -> Vec<DeconstructedPat<Cx>> {
    let enum_ty = Ty::BigEnum { arity, ty: &Ty::Bool };
    let mut patterns = vec![];
    for i in 0..arity {
        patterns.push(pat!(enum_ty; Variant.i));
    }
    patterns.push(pat!(enum_ty; _));
    patterns
}

#[test]
fn test_big_enum() {
    // We try 2 branches per variant.
    assert_complexity(big_enum(20), 40);
}
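
For reference, one way to read the asserted limits (an inference from the comments above, not spelled out in the commit): each of the `arity` single-`true` patterns is checked against 2 branches and the final `_` pattern against 1, so `diagonal_match(20)` needs 2 * 20 + 1 = 41 steps and `diagonal_match(30)` needs 2 * 30 + 1 = 61, while `big_enum(20)` needs 2 * 20 = 40.
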
compiler/rustc_pattern_analysis/tests/exhaustiveness.rs (new file, 77 lines)
@@ -0,0 +1,77 @@
//! Test exhaustiveness checking.
use common::*;
use rustc_pattern_analysis::{
    pat::{DeconstructedPat, WitnessPat},
    usefulness::PlaceValidity,
    MatchArm,
};

#[macro_use]
mod common;

/// Analyze a match made of these patterns.
fn check(patterns: Vec<DeconstructedPat<Cx>>) -> Vec<WitnessPat<Cx>> {
    let ty = *patterns[0].ty();
    let arms: Vec<_> =
        patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
    let report =
        compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, None).unwrap();
    report.non_exhaustiveness_witnesses
}

#[track_caller]
fn assert_exhaustive(patterns: Vec<DeconstructedPat<Cx>>) {
    let witnesses = check(patterns);
    if !witnesses.is_empty() {
panic!("non-exaustive match: missing {witnesses:?}");
|
    }
}

#[track_caller]
fn assert_non_exhaustive(patterns: Vec<DeconstructedPat<Cx>>) {
    let witnesses = check(patterns);
    assert!(!witnesses.is_empty())
}

#[test]
fn test_int_ranges() {
    let ty = Ty::U8;
    assert_exhaustive(pats!(ty;
        0..=255,
    ));
    assert_exhaustive(pats!(ty;
        0..,
    ));
    assert_non_exhaustive(pats!(ty;
        0..255,
    ));
    assert_exhaustive(pats!(ty;
        0..255,
        255,
    ));
    assert_exhaustive(pats!(ty;
        ..10,
        10..
    ));
}

#[test]
fn test_nested() {
    let ty = Ty::BigStruct { arity: 2, ty: &Ty::BigEnum { arity: 2, ty: &Ty::Bool } };
    assert_non_exhaustive(pats!(ty;
        Struct(Variant.0, _),
    ));
    assert_exhaustive(pats!(ty;
        Struct(Variant.0, _),
        Struct(Variant.1, _),
    ));
    assert_non_exhaustive(pats!(ty;
        Struct(Variant.0, _),
        Struct(_, Variant.0),
    ));
    assert_exhaustive(pats!(ty;
        Struct(Variant.0, _),
        Struct(_, Variant.0),
        Struct(Variant.1, Variant.1),
    ));
}
compiler/rustc_pattern_analysis/tests/intersection.rs (new file, 69 lines)
@@ -0,0 +1,69 @@
//! Test the computation of arm intersections.
use common::*;
use rustc_pattern_analysis::{pat::DeconstructedPat, usefulness::PlaceValidity, MatchArm};

#[macro_use]
mod common;

/// Analyze a match made of these patterns and return the computed arm intersections.
fn check(patterns: Vec<DeconstructedPat<Cx>>) -> Vec<Vec<usize>> {
    let ty = *patterns[0].ty();
    let arms: Vec<_> =
        patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
    let report =
        compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, None).unwrap();
    report.arm_intersections.into_iter().map(|bitset| bitset.iter().collect()).collect()
}

#[track_caller]
fn assert_intersects(patterns: Vec<DeconstructedPat<Cx>>, intersects: &[&[usize]]) {
    let computed_intersects = check(patterns);
    assert_eq!(computed_intersects, intersects);
}

#[test]
fn test_int_ranges() {
    let ty = Ty::U8;
    assert_intersects(
        pats!(ty;
            0..=100,
            100..,
        ),
        &[&[], &[0]],
    );
    assert_intersects(
        pats!(ty;
            0..=101,
            100..,
        ),
        &[&[], &[0]],
    );
    assert_intersects(
        pats!(ty;
            0..100,
            100..,
        ),
        &[&[], &[]],
    );
}

#[test]
fn test_nested() {
    let ty = Ty::Tuple(&[Ty::Bool; 2]);
    assert_intersects(
        pats!(ty;
            (true, true),
            (true, _),
            (_, true),
        ),
        &[&[], &[0], &[0, 1]],
    );
    // Here we shortcut because `(true, true)` is irrelevant, so we fail to detect the intersection.
    assert_intersects(
        pats!(ty;
            (true, _),
            (_, true),
        ),
        &[&[], &[]],
    );
}
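
A note on reading the expected values (an inference from the tests, not stated in the commit): entry `i` of the returned intersections lists the indices of the arms above arm `i` whose matched values overlap with it. In the first `test_nested` case, `&[&[], &[0], &[0, 1]]` therefore says that `(true, _)` overlaps `(true, true)`, and `(_, true)` overlaps both earlier arms.
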