// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use self::Constructor::*;
use self::Usefulness::*;
use self::WitnessPreference::*;

use rustc::middle::const_val::ConstVal;
use eval::{compare_const_vals};

use rustc_const_math::ConstInt;

use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::indexed_vec::Idx;

use pattern::{FieldPattern, Pattern, PatternKind};
use pattern::{PatternFoldable, PatternFolder};

use rustc::hir::def_id::DefId;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};

use rustc::mir::Field;
use rustc::util::common::ErrorReported;

use syntax::ast::NodeId;
use syntax_pos::{Span, DUMMY_SP};

use arena::TypedArena;

use std::cmp::{self, Ordering};
use std::fmt;
use std::iter::{FromIterator, IntoIterator, repeat};

pub fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pattern<'tcx>)
                                -> &'a Pattern<'tcx>
{
    cx.pattern_arena.alloc(LiteralExpander.fold_pattern(&pat))
}

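/// Folds constant patterns of reference type (e.g. string literals, which
/// have type `&str`) into an explicit `Deref` of the inner constant, and
/// replaces bindings that carry a subpattern with that subpattern, so the
/// rest of this module only sees the expanded forms.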
struct LiteralExpander;
impl<'tcx> PatternFolder<'tcx> for LiteralExpander {
    fn fold_pattern(&mut self, pat: &Pattern<'tcx>) -> Pattern<'tcx> {
        match (&pat.ty.sty, &*pat.kind) {
            (&ty::TyRef(_, mt), &PatternKind::Constant { ref value }) => {
                Pattern {
                    ty: pat.ty,
                    span: pat.span,
                    kind: box PatternKind::Deref {
                        subpattern: Pattern {
                            ty: mt.ty,
                            span: pat.span,
                            kind: box PatternKind::Constant { value: value.clone() },
                        }
                    }
                }
            }
            (_, &PatternKind::Binding { subpattern: Some(ref s), .. }) => {
                s.fold_with(self)
            }
            _ => pat.super_fold_with(self)
        }
    }
}

impl<'tcx> Pattern<'tcx> {
    fn is_wildcard(&self) -> bool {
        match *self.kind {
            PatternKind::Binding { subpattern: None, .. } | PatternKind::Wild =>
                true,
            _ => false
        }
    }
}

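/// Roughly speaking, a 2D matrix of patterns: one row per (partially
/// specialized) candidate arm, one column per pattern that still has to be
/// matched against.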
pub struct Matrix<'a, 'tcx: 'a>(Vec<Vec<&'a Pattern<'tcx>>>);

impl<'a, 'tcx> Matrix<'a, 'tcx> {
    pub fn empty() -> Self {
        Matrix(vec![])
    }

    pub fn push(&mut self, row: Vec<&'a Pattern<'tcx>>) {
        self.0.push(row)
    }
}

/// Pretty-printer for matrices of patterns, example:
/// ++++++++++++++++++++++++++
/// + _     + []             +
/// ++++++++++++++++++++++++++
/// + true  + [First]        +
/// ++++++++++++++++++++++++++
/// + true  + [Second(true)] +
/// ++++++++++++++++++++++++++
/// + false + [_]            +
/// ++++++++++++++++++++++++++
/// + _     + [_, _, ..tail] +
/// ++++++++++++++++++++++++++
impl<'a, 'tcx> fmt::Debug for Matrix<'a, 'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "\n")?;

        let &Matrix(ref m) = self;
        let pretty_printed_matrix: Vec<Vec<String>> = m.iter().map(|row| {
            row.iter().map(|pat| format!("{:?}", pat)).collect()
        }).collect();

        let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0);
        assert!(m.iter().all(|row| row.len() == column_count));
        let column_widths: Vec<usize> = (0..column_count).map(|col| {
            pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0)
        }).collect();

        let total_width = column_widths.iter().cloned().sum::<usize>() + column_count * 3 + 1;
        let br = repeat('+').take(total_width).collect::<String>();
        write!(f, "{}\n", br)?;
        for row in pretty_printed_matrix {
            write!(f, "+")?;
            for (column, pat_str) in row.into_iter().enumerate() {
                write!(f, " ")?;
                write!(f, "{:1$}", pat_str, column_widths[column])?;
                write!(f, " +")?;
            }
            write!(f, "\n")?;
            write!(f, "{}\n", br)?;
        }
        Ok(())
    }
}

impl<'a, 'tcx> FromIterator<Vec<&'a Pattern<'tcx>>> for Matrix<'a, 'tcx> {
    fn from_iter<T: IntoIterator<Item=Vec<&'a Pattern<'tcx>>>>(iter: T) -> Self
    {
        Matrix(iter.into_iter().collect())
    }
}

// NOTE: This appears to be the only place other than InferCtxt to contain a ParamEnv.
pub struct MatchCheckCtxt<'a, 'tcx: 'a> {
    pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
    /// (Roughly) where in the code the match occurs. This is necessary for
    /// checking inhabitedness of types, because whether a type is (visibly)
    /// inhabited can depend on whether it was defined in the current module or
    /// not, e.g.
    ///     struct Foo { _private: ! }
    /// cannot be seen to be empty outside its module and should not
    /// be matchable with an empty match statement.
    pub node: NodeId,
    /// A wild pattern with an error type - it exists to avoid having to normalize
    /// associated types to get field types.
    pub wild_pattern: &'a Pattern<'tcx>,
    pub pattern_arena: &'a TypedArena<Pattern<'tcx>>,
    pub byte_array_map: FxHashMap<*const Pattern<'tcx>, Vec<&'a Pattern<'tcx>>>,
}

impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
    pub fn create_and_enter<F, R>(
        tcx: TyCtxt<'a, 'tcx, 'tcx>,
        node: NodeId,
        f: F) -> R
        where F: for<'b> FnOnce(MatchCheckCtxt<'b, 'tcx>) -> R
    {
        let wild_pattern = Pattern {
            ty: tcx.types.err,
            span: DUMMY_SP,
            kind: box PatternKind::Wild
        };

        let pattern_arena = TypedArena::new();

        f(MatchCheckCtxt {
            tcx: tcx,
            node: node,
            wild_pattern: &wild_pattern,
            pattern_arena: &pattern_arena,
            byte_array_map: FxHashMap(),
        })
    }

    // convert a byte-string pattern to a list of u8 patterns.
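    // e.g. the pattern `b"ab"` is lowered to the two `u8` patterns `b'a'` and
    // `b'b'`; the resulting list is cached in `byte_array_map`.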
    fn lower_byte_str_pattern(&mut self, pat: &'a Pattern<'tcx>) -> Vec<&'a Pattern<'tcx>> {
        let pattern_arena = &*self.pattern_arena;
        let tcx = self.tcx;
        self.byte_array_map.entry(pat).or_insert_with(|| {
            match pat.kind {
                box PatternKind::Constant {
                    value: ConstVal::ByteStr(ref data)
                } => {
                    data.iter().map(|c| &*pattern_arena.alloc(Pattern {
                        ty: tcx.types.u8,
                        span: pat.span,
                        kind: box PatternKind::Constant {
                            value: ConstVal::Integral(ConstInt::U8(*c))
                        }
                    })).collect()
                }
                _ => span_bug!(pat.span, "unexpected byte array pattern {:?}", pat)
            }
        }).clone()
    }
}

#[derive(Clone, Debug, PartialEq)]
pub enum Constructor {
    /// The constructor of all patterns that don't vary by constructor,
    /// e.g. struct patterns and fixed-length arrays.
    Single,
    /// Enum variants.
    Variant(DefId),
    /// Literal values.
    ConstantValue(ConstVal),
    /// Ranges of literal values (2..5).
    ConstantRange(ConstVal, ConstVal),
    /// Array patterns of length n.
    Slice(usize),
}

impl<'tcx> Constructor {
    fn variant_index_for_adt(&self, adt: &'tcx ty::AdtDef) -> usize {
        match self {
            &Variant(vid) => adt.variant_index_with_id(vid),
            &Single => {
                assert_eq!(adt.variants.len(), 1);
                0
            }
            _ => bug!("bad constructor {:?} for adt {:?}", self, adt)
        }
    }
}

#[derive(Clone)]
pub enum Usefulness<'tcx> {
    Useful,
    UsefulWithWitness(Vec<Witness<'tcx>>),
    NotUseful
}

impl<'tcx> Usefulness<'tcx> {
    fn is_useful(&self) -> bool {
        match *self {
            NotUseful => false,
            _ => true
        }
    }
}

#[derive(Copy, Clone)]
pub enum WitnessPreference {
    ConstructWitness,
    LeaveOutWitness
}

#[derive(Copy, Clone, Debug)]
struct PatternContext<'tcx> {
    ty: Ty<'tcx>,
    max_slice_length: usize,
}

/// A stack of patterns in reverse order of construction
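// For example, while rebuilding the witness `Some(true)` the stack goes from
// `[true]` to `[Some(true)]` when `apply_constructor` is called with the
// `Some` constructor.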
#[derive(Clone)]
pub struct Witness<'tcx>(Vec<Pattern<'tcx>>);

impl<'tcx> Witness<'tcx> {
    pub fn single_pattern(&self) -> &Pattern<'tcx> {
        assert_eq!(self.0.len(), 1);
        &self.0[0]
    }

    fn push_wild_constructor<'a>(
        mut self,
        cx: &MatchCheckCtxt<'a, 'tcx>,
        ctor: &Constructor,
        ty: Ty<'tcx>)
        -> Self
    {
        let arity = constructor_arity(cx, ctor, ty);
        self.0.extend(repeat(cx.wild_pattern).take(arity).cloned());
        self.apply_constructor(cx, ctor, ty)
    }

    /// Constructs a partial witness for a pattern given a list of
    /// patterns expanded by the specialization step.
    ///
    /// When a pattern P is discovered to be useful, this function is used bottom-up
    /// to reconstruct a complete witness, e.g. a pattern P' that covers a subset
    /// of values, V, where each value in that set is not covered by any previously
    /// used patterns and is covered by the pattern P'. Examples:
    ///
    /// left_ty: tuple of 3 elements
    /// pats: [10, 20, _]           => (10, 20, _)
    ///
    /// left_ty: struct X { a: (bool, &'static str), b: usize}
    /// pats: [(false, "foo"), 42]  => X { a: (false, "foo"), b: 42 }
    fn apply_constructor<'a>(
        mut self,
        cx: &MatchCheckCtxt<'a,'tcx>,
        ctor: &Constructor,
        ty: Ty<'tcx>)
        -> Self
    {
        let arity = constructor_arity(cx, ctor, ty);
        let pat = {
            let len = self.0.len();
            let mut pats = self.0.drain(len-arity..).rev();

            match ty.sty {
                ty::TyAdt(..) |
                ty::TyTuple(..) => {
                    let pats = pats.enumerate().map(|(i, p)| {
                        FieldPattern {
                            field: Field::new(i),
                            pattern: p
                        }
                    }).collect();

                    if let ty::TyAdt(adt, _) = ty.sty {
                        if adt.variants.len() > 1 {
                            PatternKind::Variant {
                                adt_def: adt,
                                variant_index: ctor.variant_index_for_adt(adt),
                                subpatterns: pats
                            }
                        } else {
                            PatternKind::Leaf { subpatterns: pats }
                        }
                    } else {
                        PatternKind::Leaf { subpatterns: pats }
                    }
                }

                ty::TyRef(..) => {
                    PatternKind::Deref { subpattern: pats.nth(0).unwrap() }
                }

                ty::TySlice(_) | ty::TyArray(..) => {
                    PatternKind::Slice {
                        prefix: pats.collect(),
                        slice: None,
                        suffix: vec![]
                    }
                }

                _ => {
                    match *ctor {
                        ConstantValue(ref v) => PatternKind::Constant { value: v.clone() },
                        _ => PatternKind::Wild,
                    }
                }
            }
        };

        self.0.push(Pattern {
            ty: ty,
            span: DUMMY_SP,
            kind: Box::new(pat),
        });

        self
    }
}

/// Return the set of constructors from the same type as the first column of `matrix`
/// that are matched only by wildcard patterns from that first column.
///
/// Therefore, if there is some pattern that is unmatched by `matrix`, it will
/// still be unmatched if the first constructor is replaced by any of the constructors
/// in the return value.
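// For example, if the first column contains the patterns `None` and `_` for a
// scrutinee of type `Option<T>` (with `T` inhabited), the returned set is
// `[Variant(Some)]`: `Some` values are matched only by the wildcard row.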
fn missing_constructors<'a, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
                                      matrix: &Matrix,
                                      pcx: PatternContext<'tcx>) -> Vec<Constructor> {
    let used_constructors: Vec<Constructor> =
        matrix.0.iter()
        .flat_map(|row| pat_constructors(cx, row[0], pcx).unwrap_or(vec![]))
        .collect();
    debug!("used_constructors = {:?}", used_constructors);
    all_constructors(cx, pcx).into_iter()
        .filter(|c| !used_constructors.contains(c))
        .collect()
}

/// This determines the set of all possible constructors of a pattern matching
/// values of type `left_ty`. For vectors, this would normally be an infinite set
/// but is instead bounded by the maximum fixed length of slice patterns in
/// the column of patterns being analyzed.
///
/// This intentionally does not list ConstantValue specializations for
/// non-booleans, because we currently assume that there is always a
/// "non-standard constant" that matches. See issue #12483.
///
/// We make sure to omit constructors that are statically impossible, e.g. for
/// Option<!> we do not include Some(_) in the returned list of constructors.
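// For example, for `bool` this returns the two `ConstantValue` constructors
// `true` and `false`; for a slice type `[T]` with `T` inhabited it returns
// `Slice(0)` up to `Slice(pcx.max_slice_length)`.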
fn all_constructors<'a, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
                                  pcx: PatternContext<'tcx>) -> Vec<Constructor>
{
    match pcx.ty.sty {
        ty::TyBool =>
            [true, false].iter().map(|b| ConstantValue(ConstVal::Bool(*b))).collect(),
        ty::TySlice(ref sub_ty) => {
            if sub_ty.is_uninhabited(Some(cx.node), cx.tcx) {
                vec![Slice(0)]
            } else {
                (0..pcx.max_slice_length+1).map(|length| Slice(length)).collect()
            }
        }
        ty::TyArray(ref sub_ty, length) => {
            if length == 0 || !sub_ty.is_uninhabited(Some(cx.node), cx.tcx) {
                vec![Slice(length)]
            } else {
                vec![]
            }
        }
        ty::TyAdt(def, substs) if def.is_enum() && def.variants.len() != 1 => {
            def.variants.iter().filter_map(|v| {
                let mut visited = FxHashSet::default();
                if v.is_uninhabited_recurse(&mut visited, Some(cx.node), cx.tcx, substs, false) {
                    None
                } else {
                    Some(Variant(v.did))
                }
            }).collect()
        }
        _ => {
            if pcx.ty.is_uninhabited(Some(cx.node), cx.tcx) {
                vec![]
            } else {
                vec![Single]
            }
        }
    }
}

fn max_slice_length<'a, 'tcx, I>(
    _cx: &mut MatchCheckCtxt<'a, 'tcx>,
    patterns: I) -> usize
    where I: Iterator<Item=&'a Pattern<'tcx>>
{
    // The exhaustiveness-checking paper does not include any details on
    // checking variable-length slice patterns. However, they are matched
    // by an infinite collection of fixed-length array patterns.
    //
    // Checking the infinite set directly would take an infinite amount
    // of time. However, it turns out that for each finite set of
    // patterns `P`, all sufficiently large array lengths are equivalent:
    //
    // Each slice `s` with a "sufficiently-large" length `l ≥ L` that applies
    // to exactly the subset `Pₜ` of `P` can be transformed to a slice
    // `sₘ` for each sufficiently-large length `m` that applies to exactly
    // the same subset of `P`.
    //
    // Because of that, each witness for reachability-checking from one
    // of the sufficiently-large lengths can be transformed to an
    // equally-valid witness from any other length, so we only have
    // to check slice lengths from the "minimal sufficiently-large length"
    // and below.
    //
    // Note that the fact that there is a *single* `sₘ` for each `m`
    // not depending on the specific pattern in `P` is important: if
    // you look at the pair of patterns
    //     `[true, ..]`
    //     `[.., false]`
    // then any slice of length ≥1 that matches one of these two
    // patterns can be trivially turned to a slice of any
    // other length ≥1 that matches them and vice-versa, but the slice
    // of length 2 `[false, true]` that matches neither of these patterns
    // can't be turned to a slice of length 1 that matches neither of
    // these patterns, so we have to consider slices of length 2 there.
    //
    // Now, to see that that length exists and find it, observe that slice
    // patterns are either "fixed-length" patterns (`[_, _, _]`) or
    // "variable-length" patterns (`[_, .., _]`).
    //
    // For fixed-length patterns, all slices with lengths *longer* than
    // the pattern's length have the same outcome (of not matching), so
    // as long as `L` is greater than the pattern's length we can pick
    // any `sₘ` from that length and get the same result.
    //
    // For variable-length patterns, the situation is more complicated,
    // because as seen above the precise value of `sₘ` matters.
    //
    // However, for each variable-length pattern `p` with a prefix of length
    // `plₚ` and suffix of length `slₚ`, only the first `plₚ` and the last
    // `slₚ` elements are examined.
    //
    // Therefore, as long as `L` is positive (to avoid concerns about empty
    // types), all elements after the maximum prefix length and before
    // the maximum suffix length are not examined by any variable-length
    // pattern, and therefore can be added/removed without affecting
    // them - creating equivalent patterns from any sufficiently-large
    // length.
    //
    // Of course, if fixed-length patterns exist, we must be sure
    // that our length is large enough to miss them all, so
    // we can pick `L = max(FIXED_LEN+1 ∪ {max(PREFIX_LEN) + max(SUFFIX_LEN)})`.
    //
    // For example, with the above pair of patterns, all elements
    // but the first and last can be added/removed, so any
    // witness of length ≥2 (say, `[false, false, true]`) can be
    // turned to a witness from any other length ≥2.
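    //
    // For instance, for the patterns `[_, _, _]`, `[true, ..]` and `[.., false]`
    // we get max_fixed_len = 3, max_prefix_len = 1 and max_suffix_len = 1, so
    // the code below returns max(3 + 1, 1 + 1) = 4.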

    let mut max_prefix_len = 0;
    let mut max_suffix_len = 0;
    let mut max_fixed_len = 0;

    for row in patterns {
        match *row.kind {
            PatternKind::Constant { value: ConstVal::ByteStr(ref data) } => {
                max_fixed_len = cmp::max(max_fixed_len, data.len());
            }
            PatternKind::Slice { ref prefix, slice: None, ref suffix } => {
                let fixed_len = prefix.len() + suffix.len();
                max_fixed_len = cmp::max(max_fixed_len, fixed_len);
            }
            PatternKind::Slice { ref prefix, slice: Some(_), ref suffix } => {
                max_prefix_len = cmp::max(max_prefix_len, prefix.len());
                max_suffix_len = cmp::max(max_suffix_len, suffix.len());
            }
            _ => {}
        }
    }

    cmp::max(max_fixed_len + 1, max_prefix_len + max_suffix_len)
}

/// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html
///
/// Whether a vector `v` of patterns is 'useful' in relation to a set of such
/// vectors `m` is defined as there being a set of inputs that will match `v`
/// but not any of the sets in `m`.
///
/// This is used both for reachability checking (if a pattern isn't useful in
/// relation to preceding patterns, it is not reachable) and exhaustiveness
/// checking (if a wildcard pattern is useful in relation to a matrix, the
/// matrix isn't exhaustive).
///
/// Note: is_useful doesn't work on empty types, as the paper notes.
/// So it assumes that v is non-empty.
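// For example, given the match arms `Some(true)` and `None` (so the matrix is
// `[[Some(true)], [None]]`), the wildcard row `[_]` is useful: the value
// `Some(false)` matches it but none of the existing rows, and with
// `ConstructWitness` that value is returned as the witness.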
pub fn is_useful<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
                           matrix: &Matrix<'a, 'tcx>,
                           v: &[&'a Pattern<'tcx>],
                           witness: WitnessPreference)
                           -> Usefulness<'tcx> {
    let &Matrix(ref rows) = matrix;
    debug!("is_useful({:?}, {:?})", matrix, v);

    // The base case. We are pattern-matching on () and the return value is
    // based on whether our matrix has a row or not.
    // NOTE: This could potentially be optimized by checking rows.is_empty()
    // first and then, if v is non-empty, the return value is based on whether
    // the type of the tuple we're checking is inhabited or not.
    if v.is_empty() {
        return if rows.is_empty() {
            match witness {
                ConstructWitness => UsefulWithWitness(vec![Witness(vec![])]),
                LeaveOutWitness => Useful,
            }
        } else {
            NotUseful
        }
    };

    assert!(rows.iter().all(|r| r.len() == v.len()));

    let pcx = PatternContext {
        ty: rows.iter().map(|r| r[0].ty).find(|ty| !ty.references_error())
            .unwrap_or(v[0].ty),
        max_slice_length: max_slice_length(cx, rows.iter().map(|r| r[0]).chain(Some(v[0])))
    };

    debug!("is_useful_expand_first_col: pcx={:?}, expanding {:?}", pcx, v[0]);

    if let Some(constructors) = pat_constructors(cx, v[0], pcx) {
        debug!("is_useful - expanding constructors: {:?}", constructors);
        constructors.into_iter().map(|c|
            is_useful_specialized(cx, matrix, v, c.clone(), pcx.ty, witness)
        ).find(|result| result.is_useful()).unwrap_or(NotUseful)
    } else {
        debug!("is_useful - expanding wildcard");
        let constructors = missing_constructors(cx, matrix, pcx);
        debug!("is_useful - missing_constructors = {:?}", constructors);
        if constructors.is_empty() {
            all_constructors(cx, pcx).into_iter().map(|c| {
                is_useful_specialized(cx, matrix, v, c.clone(), pcx.ty, witness)
            }).find(|result| result.is_useful()).unwrap_or(NotUseful)
        } else {
            let matrix = rows.iter().filter_map(|r| {
                if r[0].is_wildcard() {
                    Some(r[1..].to_vec())
                } else {
                    None
                }
            }).collect();
            match is_useful(cx, &matrix, &v[1..], witness) {
                UsefulWithWitness(pats) => {
                    let cx = &*cx;
                    UsefulWithWitness(pats.into_iter().flat_map(|witness| {
                        constructors.iter().map(move |ctor| {
                            witness.clone().push_wild_constructor(cx, ctor, pcx.ty)
                        })
                    }).collect())
                }
                result => result
            }
        }
    }
}

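/// Checks the usefulness of `v` after specializing both the matrix and `v` by
/// `ctor`: each row whose first pattern can match `ctor` is replaced by a row
/// of its subpatterns, and any witnesses found are rebuilt by re-applying
/// `ctor` to them.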
fn is_useful_specialized<'a, 'tcx>(
    cx: &mut MatchCheckCtxt<'a, 'tcx>,
    &Matrix(ref m): &Matrix<'a, 'tcx>,
    v: &[&'a Pattern<'tcx>],
    ctor: Constructor,
    lty: Ty<'tcx>,
    witness: WitnessPreference) -> Usefulness<'tcx>
{
    let arity = constructor_arity(cx, &ctor, lty);
    let matrix = Matrix(m.iter().flat_map(|r| {
        specialize(cx, &r[..], &ctor, 0, arity)
    }).collect());
    match specialize(cx, v, &ctor, 0, arity) {
        Some(v) => match is_useful(cx, &matrix, &v[..], witness) {
            UsefulWithWitness(witnesses) => UsefulWithWitness(
                witnesses.into_iter()
                    .map(|witness| witness.apply_constructor(cx, &ctor, lty))
                    .collect()
            ),
            result => result
        },
        None => NotUseful
    }
}

/// Determines the constructors that the given pattern can be specialized to.
///
/// In most cases, there's only one constructor that a specific pattern
/// represents, such as a specific enum variant or a specific literal value.
/// Slice patterns, however, can match slices of different lengths. For instance,
/// `[a, b, ..tail]` can match a slice of length 2, 3, 4 and so on.
///
/// Returns None in case of a catch-all, which can't be specialized.
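// For example, the pattern `5` yields `[ConstantValue(5)]`, while the slice
// pattern `[a, b, ..]` with `pcx.max_slice_length == 4` yields
// `[Slice(2), Slice(3), Slice(4)]`.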
fn pat_constructors(_cx: &mut MatchCheckCtxt,
                    pat: &Pattern,
                    pcx: PatternContext)
                    -> Option<Vec<Constructor>>
{
    match *pat.kind {
        PatternKind::Binding { .. } | PatternKind::Wild =>
            None,
        PatternKind::Leaf { .. } | PatternKind::Deref { .. } =>
            Some(vec![Single]),
        PatternKind::Variant { adt_def, variant_index, .. } =>
            Some(vec![Variant(adt_def.variants[variant_index].did)]),
        PatternKind::Constant { ref value } =>
            Some(vec![ConstantValue(value.clone())]),
        PatternKind::Range { ref lo, ref hi } =>
            Some(vec![ConstantRange(lo.clone(), hi.clone())]),
        PatternKind::Array { .. } => match pcx.ty.sty {
            ty::TyArray(_, length) => Some(vec![Slice(length)]),
            _ => span_bug!(pat.span, "bad ty {:?} for array pattern", pcx.ty)
        },
        PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
            let pat_len = prefix.len() + suffix.len();
            if slice.is_some() {
                Some((pat_len..pcx.max_slice_length+1).map(Slice).collect())
            } else {
                Some(vec![Slice(pat_len)])
            }
        }
    }
}

/// This computes the arity of a constructor. The arity of a constructor
/// is the number of subpatterns that a pattern of that constructor expands to.
///
/// For instance, a tuple pattern (_, 42, Some([])) has the arity of 3.
/// A struct pattern's arity is the number of fields it contains, etc.
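// For example, the `Some` variant of `Option<(u32, bool)>` has arity 1, and
// `Slice(3)` at a slice or array type has arity 3.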
fn constructor_arity(_cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> usize {
    debug!("constructor_arity({:?}, {:?})", ctor, ty);
    match ty.sty {
        ty::TyTuple(ref fs) => fs.len(),
        ty::TyBox(_) => 1,
        ty::TySlice(..) | ty::TyArray(..) => match *ctor {
            Slice(length) => length,
            ConstantValue(_) => 0,
            _ => bug!("bad slice pattern {:?} {:?}", ctor, ty)
        },
        ty::TyRef(..) => 1,
        ty::TyAdt(adt, _) => {
            adt.variants[ctor.variant_index_for_adt(adt)].fields.len()
        }
        _ => 0
    }
}

fn slice_pat_covered_by_constructor(_tcx: TyCtxt, _span: Span,
                                    ctor: &Constructor,
                                    prefix: &[Pattern],
                                    slice: &Option<Pattern>,
                                    suffix: &[Pattern])
                                    -> Result<bool, ErrorReported> {
    let data = match *ctor {
        ConstantValue(ConstVal::ByteStr(ref data)) => data,
        _ => bug!()
    };

    let pat_len = prefix.len() + suffix.len();
    if data.len() < pat_len || (slice.is_none() && data.len() > pat_len) {
        return Ok(false);
    }

    for (ch, pat) in
        data[..prefix.len()].iter().zip(prefix).chain(
            data[data.len()-suffix.len()..].iter().zip(suffix))
    {
        match pat.kind {
            box PatternKind::Constant { ref value } => match *value {
                ConstVal::Integral(ConstInt::U8(u)) => {
                    if u != *ch {
                        return Ok(false);
                    }
                },
                _ => span_bug!(pat.span, "bad const u8 {:?}", value)
            },
            _ => {}
        }
    }

    Ok(true)
}

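// Returns Ok(true) iff every value matched by `ctor` (a single value or an
// inclusive range) lies between `from` and `to` inclusive; e.g. the
// constructor `3` is covered by the range `2...5`, while the range `2...7`
// is not.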
fn range_covered_by_constructor(tcx: TyCtxt, span: Span,
                                ctor: &Constructor,
                                from: &ConstVal, to: &ConstVal)
                                -> Result<bool, ErrorReported> {
    let (c_from, c_to) = match *ctor {
        ConstantValue(ref value)        => (value, value),
        ConstantRange(ref from, ref to) => (from, to),
        Single                          => return Ok(true),
        _                               => bug!()
    };
    let cmp_from = compare_const_vals(tcx, span, c_from, from)?;
    let cmp_to = compare_const_vals(tcx, span, c_to, to)?;
    Ok(cmp_from != Ordering::Less && cmp_to != Ordering::Greater)
}

fn patterns_for_variant<'a, 'tcx>(
    cx: &mut MatchCheckCtxt<'a, 'tcx>,
    subpatterns: &'a [FieldPattern<'tcx>],
    arity: usize)
    -> Vec<&'a Pattern<'tcx>>
{
    let mut result = vec![cx.wild_pattern; arity];

    for subpat in subpatterns {
        result[subpat.field.index()] = &subpat.pattern;
    }

    debug!("patterns_for_variant({:?}, {:?}) = {:?}", subpatterns, arity, result);
    result
}

/// This is the main specialization step. It expands the first pattern in the given row
/// into `arity` patterns based on the constructor. For most patterns, the step is trivial,
/// for instance tuple patterns are flattened and box patterns expand into their inner pattern.
///
/// OTOH, slice patterns with a subslice pattern (..tail) can be expanded into multiple
/// different patterns.
/// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
/// fields filled with wild patterns.
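// For example, specializing the row `[Some(42), y]` on the constructor
// `Variant(Some)` (arity 1) yields the row `[42, y]`, while specializing it
// on `Variant(None)` yields `None` and the row is dropped.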
fn specialize<'a, 'tcx>(
    cx: &mut MatchCheckCtxt<'a, 'tcx>,
    r: &[&'a Pattern<'tcx>],
    constructor: &Constructor, col: usize, arity: usize)
    -> Option<Vec<&'a Pattern<'tcx>>>
{
    let pat = &r[col];

    let head: Option<Vec<&Pattern>> = match *pat.kind {
        PatternKind::Binding { .. } | PatternKind::Wild =>
            Some(vec![cx.wild_pattern; arity]),

        PatternKind::Variant { adt_def, variant_index, ref subpatterns } => {
            let ref variant = adt_def.variants[variant_index];
            if *constructor == Variant(variant.did) {
                Some(patterns_for_variant(cx, subpatterns, arity))
            } else {
                None
            }
        }

        PatternKind::Leaf { ref subpatterns } => Some(patterns_for_variant(cx, subpatterns, arity)),
        PatternKind::Deref { ref subpattern } => Some(vec![subpattern]),

        PatternKind::Constant { ref value } => {
            match *constructor {
                Slice(..) => match *value {
                    ConstVal::ByteStr(ref data) => {
                        if arity == data.len() {
                            Some(cx.lower_byte_str_pattern(pat))
                        } else {
                            None
                        }
                    }
                    _ => span_bug!(pat.span,
                        "unexpected const-val {:?} with ctor {:?}", value, constructor)
                },
                _ => {
                    match range_covered_by_constructor(
                        cx.tcx, pat.span, constructor, value, value
                    ) {
                        Ok(true) => Some(vec![]),
                        Ok(false) => None,
                        Err(ErrorReported) => None,
                    }
                }
            }
        }

        PatternKind::Range { ref lo, ref hi } => {
            match range_covered_by_constructor(
                cx.tcx, pat.span, constructor, lo, hi
            ) {
                Ok(true) => Some(vec![]),
                Ok(false) => None,
                Err(ErrorReported) => None,
            }
        }

        PatternKind::Array { ref prefix, ref slice, ref suffix } |
        PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
            match *constructor {
                Slice(..) => {
                    let pat_len = prefix.len() + suffix.len();
                    if let Some(slice_count) = arity.checked_sub(pat_len) {
                        if slice_count == 0 || slice.is_some() {
                            Some(
                                prefix.iter().chain(
                                repeat(cx.wild_pattern).take(slice_count).chain(
                                suffix.iter()
                            )).collect())
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                }
                ConstantValue(..) => {
                    match slice_pat_covered_by_constructor(
                        cx.tcx, pat.span, constructor, prefix, slice, suffix
                    ) {
                        Ok(true) => Some(vec![]),
                        Ok(false) => None,
                        Err(ErrorReported) => None
                    }
                }
                _ => span_bug!(pat.span,
                    "unexpected ctor {:?} for slice pat", constructor)
            }
        }
    };
    debug!("specialize({:?}, {:?}) = {:?}", r[col], arity, head);

    head.map(|mut head| {
        head.extend_from_slice(&r[..col]);
        head.extend_from_slice(&r[col + 1..]);
        head
    })
}