1
Fork 0

Handle tags better.

Currently, for the enums and comparison traits we always check the tag
for equality before doing anything else. This is a bit clumsy. This
commit changes things so that the tags are handled very much like a
zeroth field in the enum.

For `eq`/`ne` this makes the code slightly cleaner.

For `partial_cmp` and `cmp` it's a more notable change: in the case
where the tags aren't equal, instead of having a tag equality check
followed by a tag comparison, it just does a single tag comparison.

The commit also improves how `Hash` works for enums: instead of having
duplicated code to hash the tag for every arm within the match, we do
it just once before the match.

All this required replacing the `EnumNonMatchingCollapsed` value with a
new `EnumTag` value.

For fieldless enums the new code is particularly improved. All the code
now produced is close to optimal, being very similar to what you'd write
by hand.
This commit is contained in:
Nicholas Nethercote 2022-07-08 15:32:27 +10:00
parent 4bcbd76bc9
commit 10144e29af
9 changed files with 245 additions and 329 deletions

View file

@ -148,7 +148,7 @@ fn cs_clone_simple(
), ),
} }
} }
BlockOrExpr::new_mixed(stmts, cx.expr_deref(trait_span, cx.expr_self(trait_span))) BlockOrExpr::new_mixed(stmts, Some(cx.expr_deref(trait_span, cx.expr_self(trait_span))))
} }
fn cs_clone( fn cs_clone(
@ -177,9 +177,7 @@ fn cs_clone(
all_fields = af; all_fields = af;
vdata = &variant.data; vdata = &variant.data;
} }
EnumNonMatchingCollapsed(..) => { EnumTag(..) => cx.span_bug(trait_span, &format!("enum tags in `derive({})`", name,)),
cx.span_bug(trait_span, &format!("non-matching enum variants in `derive({})`", name,))
}
StaticEnum(..) | StaticStruct(..) => { StaticEnum(..) | StaticStruct(..) => {
cx.span_bug(trait_span, &format!("associated function in `derive({})`", name)) cx.span_bug(trait_span, &format!("associated function in `derive({})`", name))
} }

View file

@ -73,16 +73,6 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> Bl
cx.expr_match(span, expr2, vec![eq_arm, neq_arm]) cx.expr_match(span, expr2, vec![eq_arm, neq_arm])
} }
CsFold::Fieldless => cx.expr_path(equal_path.clone()), CsFold::Fieldless => cx.expr_path(equal_path.clone()),
CsFold::EnumNonMatching(span, tag_tuple) => {
if tag_tuple.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`")
} else {
let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0]));
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1]));
let fn_cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]);
cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt])
}
}
}, },
); );
BlockOrExpr::new_expr(expr) BlockOrExpr::new_expr(expr)

View file

@ -51,7 +51,6 @@ pub fn expand_deriving_partial_eq(
} }
CsFold::Combine(span, expr1, expr2) => cx.expr_binary(span, combiner, expr1, expr2), CsFold::Combine(span, expr1, expr2) => cx.expr_binary(span, combiner, expr1, expr2),
CsFold::Fieldless => cx.expr_bool(span, base), CsFold::Fieldless => cx.expr_bool(span, base),
CsFold::EnumNonMatching(span, _tag_tuple) => cx.expr_bool(span, !base),
}, },
); );
BlockOrExpr::new_expr(expr) BlockOrExpr::new_expr(expr)

View file

@ -82,17 +82,6 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
cx.expr_match(span, expr2, vec![eq_arm, neq_arm]) cx.expr_match(span, expr2, vec![eq_arm, neq_arm])
} }
CsFold::Fieldless => cx.expr_some(span, cx.expr_path(equal_path.clone())), CsFold::Fieldless => cx.expr_some(span, cx.expr_path(equal_path.clone())),
CsFold::EnumNonMatching(span, tag_tuple) => {
if tag_tuple.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
} else {
let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0]));
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1]));
let fn_partial_cmp_path =
cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]);
cx.expr_call_global(span, fn_partial_cmp_path, vec![lft, rgt])
}
}
}, },
); );
BlockOrExpr::new_expr(expr) BlockOrExpr::new_expr(expr)

View file

@ -45,7 +45,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
let (ident, vdata, fields) = match substr.fields { let (ident, vdata, fields) = match substr.fields {
Struct(vdata, fields) => (substr.type_ident, *vdata, fields), Struct(vdata, fields) => (substr.type_ident, *vdata, fields),
EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields), EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields),
EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) => { EnumTag(..) | StaticStruct(..) | StaticEnum(..) => {
cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`") cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`")
} }
}; };
@ -176,6 +176,6 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
stmts.push(names_let.unwrap()); stmts.push(names_let.unwrap());
} }
stmts.push(values_let); stmts.push(values_let);
BlockOrExpr::new_mixed(stmts, expr) BlockOrExpr::new_mixed(stmts, Some(expr))
} }
} }

View file

@ -287,7 +287,7 @@ fn encodable_substructure(
fn_emit_enum_path, fn_emit_enum_path,
vec![encoder, cx.expr_str(trait_span, substr.type_ident.name), blk], vec![encoder, cx.expr_str(trait_span, substr.type_ident.name), blk],
); );
BlockOrExpr::new_mixed(vec![me], expr) BlockOrExpr::new_mixed(vec![me], Some(expr))
} }
_ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"), _ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"),

View file

@ -21,21 +21,14 @@
//! `struct T(i32, char)`). //! `struct T(i32, char)`).
//! - `EnumMatching`, when `Self` is an enum and all the arguments are the //! - `EnumMatching`, when `Self` is an enum and all the arguments are the
//! same variant of the enum (e.g., `Some(1)`, `Some(3)` and `Some(4)`) //! same variant of the enum (e.g., `Some(1)`, `Some(3)` and `Some(4)`)
//! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments //! - `EnumTag` when `Self` is an enum, for comparing the enum tags.
//! are not the same variant (e.g., `None`, `Some(1)` and `None`).
//! - `StaticEnum` and `StaticStruct` for static methods, where the type //! - `StaticEnum` and `StaticStruct` for static methods, where the type
//! being derived upon is either an enum or struct respectively. (Any //! being derived upon is either an enum or struct respectively. (Any
//! argument with type Self is just grouped among the non-self //! argument with type Self is just grouped among the non-self
//! arguments.) //! arguments.)
//! //!
//! In the first two cases, the values from the corresponding fields in //! In the first two cases, the values from the corresponding fields in
//! all the arguments are grouped together. For `EnumNonMatchingCollapsed` //! all the arguments are grouped together.
//! this isn't possible (different variants have different fields), so the
//! fields are inaccessible. (Previous versions of the deriving infrastructure
//! had a way to expand into code that could access them, at the cost of
//! generating exponential amounts of code; see issue #15375). There are no
//! fields with values in the static cases, so these are treated entirely
//! differently.
//! //!
//! The non-static cases have `Option<ident>` in several places associated //! The non-static cases have `Option<ident>` in several places associated
//! with field `expr`s. This represents the name of the field it is //! with field `expr`s. This represents the name of the field it is
@ -142,21 +135,15 @@
//! }]) //! }])
//! ``` //! ```
//! //!
//! For `C0(a)` and `C1 {x}` , //! For the tags,
//! //!
//! ```{.text} //! ```{.text}
//! EnumNonMatchingCollapsed( //! EnumTag(
//! &[<ident for self index value>, <ident of __arg1 index value>]) //! &[<ident of self tag>, <ident of other tag>], <expr to combine with>)
//! ``` //! ```
//! //! Note that this setup doesn't allow for the brute-force "match every variant
//! It is the same for when the arguments are flipped to `C1 {x}` and //! against every other variant" approach, which is bad because it produces a
//! `C0(a)`; the only difference is what the values of the identifiers //! quadratic amount of code (see #15375).
//! <ident for self index value> and <ident of __arg1 index value> will
//! be in the generated code.
//!
//! `EnumNonMatchingCollapsed` deliberately provides far less information
//! than is generally available for a given pair of variants; see #15375
//! for discussion.
//! //!
//! ## Static //! ## Static
//! //!
@ -180,7 +167,7 @@ use std::iter;
use std::vec; use std::vec;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::{self as ast, BinOpKind, EnumDef, Expr, Generics, PatKind}; use rustc_ast::{self as ast, EnumDef, Expr, Generics, PatKind};
use rustc_ast::{GenericArg, GenericParamKind, VariantData}; use rustc_ast::{GenericArg, GenericParamKind, VariantData};
use rustc_attr as attr; use rustc_attr as attr;
use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_expand::base::{Annotatable, ExtCtxt};
@ -235,6 +222,8 @@ pub struct MethodDef<'a> {
pub attributes: Vec<ast::Attribute>, pub attributes: Vec<ast::Attribute>,
/// Can we combine fieldless variants for enums into a single match arm? /// Can we combine fieldless variants for enums into a single match arm?
/// If true, indicates that the trait operation uses the enum tag in some
/// way.
pub unify_fieldless_variants: bool, pub unify_fieldless_variants: bool,
pub combine_substructure: RefCell<CombineSubstructureFunc<'a>>, pub combine_substructure: RefCell<CombineSubstructureFunc<'a>>,
@ -274,19 +263,22 @@ pub enum StaticFields {
/// A summary of the possible sets of fields. /// A summary of the possible sets of fields.
pub enum SubstructureFields<'a> { pub enum SubstructureFields<'a> {
/// A non-static method with `Self` is a struct.
Struct(&'a ast::VariantData, Vec<FieldInfo>), Struct(&'a ast::VariantData, Vec<FieldInfo>),
/// Matching variants of the enum: variant index, variant count, ast::Variant, /// Matching variants of the enum: variant index, variant count, ast::Variant,
/// fields: the field name is only non-`None` in the case of a struct /// fields: the field name is only non-`None` in the case of a struct
/// variant. /// variant.
EnumMatching(usize, usize, &'a ast::Variant, Vec<FieldInfo>), EnumMatching(usize, usize, &'a ast::Variant, Vec<FieldInfo>),
/// Non-matching variants of the enum, but with all state hidden from the /// The tag of an enum. The first field is a `FieldInfo` for the tags, as
/// consequent code. The field is a list of `Ident`s bound to the variant /// if they were fields. The second field is the expression to combine the
/// index values for each of the actual input `Self` arguments. /// tag expression with; it will be `None` if no match is necessary.
EnumNonMatchingCollapsed(&'a [Ident]), EnumTag(FieldInfo, Option<P<Expr>>),
/// A static method where `Self` is a struct. /// A static method where `Self` is a struct.
StaticStruct(&'a ast::VariantData, StaticFields), StaticStruct(&'a ast::VariantData, StaticFields),
/// A static method where `Self` is an enum. /// A static method where `Self` is an enum.
StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)>), StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)>),
} }
@ -324,8 +316,8 @@ impl BlockOrExpr {
BlockOrExpr(vec![], Some(expr)) BlockOrExpr(vec![], Some(expr))
} }
pub fn new_mixed(stmts: Vec<ast::Stmt>, expr: P<Expr>) -> BlockOrExpr { pub fn new_mixed(stmts: Vec<ast::Stmt>, expr: Option<P<Expr>>) -> BlockOrExpr {
BlockOrExpr(stmts, Some(expr)) BlockOrExpr(stmts, expr)
} }
// Converts it into a block. // Converts it into a block.
@ -339,7 +331,6 @@ impl BlockOrExpr {
// Converts it into an expression. // Converts it into an expression.
fn into_expr(self, cx: &ExtCtxt<'_>, span: Span) -> P<Expr> { fn into_expr(self, cx: &ExtCtxt<'_>, span: Span) -> P<Expr> {
if self.0.is_empty() { if self.0.is_empty() {
// No statements.
match self.1 { match self.1 {
None => cx.expr_block(cx.block(span, vec![])), None => cx.expr_block(cx.block(span, vec![])),
Some(expr) => expr, Some(expr) => expr,
@ -1135,44 +1126,34 @@ impl<'a> MethodDef<'a> {
/// fn eq(&self, other: &A) -> bool { /// fn eq(&self, other: &A) -> bool {
/// let __self_tag = ::core::intrinsics::discriminant_value(self); /// let __self_tag = ::core::intrinsics::discriminant_value(self);
/// let __arg1_tag = ::core::intrinsics::discriminant_value(other); /// let __arg1_tag = ::core::intrinsics::discriminant_value(other);
/// if __self_tag == __arg1_tag { /// __self_tag == __arg1_tag &&
/// match (self, other) { /// match (self, other) {
/// (A::A2(__self_0), A::A2(__arg1_0)) => /// (A::A2(__self_0), A::A2(__arg1_0)) =>
/// *__self_0 == *__arg1_0, /// *__self_0 == *__arg1_0,
/// _ => true, /// _ => true,
/// } /// }
/// } else {
/// false // catch-all handler
/// }
/// } /// }
/// } /// }
/// ``` /// ```
/// Creates a match for a tuple of all `selflike_args`, where either all /// Creates a tag check combined with a match for a tuple of all
/// variants match, or it falls into a catch-all for when one variant /// `selflike_args`, with an arm for each variant with fields, possibly an
/// does not match. /// arm for each fieldless variant (if `!unify_fieldless_variants` is not
/// /// true), and possibly a default arm.
/// There are N + 1 cases because is a case for each of the N
/// variants where all of the variants match, and one catch-all for
/// when one does not match.
///
/// As an optimization we generate code which checks whether all variants
/// match first which makes llvm see that C-like enums can be compiled into
/// a simple equality check (for PartialEq).
///
/// The catch-all handler is provided access the variant index values
/// for each of the selflike_args, carried in precomputed variables.
fn expand_enum_method_body<'b>( fn expand_enum_method_body<'b>(
&self, &self,
cx: &mut ExtCtxt<'_>, cx: &mut ExtCtxt<'_>,
trait_: &TraitDef<'b>, trait_: &TraitDef<'b>,
enum_def: &'b EnumDef, enum_def: &'b EnumDef,
type_ident: Ident, type_ident: Ident,
mut selflike_args: Vec<P<Expr>>, selflike_args: Vec<P<Expr>>,
nonselflike_args: &[P<Expr>], nonselflike_args: &[P<Expr>],
) -> BlockOrExpr { ) -> BlockOrExpr {
let span = trait_.span; let span = trait_.span;
let variants = &enum_def.variants; let variants = &enum_def.variants;
// Traits that unify fieldless variants always use the tag(s).
let uses_tags = self.unify_fieldless_variants;
// There is no sensible code to be generated for *any* deriving on a // There is no sensible code to be generated for *any* deriving on a
// zero-variant enum. So we just generate a failing expression. // zero-variant enum. So we just generate a failing expression.
if variants.is_empty() { if variants.is_empty() {
@ -1189,27 +1170,82 @@ impl<'a> MethodDef<'a> {
) )
.collect::<Vec<String>>(); .collect::<Vec<String>>();
// The `tag_idents` will be bound, solely in the catch-all, to // Build a series of let statements mapping each selflike_arg
// a series of let statements mapping each selflike_arg to an int // to its discriminant value.
// value corresponding to its discriminant. //
let tag_idents = prefixes // e.g. for `PartialEq::eq` builds two statements:
.iter() // ```
.map(|name| Ident::from_str_and_span(&format!("{}_tag", name), span)) // let __self_tag = ::core::intrinsics::discriminant_value(self);
.collect::<Vec<Ident>>(); // let __arg1_tag = ::core::intrinsics::discriminant_value(other);
// ```
let get_tag_pieces = |cx: &ExtCtxt<'_>| {
let tag_idents: Vec<_> = prefixes
.iter()
.map(|name| Ident::from_str_and_span(&format!("{}_tag", name), span))
.collect();
// Builds, via callback to call_substructure_method, the let mut tag_exprs: Vec<_> = tag_idents
// delegated expression that handles the catch-all case, .iter()
// using `__variants_tuple` to drive logic if necessary. .map(|&ident| cx.expr_addr_of(span, cx.expr_ident(span, ident)))
let catch_all_substructure = EnumNonMatchingCollapsed(&tag_idents); .collect();
let first_fieldless = variants.iter().find(|v| v.data.fields().is_empty()); let self_expr = tag_exprs.remove(0);
let other_selflike_exprs = tag_exprs;
let tag_field = FieldInfo { span, name: None, self_expr, other_selflike_exprs };
let tag_let_stmts: Vec<_> = iter::zip(&tag_idents, &selflike_args)
.map(|(&ident, selflike_arg)| {
let variant_value = deriving::call_intrinsic(
cx,
span,
sym::discriminant_value,
vec![selflike_arg.clone()],
);
cx.stmt_let(span, false, ident, variant_value)
})
.collect();
(tag_field, tag_let_stmts)
};
// There are some special cases involving fieldless enums where no
// match is necessary.
let all_fieldless = variants.iter().all(|v| v.data.fields().is_empty());
if all_fieldless {
if uses_tags && variants.len() > 1 {
// If the type is fieldless and the trait uses the tag and
// there are multiple variants, we need just an operation on
// the tag(s).
let (tag_field, mut tag_let_stmts) = get_tag_pieces(cx);
let mut tag_check = self.call_substructure_method(
cx,
trait_,
type_ident,
nonselflike_args,
&EnumTag(tag_field, None),
);
tag_let_stmts.append(&mut tag_check.0);
return BlockOrExpr(tag_let_stmts, tag_check.1);
}
if variants.len() == 1 {
// If there is a single variant, we don't need an operation on
// the tag(s). Just use the most degenerate result.
return self.call_substructure_method(
cx,
trait_,
type_ident,
nonselflike_args,
&EnumMatching(0, 1, &variants[0], Vec::new()),
);
};
}
// These arms are of the form: // These arms are of the form:
// (Variant1, Variant1, ...) => Body1 // (Variant1, Variant1, ...) => Body1
// (Variant2, Variant2, ...) => Body2 // (Variant2, Variant2, ...) => Body2
// ... // ...
// where each tuple has length = selflike_args.len() // where each tuple has length = selflike_args.len()
let mut match_arms: Vec<ast::Arm> = variants let mut match_arms: Vec<ast::Arm> = variants
.iter() .iter()
.enumerate() .enumerate()
@ -1233,7 +1269,7 @@ impl<'a> MethodDef<'a> {
use_ref_pat, use_ref_pat,
); );
// Here is the pat = `(&VariantK, &VariantK, ...)` // `(VariantK, VariantK, ...)` or just `VariantK`.
let single_pat = if subpats.len() == 1 { let single_pat = if subpats.len() == 1 {
subpats.pop().unwrap() subpats.pop().unwrap()
} else { } else {
@ -1263,27 +1299,28 @@ impl<'a> MethodDef<'a> {
}) })
.collect(); .collect();
// Add a default arm to the match, if necessary.
let first_fieldless = variants.iter().find(|v| v.data.fields().is_empty());
let default = match first_fieldless { let default = match first_fieldless {
Some(v) if self.unify_fieldless_variants => { Some(v) if self.unify_fieldless_variants => {
// We need a default case that handles the fieldless variants. // We need a default case that handles all the fieldless
// The index and actual variant aren't meaningful in this case, // variants. The index and actual variant aren't meaningful in
// so just use whatever // this case, so just use dummy values.
let substructure = EnumMatching(0, variants.len(), v, Vec::new());
Some( Some(
self.call_substructure_method( self.call_substructure_method(
cx, cx,
trait_, trait_,
type_ident, type_ident,
nonselflike_args, nonselflike_args,
&substructure, &EnumMatching(0, variants.len(), v, Vec::new()),
) )
.into_expr(cx, span), .into_expr(cx, span),
) )
} }
_ if variants.len() > 1 && selflike_args.len() > 1 => { _ if variants.len() > 1 && selflike_args.len() > 1 => {
// Since we know that all the arguments will match if we reach // Because we know that all the arguments will match if we reach
// the match expression we add the unreachable intrinsics as the // the match expression we add the unreachable intrinsics as the
// result of the catch all which should help llvm in optimizing it // result of the default which should help llvm in optimizing it.
Some(deriving::call_unreachable(cx, span)) Some(deriving::call_unreachable(cx, span))
} }
_ => None, _ => None,
@ -1292,92 +1329,41 @@ impl<'a> MethodDef<'a> {
match_arms.push(cx.arm(span, cx.pat_wild(span), arm)); match_arms.push(cx.arm(span, cx.pat_wild(span), arm));
} }
// We will usually need the catch-all after matching the // Create a match expression with one arm per discriminant plus
// tuples `(VariantK, VariantK, ...)` for each VariantK of the // possibly a default arm, e.g.:
// enum. But: // match (self, other) {
// // (Variant1, Variant1, ...) => Body1
// * when there is only one Self arg, the arms above suffice // (Variant2, Variant2, ...) => Body2,
// (and the deriving we call back into may not be prepared to // ...
// handle EnumNonMatchCollapsed), and, // _ => ::core::intrinsics::unreachable()
// // }
// * when the enum has only one variant, the single arm that let get_match_expr = |mut selflike_args: Vec<P<Expr>>| {
// is already present always suffices.
//
// * In either of the two cases above, if we *did* add a
// catch-all `_` match, it would trigger the
// unreachable-pattern error.
//
if variants.len() > 1 && selflike_args.len() > 1 {
// Build a series of let statements mapping each selflike_arg
// to its discriminant value.
//
// i.e., for `enum E<T> { A, B(1), C(T, T) }` for `PartialEq::eq`,
// builds two statements:
// ```
// let __self_tag = ::core::intrinsics::discriminant_value(self);
// let __arg1_tag = ::core::intrinsics::discriminant_value(other);
// ```
let mut index_let_stmts: Vec<ast::Stmt> = Vec::with_capacity(tag_idents.len() + 1);
// We also build an expression which checks whether all discriminants are equal, e.g.
// `__self_tag == __arg1_tag`.
let mut discriminant_test = cx.expr_bool(span, true);
for (i, (&ident, selflike_arg)) in iter::zip(&tag_idents, &selflike_args).enumerate() {
let variant_value = deriving::call_intrinsic(
cx,
span,
sym::discriminant_value,
vec![selflike_arg.clone()],
);
let let_stmt = cx.stmt_let(span, false, ident, variant_value);
index_let_stmts.push(let_stmt);
if i > 0 {
let id0 = cx.expr_ident(span, tag_idents[0]);
let id = cx.expr_ident(span, ident);
let test = cx.expr_binary(span, BinOpKind::Eq, id0, id);
discriminant_test = if i == 1 {
test
} else {
cx.expr_binary(span, BinOpKind::And, discriminant_test, test)
};
}
}
let arm_expr = self
.call_substructure_method(
cx,
trait_,
type_ident,
nonselflike_args,
&catch_all_substructure,
)
.into_expr(cx, span);
let match_arg = cx.expr(span, ast::ExprKind::Tup(selflike_args));
// Lastly we create an expression which branches on all discriminants being equal, e.g.
// if __self_tag == _arg1_tag {
// match (self, other) {
// (Variant1, Variant1, ...) => Body1
// (Variant2, Variant2, ...) => Body2,
// ...
// _ => ::core::intrinsics::unreachable()
// }
// }
// else {
// <delegated expression referring to __self_tag, et al.>
// }
let all_match = cx.expr_match(span, match_arg, match_arms);
let arm_expr = cx.expr_if(span, discriminant_test, all_match, Some(arm_expr));
BlockOrExpr(index_let_stmts, Some(arm_expr))
} else {
let match_arg = if selflike_args.len() == 1 { let match_arg = if selflike_args.len() == 1 {
selflike_args.pop().unwrap() selflike_args.pop().unwrap()
} else { } else {
cx.expr(span, ast::ExprKind::Tup(selflike_args)) cx.expr(span, ast::ExprKind::Tup(selflike_args))
}; };
BlockOrExpr(vec![], Some(cx.expr_match(span, match_arg, match_arms))) cx.expr_match(span, match_arg, match_arms)
};
// If the trait uses the tag and there are multiple variants, we need
// to add a tag check operation before the match. Otherwise, the match
// is enough.
if uses_tags && variants.len() > 1 {
let (tag_field, mut tag_let_stmts) = get_tag_pieces(cx);
// Combine a tag check with the match.
let mut tag_check_plus_match = self.call_substructure_method(
cx,
trait_,
type_ident,
nonselflike_args,
&EnumTag(tag_field, Some(get_match_expr(selflike_args))),
);
tag_let_stmts.append(&mut tag_check_plus_match.0);
BlockOrExpr(tag_let_stmts, tag_check_plus_match.1)
} else {
BlockOrExpr(vec![], Some(get_match_expr(selflike_args)))
} }
} }
@ -1591,11 +1577,6 @@ pub enum CsFold<'a> {
// The fallback case for a struct or enum variant with no fields. // The fallback case for a struct or enum variant with no fields.
Fieldless, Fieldless,
/// The fallback case for non-matching enum variants. The slice is the
/// identifiers holding the variant index value for each of the `Self`
/// arguments.
EnumNonMatching(Span, &'a [Ident]),
} }
/// Folds over fields, combining the expressions for each field in a sequence. /// Folds over fields, combining the expressions for each field in a sequence.
@ -1610,8 +1591,8 @@ pub fn cs_fold<F>(
where where
F: FnMut(&mut ExtCtxt<'_>, CsFold<'_>) -> P<Expr>, F: FnMut(&mut ExtCtxt<'_>, CsFold<'_>) -> P<Expr>,
{ {
match *substructure.fields { match substructure.fields {
EnumMatching(.., ref all_fields) | Struct(_, ref all_fields) => { EnumMatching(.., all_fields) | Struct(_, all_fields) => {
if all_fields.is_empty() { if all_fields.is_empty() {
return f(cx, CsFold::Fieldless); return f(cx, CsFold::Fieldless);
} }
@ -1635,7 +1616,18 @@ where
rest.iter().rfold(base_expr, op) rest.iter().rfold(base_expr, op)
} }
} }
EnumNonMatchingCollapsed(tuple) => f(cx, CsFold::EnumNonMatching(trait_span, tuple)), EnumTag(tag_field, match_expr) => {
let tag_check_expr = f(cx, CsFold::Single(tag_field));
if let Some(match_expr) = match_expr {
if use_foldl {
f(cx, CsFold::Combine(trait_span, tag_check_expr, match_expr.clone()))
} else {
f(cx, CsFold::Combine(trait_span, match_expr.clone(), tag_check_expr))
}
} else {
tag_check_expr
}
}
StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span, "static function in `derive`"), StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span, "static function in `derive`"),
} }
} }

View file

@ -1,6 +1,6 @@
use crate::deriving::generic::ty::*; use crate::deriving::generic::ty::*;
use crate::deriving::generic::*; use crate::deriving::generic::*;
use crate::deriving::{self, path_std, pathvec_std}; use crate::deriving::{path_std, pathvec_std};
use rustc_ast::{MetaItem, Mutability}; use rustc_ast::{MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_expand::base::{Annotatable, ExtCtxt};
@ -61,32 +61,20 @@ fn hash_substructure(
let expr = cx.expr_call(span, hash_path, vec![expr, state_expr.clone()]); let expr = cx.expr_call(span, hash_path, vec![expr, state_expr.clone()]);
cx.stmt_expr(expr) cx.stmt_expr(expr)
}; };
let mut stmts = Vec::new();
let fields = match substr.fields { let (stmts, match_expr) = match substr.fields {
Struct(_, fs) | EnumMatching(_, 1, .., fs) => fs, Struct(_, fields) | EnumMatching(.., fields) => {
EnumMatching(.., fs) => { let stmts =
let variant_value = cx.expr_addr_of( fields.iter().map(|field| call_hash(field.span, field.self_expr.clone())).collect();
trait_span, (stmts, None)
deriving::call_intrinsic( }
cx, EnumTag(tag_field, match_expr) => {
trait_span, assert!(tag_field.other_selflike_exprs.is_empty());
sym::discriminant_value, let stmts = vec![call_hash(tag_field.span, tag_field.self_expr.clone())];
vec![cx.expr_self(trait_span)], (stmts, match_expr.clone())
),
);
stmts.push(call_hash(trait_span, variant_value));
fs
} }
_ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`"), _ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`"),
}; };
stmts.extend( BlockOrExpr::new_mixed(stmts, match_expr)
fields
.iter()
.map(|FieldInfo { ref self_expr, span, .. }| call_hash(*span, self_expr.clone())),
);
BlockOrExpr::new_stmts(stmts)
} }

View file

@ -766,17 +766,13 @@ enum Fieldless1 {
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::clone::Clone for Fieldless1 { impl ::core::clone::Clone for Fieldless1 {
#[inline] #[inline]
fn clone(&self) -> Fieldless1 { fn clone(&self) -> Fieldless1 { Fieldless1::A }
match self { Fieldless1::A => Fieldless1::A, }
}
} }
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::fmt::Debug for Fieldless1 { impl ::core::fmt::Debug for Fieldless1 {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self { ::core::fmt::Formatter::write_str(f, "A")
Fieldless1::A => ::core::fmt::Formatter::write_str(f, "A"),
}
} }
} }
#[automatically_derived] #[automatically_derived]
@ -788,18 +784,14 @@ impl ::core::default::Default for Fieldless1 {
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::hash::Hash for Fieldless1 { impl ::core::hash::Hash for Fieldless1 {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {}
match self { _ => {} }
}
} }
impl ::core::marker::StructuralPartialEq for Fieldless1 {} impl ::core::marker::StructuralPartialEq for Fieldless1 {}
#[automatically_derived] #[automatically_derived]
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Fieldless1 { impl ::core::cmp::PartialEq for Fieldless1 {
#[inline] #[inline]
fn eq(&self, other: &Fieldless1) -> bool { fn eq(&self, other: &Fieldless1) -> bool { true }
match (self, other) { _ => true, }
}
} }
impl ::core::marker::StructuralEq for Fieldless1 {} impl ::core::marker::StructuralEq for Fieldless1 {}
#[automatically_derived] #[automatically_derived]
@ -816,9 +808,7 @@ impl ::core::cmp::PartialOrd for Fieldless1 {
#[inline] #[inline]
fn partial_cmp(&self, other: &Fieldless1) fn partial_cmp(&self, other: &Fieldless1)
-> ::core::option::Option<::core::cmp::Ordering> { -> ::core::option::Option<::core::cmp::Ordering> {
match (self, other) { ::core::option::Option::Some(::core::cmp::Ordering::Equal)
_ => ::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} }
} }
#[automatically_derived] #[automatically_derived]
@ -826,7 +816,7 @@ impl ::core::cmp::PartialOrd for Fieldless1 {
impl ::core::cmp::Ord for Fieldless1 { impl ::core::cmp::Ord for Fieldless1 {
#[inline] #[inline]
fn cmp(&self, other: &Fieldless1) -> ::core::cmp::Ordering { fn cmp(&self, other: &Fieldless1) -> ::core::cmp::Ordering {
match (self, other) { _ => ::core::cmp::Ordering::Equal, } ::core::cmp::Ordering::Equal
} }
} }
@ -868,11 +858,8 @@ impl ::core::default::Default for Fieldless {
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::hash::Hash for Fieldless { impl ::core::hash::Hash for Fieldless {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match self { let __self_tag = ::core::intrinsics::discriminant_value(self);
_ => ::core::hash::Hash::hash(&__self_tag, state)
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state),
}
} }
} }
impl ::core::marker::StructuralPartialEq for Fieldless {} impl ::core::marker::StructuralPartialEq for Fieldless {}
@ -883,9 +870,7 @@ impl ::core::cmp::PartialEq for Fieldless {
fn eq(&self, other: &Fieldless) -> bool { fn eq(&self, other: &Fieldless) -> bool {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { __self_tag == __arg1_tag
match (self, other) { _ => true, }
} else { false }
} }
} }
impl ::core::marker::StructuralEq for Fieldless {} impl ::core::marker::StructuralEq for Fieldless {}
@ -905,14 +890,7 @@ impl ::core::cmp::PartialOrd for Fieldless {
-> ::core::option::Option<::core::cmp::Ordering> { -> ::core::option::Option<::core::cmp::Ordering> {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { ::core::cmp::PartialOrd::partial_cmp(&__self_tag, &__arg1_tag)
match (self, other) {
_ =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_tag, &__arg1_tag)
}
} }
} }
#[automatically_derived] #[automatically_derived]
@ -922,9 +900,7 @@ impl ::core::cmp::Ord for Fieldless {
fn cmp(&self, other: &Fieldless) -> ::core::cmp::Ordering { fn cmp(&self, other: &Fieldless) -> ::core::cmp::Ordering {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { ::core::cmp::Ord::cmp(&__self_tag, &__arg1_tag)
match (self, other) { _ => ::core::cmp::Ordering::Equal, }
} else { ::core::cmp::Ord::cmp(&__self_tag, &__arg1_tag) }
} }
} }
@ -978,21 +954,15 @@ impl ::core::default::Default for Mixed {
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::hash::Hash for Mixed { impl ::core::hash::Hash for Mixed {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
let __self_tag = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_tag, state);
match self { match self {
Mixed::R(__self_0) => { Mixed::R(__self_0) => ::core::hash::Hash::hash(__self_0, state),
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(__self_0, state)
}
Mixed::S { d1: __self_0, d2: __self_1 } => { Mixed::S { d1: __self_0, d2: __self_1 } => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(__self_0, state); ::core::hash::Hash::hash(__self_0, state);
::core::hash::Hash::hash(__self_1, state) ::core::hash::Hash::hash(__self_1, state)
} }
_ => _ => {}
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state),
} }
} }
} }
@ -1004,31 +974,29 @@ impl ::core::cmp::PartialEq for Mixed {
fn eq(&self, other: &Mixed) -> bool { fn eq(&self, other: &Mixed) -> bool {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { __self_tag == __arg1_tag &&
match (self, other) { match (self, other) {
(Mixed::R(__self_0), Mixed::R(__arg1_0)) => (Mixed::R(__self_0), Mixed::R(__arg1_0)) =>
*__self_0 == *__arg1_0, *__self_0 == *__arg1_0,
(Mixed::S { d1: __self_0, d2: __self_1 }, Mixed::S { (Mixed::S { d1: __self_0, d2: __self_1 }, Mixed::S {
d1: __arg1_0, d2: __arg1_1 }) => d1: __arg1_0, d2: __arg1_1 }) =>
*__self_0 == *__arg1_0 && *__self_1 == *__arg1_1, *__self_0 == *__arg1_0 && *__self_1 == *__arg1_1,
_ => true, _ => true,
} }
} else { false }
} }
#[inline] #[inline]
fn ne(&self, other: &Mixed) -> bool { fn ne(&self, other: &Mixed) -> bool {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { __self_tag != __arg1_tag ||
match (self, other) { match (self, other) {
(Mixed::R(__self_0), Mixed::R(__arg1_0)) => (Mixed::R(__self_0), Mixed::R(__arg1_0)) =>
*__self_0 != *__arg1_0, *__self_0 != *__arg1_0,
(Mixed::S { d1: __self_0, d2: __self_1 }, Mixed::S { (Mixed::S { d1: __self_0, d2: __self_1 }, Mixed::S {
d1: __arg1_0, d2: __arg1_1 }) => d1: __arg1_0, d2: __arg1_1 }) =>
*__self_0 != *__arg1_0 || *__self_1 != *__arg1_1, *__self_0 != *__arg1_0 || *__self_1 != *__arg1_1,
_ => false, _ => false,
} }
} else { true }
} }
} }
impl ::core::marker::StructuralEq for Mixed {} impl ::core::marker::StructuralEq for Mixed {}
@ -1050,7 +1018,8 @@ impl ::core::cmp::PartialOrd for Mixed {
-> ::core::option::Option<::core::cmp::Ordering> { -> ::core::option::Option<::core::cmp::Ordering> {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { match ::core::cmp::PartialOrd::partial_cmp(&__self_tag, &__arg1_tag) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
match (self, other) { match (self, other) {
(Mixed::R(__self_0), Mixed::R(__arg1_0)) => (Mixed::R(__self_0), Mixed::R(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0), ::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
@ -1064,10 +1033,9 @@ impl ::core::cmp::PartialOrd for Mixed {
}, },
_ => _ =>
::core::option::Option::Some(::core::cmp::Ordering::Equal), ::core::option::Option::Some(::core::cmp::Ordering::Equal),
} },
} else { cmp => cmp,
::core::cmp::PartialOrd::partial_cmp(&__self_tag, &__arg1_tag) }
}
} }
} }
#[automatically_derived] #[automatically_derived]
@ -1077,7 +1045,8 @@ impl ::core::cmp::Ord for Mixed {
fn cmp(&self, other: &Mixed) -> ::core::cmp::Ordering { fn cmp(&self, other: &Mixed) -> ::core::cmp::Ordering {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { match ::core::cmp::Ord::cmp(&__self_tag, &__arg1_tag) {
::core::cmp::Ordering::Equal =>
match (self, other) { match (self, other) {
(Mixed::R(__self_0), Mixed::R(__arg1_0)) => (Mixed::R(__self_0), Mixed::R(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0), ::core::cmp::Ord::cmp(__self_0, __arg1_0),
@ -1089,8 +1058,9 @@ impl ::core::cmp::Ord for Mixed {
cmp => cmp, cmp => cmp,
}, },
_ => ::core::cmp::Ordering::Equal, _ => ::core::cmp::Ordering::Equal,
} },
} else { ::core::cmp::Ord::cmp(&__self_tag, &__arg1_tag) } cmp => cmp,
}
} }
} }
@ -1133,22 +1103,12 @@ impl ::core::fmt::Debug for Fielded {
#[allow(unused_qualifications)] #[allow(unused_qualifications)]
impl ::core::hash::Hash for Fielded { impl ::core::hash::Hash for Fielded {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
let __self_tag = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_tag, state);
match self { match self {
Fielded::X(__self_0) => { Fielded::X(__self_0) => ::core::hash::Hash::hash(__self_0, state),
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self), Fielded::Y(__self_0) => ::core::hash::Hash::hash(__self_0, state),
state); Fielded::Z(__self_0) => ::core::hash::Hash::hash(__self_0, state),
::core::hash::Hash::hash(__self_0, state)
}
Fielded::Y(__self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(__self_0, state)
}
Fielded::Z(__self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(__self_0, state)
}
} }
} }
} }
@ -1160,33 +1120,31 @@ impl ::core::cmp::PartialEq for Fielded {
fn eq(&self, other: &Fielded) -> bool { fn eq(&self, other: &Fielded) -> bool {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { __self_tag == __arg1_tag &&
match (self, other) { match (self, other) {
(Fielded::X(__self_0), Fielded::X(__arg1_0)) => (Fielded::X(__self_0), Fielded::X(__arg1_0)) =>
*__self_0 == *__arg1_0, *__self_0 == *__arg1_0,
(Fielded::Y(__self_0), Fielded::Y(__arg1_0)) => (Fielded::Y(__self_0), Fielded::Y(__arg1_0)) =>
*__self_0 == *__arg1_0, *__self_0 == *__arg1_0,
(Fielded::Z(__self_0), Fielded::Z(__arg1_0)) => (Fielded::Z(__self_0), Fielded::Z(__arg1_0)) =>
*__self_0 == *__arg1_0, *__self_0 == *__arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() } _ => unsafe { ::core::intrinsics::unreachable() }
} }
} else { false }
} }
#[inline] #[inline]
fn ne(&self, other: &Fielded) -> bool { fn ne(&self, other: &Fielded) -> bool {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { __self_tag != __arg1_tag ||
match (self, other) { match (self, other) {
(Fielded::X(__self_0), Fielded::X(__arg1_0)) => (Fielded::X(__self_0), Fielded::X(__arg1_0)) =>
*__self_0 != *__arg1_0, *__self_0 != *__arg1_0,
(Fielded::Y(__self_0), Fielded::Y(__arg1_0)) => (Fielded::Y(__self_0), Fielded::Y(__arg1_0)) =>
*__self_0 != *__arg1_0, *__self_0 != *__arg1_0,
(Fielded::Z(__self_0), Fielded::Z(__arg1_0)) => (Fielded::Z(__self_0), Fielded::Z(__arg1_0)) =>
*__self_0 != *__arg1_0, *__self_0 != *__arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() } _ => unsafe { ::core::intrinsics::unreachable() }
} }
} else { true }
} }
} }
impl ::core::marker::StructuralEq for Fielded {} impl ::core::marker::StructuralEq for Fielded {}
@ -1210,7 +1168,8 @@ impl ::core::cmp::PartialOrd for Fielded {
-> ::core::option::Option<::core::cmp::Ordering> { -> ::core::option::Option<::core::cmp::Ordering> {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { match ::core::cmp::PartialOrd::partial_cmp(&__self_tag, &__arg1_tag) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
match (self, other) { match (self, other) {
(Fielded::X(__self_0), Fielded::X(__arg1_0)) => (Fielded::X(__self_0), Fielded::X(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0), ::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
@ -1219,10 +1178,9 @@ impl ::core::cmp::PartialOrd for Fielded {
(Fielded::Z(__self_0), Fielded::Z(__arg1_0)) => (Fielded::Z(__self_0), Fielded::Z(__arg1_0)) =>
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0), ::core::cmp::PartialOrd::partial_cmp(__self_0, __arg1_0),
_ => unsafe { ::core::intrinsics::unreachable() } _ => unsafe { ::core::intrinsics::unreachable() }
} },
} else { cmp => cmp,
::core::cmp::PartialOrd::partial_cmp(&__self_tag, &__arg1_tag) }
}
} }
} }
#[automatically_derived] #[automatically_derived]
@ -1232,7 +1190,8 @@ impl ::core::cmp::Ord for Fielded {
fn cmp(&self, other: &Fielded) -> ::core::cmp::Ordering { fn cmp(&self, other: &Fielded) -> ::core::cmp::Ordering {
let __self_tag = ::core::intrinsics::discriminant_value(self); let __self_tag = ::core::intrinsics::discriminant_value(self);
let __arg1_tag = ::core::intrinsics::discriminant_value(other); let __arg1_tag = ::core::intrinsics::discriminant_value(other);
if __self_tag == __arg1_tag { match ::core::cmp::Ord::cmp(&__self_tag, &__arg1_tag) {
::core::cmp::Ordering::Equal =>
match (self, other) { match (self, other) {
(Fielded::X(__self_0), Fielded::X(__arg1_0)) => (Fielded::X(__self_0), Fielded::X(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0), ::core::cmp::Ord::cmp(__self_0, __arg1_0),
@ -1241,8 +1200,9 @@ impl ::core::cmp::Ord for Fielded {
(Fielded::Z(__self_0), Fielded::Z(__arg1_0)) => (Fielded::Z(__self_0), Fielded::Z(__arg1_0)) =>
::core::cmp::Ord::cmp(__self_0, __arg1_0), ::core::cmp::Ord::cmp(__self_0, __arg1_0),
_ => unsafe { ::core::intrinsics::unreachable() } _ => unsafe { ::core::intrinsics::unreachable() }
} },
} else { ::core::cmp::Ord::cmp(&__self_tag, &__arg1_tag) } cmp => cmp,
}
} }
} }