Auto merge of #98376 - nnethercote:improve-derive-PartialEq, r=petrochenkov

Improve some deriving code and add a test

The `.stdout` test is particularly useful.

r? `@petrochenkov`
This commit is contained in:
bors
2022-06-29 00:20:57 +00:00
7 changed files with 1251 additions and 131 deletions
@@ -3,9 +3,9 @@
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, GenericArg, Generics, ItemKind, MetaItem, VariantData};
use rustc_ast::{self as ast, Expr, Generics, ItemKind, MetaItem, VariantData};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::Span;
pub fn expand_deriving_clone(
@@ -107,44 +107,38 @@ fn cs_clone_shallow(
substr: &Substructure<'_>,
is_union: bool,
) -> P<Expr> {
fn assert_ty_bounds(
cx: &mut ExtCtxt<'_>,
stmts: &mut Vec<ast::Stmt>,
ty: P<ast::Ty>,
span: Span,
helper_name: &str,
) {
// Generate statement `let _: helper_name<ty>;`,
// set the expn ID so we can use the unstable struct.
let span = cx.with_def_site_ctxt(span);
let assert_path = cx.path_all(
span,
true,
cx.std_path(&[sym::clone, Symbol::intern(helper_name)]),
vec![GenericArg::Type(ty)],
);
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
}
fn process_variant(cx: &mut ExtCtxt<'_>, stmts: &mut Vec<ast::Stmt>, variant: &VariantData) {
let mut stmts = Vec::new();
let mut process_variant = |variant: &VariantData| {
for field in variant.fields() {
// let _: AssertParamIsClone<FieldTy>;
assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsClone");
super::assert_ty_bounds(
cx,
&mut stmts,
field.ty.clone(),
field.span,
&[sym::clone, sym::AssertParamIsClone],
);
}
}
};
let mut stmts = Vec::new();
if is_union {
// let _: AssertParamIsCopy<Self>;
let self_ty = cx.ty_path(cx.path_ident(trait_span, Ident::with_dummy_span(kw::SelfUpper)));
assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy");
super::assert_ty_bounds(
cx,
&mut stmts,
self_ty,
trait_span,
&[sym::clone, sym::AssertParamIsCopy],
);
} else {
match *substr.fields {
StaticStruct(vdata, ..) => {
process_variant(cx, &mut stmts, vdata);
process_variant(vdata);
}
StaticEnum(enum_def, ..) => {
for variant in &enum_def.variants {
process_variant(cx, &mut stmts, &variant.data);
process_variant(&variant.data);
}
}
_ => cx.span_bug(
@@ -3,9 +3,9 @@
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, GenericArg, MetaItem};
use rustc_ast::{self as ast, Expr, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
pub fn expand_deriving_eq(
@@ -55,43 +55,27 @@ fn cs_total_eq_assert(
trait_span: Span,
substr: &Substructure<'_>,
) -> P<Expr> {
fn assert_ty_bounds(
cx: &mut ExtCtxt<'_>,
stmts: &mut Vec<ast::Stmt>,
ty: P<ast::Ty>,
span: Span,
helper_name: &str,
) {
// Generate statement `let _: helper_name<ty>;`,
// set the expn ID so we can use the unstable struct.
let span = cx.with_def_site_ctxt(span);
let assert_path = cx.path_all(
span,
true,
cx.std_path(&[sym::cmp, Symbol::intern(helper_name)]),
vec![GenericArg::Type(ty)],
);
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
}
fn process_variant(
cx: &mut ExtCtxt<'_>,
stmts: &mut Vec<ast::Stmt>,
variant: &ast::VariantData,
) {
let mut stmts = Vec::new();
let mut process_variant = |variant: &ast::VariantData| {
for field in variant.fields() {
// let _: AssertParamIsEq<FieldTy>;
assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsEq");
super::assert_ty_bounds(
cx,
&mut stmts,
field.ty.clone(),
field.span,
&[sym::cmp, sym::AssertParamIsEq],
);
}
}
};
let mut stmts = Vec::new();
match *substr.fields {
StaticStruct(vdata, ..) => {
process_variant(cx, &mut stmts, vdata);
process_variant(vdata);
}
StaticEnum(enum_def, ..) => {
for variant in &enum_def.variants {
process_variant(cx, &mut stmts, &variant.data);
process_variant(&variant.data);
}
}
_ => cx.span_bug(trait_span, "unexpected substructure in `derive(Eq)`"),
@@ -1126,75 +1126,43 @@ fn expand_static_struct_method_body(
/// A1,
/// A2(i32)
/// }
///
/// // is equivalent to
///
/// impl PartialEq for A {
/// ```
/// is equivalent to:
/// ```
/// impl ::core::cmp::PartialEq for A {
/// #[inline]
/// fn eq(&self, other: &A) -> bool {
/// use A::*;
/// match (&*self, &*other) {
/// (&A1, &A1) => true,
/// (&A2(ref self_0),
/// &A2(ref __arg_1_0)) => (*self_0).eq(&(*__arg_1_0)),
/// _ => {
/// let __self_vi = match *self { A1 => 0, A2(..) => 1 };
/// let __arg_1_vi = match *other { A1 => 0, A2(..) => 1 };
/// false
/// {
/// let __self_vi = ::core::intrinsics::discriminant_value(&*self);
/// let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
/// if true && __self_vi == __arg_1_vi {
/// match (&*self, &*other) {
/// (&A::A2(ref __self_0), &A::A2(ref __arg_1_0)) =>
/// (*__self_0) == (*__arg_1_0),
/// _ => true,
/// }
/// } else {
/// false // catch-all handler
/// }
/// }
/// }
/// }
/// ```
///
/// (Of course `__self_vi` and `__arg_1_vi` are unused for
/// `PartialEq`, and those subcomputations will hopefully be removed
/// as their results are unused. The point of `__self_vi` and
/// `__arg_1_vi` is for `PartialOrd`; see #15503.)
fn expand_enum_method_body<'b>(
&self,
cx: &mut ExtCtxt<'_>,
trait_: &TraitDef<'b>,
enum_def: &'b EnumDef,
type_ident: Ident,
self_args: Vec<P<Expr>>,
nonself_args: &[P<Expr>],
) -> P<Expr> {
self.build_enum_match_tuple(cx, trait_, enum_def, type_ident, self_args, nonself_args)
}
/// Creates a match for a tuple of all `self_args`, where either all
/// variants match, or it falls into a catch-all for when one variant
/// does not match.
///
/// There are N + 1 cases because there is a case for each of the N
/// variants where all of the variants match, and one catch-all for
/// when one does not match.
///
/// As an optimization we generate code which checks whether all variants
/// match first which makes llvm see that C-like enums can be compiled into
/// a simple equality check (for PartialEq).
///
/// The catch-all handler is provided access to the variant index values
/// for each of the self-args, carried in precomputed variables.
/// ```{.text}
/// let __self0_vi = std::intrinsics::discriminant_value(&self);
/// let __self1_vi = std::intrinsics::discriminant_value(&arg1);
/// let __self2_vi = std::intrinsics::discriminant_value(&arg2);
///
/// if __self0_vi == __self1_vi && __self0_vi == __self2_vi && ... {
/// match (...) {
/// (Variant1, Variant1, ...) => Body1
/// (Variant2, Variant2, ...) => Body2,
/// ...
/// _ => ::core::intrinsics::unreachable()
/// }
/// }
/// else {
/// ... // catch-all remainder can inspect above variant index values.
/// }
/// ```
fn build_enum_match_tuple<'b>(
fn expand_enum_method_body<'b>(
&self,
cx: &mut ExtCtxt<'_>,
trait_: &TraitDef<'b>,
@@ -1392,37 +1360,32 @@ fn build_enum_match_tuple<'b>(
//
// i.e., for `enum E<T> { A, B(1), C(T, T) }`, and a deriving
// with three Self args, builds three statements:
//
// ```
// let __self0_vi = std::intrinsics::discriminant_value(&self);
// let __self1_vi = std::intrinsics::discriminant_value(&arg1);
// let __self2_vi = std::intrinsics::discriminant_value(&arg2);
// let __self_vi = std::intrinsics::discriminant_value(&self);
// let __arg_1_vi = std::intrinsics::discriminant_value(&arg1);
// let __arg_2_vi = std::intrinsics::discriminant_value(&arg2);
// ```
let mut index_let_stmts: Vec<ast::Stmt> = Vec::with_capacity(vi_idents.len() + 1);
// We also build an expression which checks whether all discriminants are equal
// discriminant_test = __self0_vi == __self1_vi && __self0_vi == __self2_vi && ...
// We also build an expression which checks whether all discriminants are equal:
// `__self_vi == __arg_1_vi && __self_vi == __arg_2_vi && ...`
let mut discriminant_test = cx.expr_bool(span, true);
let mut first_ident = None;
for (&ident, self_arg) in iter::zip(&vi_idents, &self_args) {
for (i, (&ident, self_arg)) in iter::zip(&vi_idents, &self_args).enumerate() {
let self_addr = cx.expr_addr_of(span, self_arg.clone());
let variant_value =
deriving::call_intrinsic(cx, span, sym::discriminant_value, vec![self_addr]);
let let_stmt = cx.stmt_let(span, false, ident, variant_value);
index_let_stmts.push(let_stmt);
match first_ident {
Some(first) => {
let first_expr = cx.expr_ident(span, first);
let id = cx.expr_ident(span, ident);
let test = cx.expr_binary(span, BinOpKind::Eq, first_expr, id);
discriminant_test =
cx.expr_binary(span, BinOpKind::And, discriminant_test, test)
}
None => {
first_ident = Some(ident);
}
if i > 0 {
let id0 = cx.expr_ident(span, vi_idents[0]);
let id = cx.expr_ident(span, ident);
let test = cx.expr_binary(span, BinOpKind::Eq, id0, id);
discriminant_test = if i == 1 {
test
} else {
cx.expr_binary(span, BinOpKind::And, discriminant_test, test)
};
}
}
@@ -1453,7 +1416,7 @@ fn build_enum_match_tuple<'b>(
// }
// }
// else {
// <delegated expression referring to __self0_vi, et al.>
// <delegated expression referring to __self_vi, et al.>
// }
let all_match = cx.expr_match(span, match_arg, match_arms);
let arm_expr = cx.expr_if(span, discriminant_test, all_match, Some(arm_expr));
@@ -2,7 +2,7 @@
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::{Impl, ItemKind, MetaItem};
use rustc_ast::{GenericArg, Impl, ItemKind, MetaItem};
use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, MultiItemModifier};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
@@ -193,3 +193,16 @@ fn inject_impl_of_structural_trait(
push(Annotatable::Item(newitem));
}
fn assert_ty_bounds(
cx: &mut ExtCtxt<'_>,
stmts: &mut Vec<ast::Stmt>,
ty: P<ast::Ty>,
span: Span,
assert_path: &[Symbol],
) {
// Generate statement `let _: assert_path<ty>;`.
let span = cx.with_def_site_ctxt(span);
let assert_path = cx.path_all(span, true, cx.std_path(assert_path), vec![GenericArg::Type(ty)]);
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
}
+3
View File
@@ -135,6 +135,9 @@
Arguments,
AsMut,
AsRef,
AssertParamIsClone,
AssertParamIsCopy,
AssertParamIsEq,
AtomicBool,
AtomicI128,
AtomicI16,
@@ -0,0 +1,63 @@
// check-pass
// compile-flags: -Zunpretty=expanded
// edition:2021
//
// This test checks the code generated for all[*] the builtin derivable traits
// on a variety of structs and enums. It protects against accidental changes to
// the generated code, and makes deliberate changes to the generated code
// easier to review.
//
// [*] It excludes `Copy` in some cases, because that changes the code
// generated for `Clone`.
//
// [*] It excludes `RustcEncodable` and `RustcDecodable`, which are obsolete and
// also require the `rustc_serialize` crate.
#![crate_type = "lib"]
#![allow(dead_code)]
#![allow(deprecated)]
// Empty struct.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
struct Empty;
// A basic struct.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
struct Point {
x: u32,
y: u32,
}
// A long struct.
#[derive(Clone, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
struct Big {
b1: u32, b2: u32, b3: u32, b4: u32, b5: u32, b6: u32, b7: u32, b8:u32,
}
// A C-like, fieldless enum.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
enum Fieldless {
#[default]
A,
B,
C,
}
// An enum with multiple fieldless and fielded variants.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
enum Mixed {
#[default]
P,
Q,
R(u32),
S { d1: u32, d2: u32 },
}
// An enum with no fieldless variants. Note that `Default` cannot be derived
// for this enum.
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
enum Fielded {
X(u32),
Y(bool),
Z(Option<i32>),
}
@@ -0,0 +1,1100 @@
#![feature(prelude_import)]
// check-pass
// compile-flags: -Zunpretty=expanded
// edition:2021
//
// This test checks the code generated for all[*] the builtin derivable traits
// on a variety of structs and enums. It protects against accidental changes to
// the generated code, and makes deliberate changes to the generated code
// easier to review.
//
// [*] It excludes `Copy` in some cases, because that changes the code
// generated for `Clone`.
//
// [*] It excludes `RustcEncodable` and `RustcDecodable`, which are obsolete and
// also require the `rustc_serialize` crate.
#![crate_type = "lib"]
#![allow(dead_code)]
#![allow(deprecated)]
#[prelude_import]
use std::prelude::rust_2021::*;
#[macro_use]
extern crate std;
// Empty struct.
struct Empty;
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Empty {
#[inline]
fn clone(&self) -> Empty { { *self } }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::marker::Copy for Empty { }
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Empty {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match *self { Self => ::core::fmt::Formatter::write_str(f, "Empty"), }
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::default::Default for Empty {
#[inline]
fn default() -> Empty { Empty {} }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Empty {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match *self { Self => {} }
}
}
impl ::core::marker::StructuralPartialEq for Empty {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Empty {
#[inline]
fn eq(&self, other: &Empty) -> bool {
match *other { Self => match *self { Self => true, }, }
}
}
impl ::core::marker::StructuralEq for Empty {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Empty {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { {} }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Empty {
#[inline]
fn partial_cmp(&self, other: &Empty)
-> ::core::option::Option<::core::cmp::Ordering> {
match *other {
Self =>
match *self {
Self =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
},
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Empty {
#[inline]
fn cmp(&self, other: &Empty) -> ::core::cmp::Ordering {
match *other {
Self => match *self { Self => ::core::cmp::Ordering::Equal, },
}
}
}
// A basic struct.
struct Point {
x: u32,
y: u32,
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Point {
#[inline]
fn clone(&self) -> Point {
{
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::marker::Copy for Point { }
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Point {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
::core::fmt::Formatter::debug_struct_field2_finish(f, "Point",
"x", &&(*__self_0_0), "y", &&(*__self_0_1)),
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::default::Default for Point {
#[inline]
fn default() -> Point {
Point {
x: ::core::default::Default::default(),
y: ::core::default::Default::default(),
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Point {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } => {
::core::hash::Hash::hash(&(*__self_0_0), state);
::core::hash::Hash::hash(&(*__self_0_1), state)
}
}
}
}
impl ::core::marker::StructuralPartialEq for Point {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Point {
#[inline]
fn eq(&self, other: &Point) -> bool {
match *other {
Self { x: ref __self_1_0, y: ref __self_1_1 } =>
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
(*__self_0_0) == (*__self_1_0) &&
(*__self_0_1) == (*__self_1_1),
},
}
}
#[inline]
fn ne(&self, other: &Point) -> bool {
match *other {
Self { x: ref __self_1_0, y: ref __self_1_1 } =>
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
(*__self_0_0) != (*__self_1_0) ||
(*__self_0_1) != (*__self_1_1),
},
}
}
}
impl ::core::marker::StructuralEq for Point {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Point {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Point {
#[inline]
fn partial_cmp(&self, other: &Point)
-> ::core::option::Option<::core::cmp::Ordering> {
match *other {
Self { x: ref __self_1_0, y: ref __self_1_1 } =>
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_0),
&(*__self_1_0)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_1),
&(*__self_1_1)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp,
},
},
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Point {
#[inline]
fn cmp(&self, other: &Point) -> ::core::cmp::Ordering {
match *other {
Self { x: ref __self_1_0, y: ref __self_1_1 } =>
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
match ::core::cmp::Ord::cmp(&(*__self_0_0), &(*__self_1_0))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_1), &(*__self_1_1))
{
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp,
},
},
}
}
}
// A long struct.
struct Big {
b1: u32,
b2: u32,
b3: u32,
b4: u32,
b5: u32,
b6: u32,
b7: u32,
b8: u32,
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Big {
#[inline]
fn clone(&self) -> Big {
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
Big {
b1: ::core::clone::Clone::clone(&(*__self_0_0)),
b2: ::core::clone::Clone::clone(&(*__self_0_1)),
b3: ::core::clone::Clone::clone(&(*__self_0_2)),
b4: ::core::clone::Clone::clone(&(*__self_0_3)),
b5: ::core::clone::Clone::clone(&(*__self_0_4)),
b6: ::core::clone::Clone::clone(&(*__self_0_5)),
b7: ::core::clone::Clone::clone(&(*__self_0_6)),
b8: ::core::clone::Clone::clone(&(*__self_0_7)),
},
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Big {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } => {
let names: &'static _ =
&["b1", "b2", "b3", "b4", "b5", "b6", "b7", "b8"];
let values: &[&dyn ::core::fmt::Debug] =
&[&&(*__self_0_0), &&(*__self_0_1), &&(*__self_0_2),
&&(*__self_0_3), &&(*__self_0_4), &&(*__self_0_5),
&&(*__self_0_6), &&(*__self_0_7)];
::core::fmt::Formatter::debug_struct_fields_finish(f, "Big",
names, values)
}
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::default::Default for Big {
#[inline]
fn default() -> Big {
Big {
b1: ::core::default::Default::default(),
b2: ::core::default::Default::default(),
b3: ::core::default::Default::default(),
b4: ::core::default::Default::default(),
b5: ::core::default::Default::default(),
b6: ::core::default::Default::default(),
b7: ::core::default::Default::default(),
b8: ::core::default::Default::default(),
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Big {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } => {
::core::hash::Hash::hash(&(*__self_0_0), state);
::core::hash::Hash::hash(&(*__self_0_1), state);
::core::hash::Hash::hash(&(*__self_0_2), state);
::core::hash::Hash::hash(&(*__self_0_3), state);
::core::hash::Hash::hash(&(*__self_0_4), state);
::core::hash::Hash::hash(&(*__self_0_5), state);
::core::hash::Hash::hash(&(*__self_0_6), state);
::core::hash::Hash::hash(&(*__self_0_7), state)
}
}
}
}
impl ::core::marker::StructuralPartialEq for Big {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Big {
#[inline]
fn eq(&self, other: &Big) -> bool {
match *other {
Self {
b1: ref __self_1_0,
b2: ref __self_1_1,
b3: ref __self_1_2,
b4: ref __self_1_3,
b5: ref __self_1_4,
b6: ref __self_1_5,
b7: ref __self_1_6,
b8: ref __self_1_7 } =>
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
(*__self_0_0) == (*__self_1_0) &&
(*__self_0_1) == (*__self_1_1) &&
(*__self_0_2) == (*__self_1_2) &&
(*__self_0_3) == (*__self_1_3) &&
(*__self_0_4) == (*__self_1_4) &&
(*__self_0_5) == (*__self_1_5) &&
(*__self_0_6) == (*__self_1_6) &&
(*__self_0_7) == (*__self_1_7),
},
}
}
#[inline]
fn ne(&self, other: &Big) -> bool {
match *other {
Self {
b1: ref __self_1_0,
b2: ref __self_1_1,
b3: ref __self_1_2,
b4: ref __self_1_3,
b5: ref __self_1_4,
b6: ref __self_1_5,
b7: ref __self_1_6,
b8: ref __self_1_7 } =>
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
(*__self_0_0) != (*__self_1_0) ||
(*__self_0_1) != (*__self_1_1) ||
(*__self_0_2) != (*__self_1_2) ||
(*__self_0_3) != (*__self_1_3) ||
(*__self_0_4) != (*__self_1_4) ||
(*__self_0_5) != (*__self_1_5) ||
(*__self_0_6) != (*__self_1_6) ||
(*__self_0_7) != (*__self_1_7),
},
}
}
}
impl ::core::marker::StructuralEq for Big {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Big {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Big {
#[inline]
fn partial_cmp(&self, other: &Big)
-> ::core::option::Option<::core::cmp::Ordering> {
match *other {
Self {
b1: ref __self_1_0,
b2: ref __self_1_1,
b3: ref __self_1_2,
b4: ref __self_1_3,
b5: ref __self_1_4,
b6: ref __self_1_5,
b7: ref __self_1_6,
b8: ref __self_1_7 } =>
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_0),
&(*__self_1_0)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_1),
&(*__self_1_1)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_2),
&(*__self_1_2)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_3),
&(*__self_1_3)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_4),
&(*__self_1_4)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_5),
&(*__self_1_5)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_6),
&(*__self_1_6)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_7),
&(*__self_1_7)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
},
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Big {
#[inline]
fn cmp(&self, other: &Big) -> ::core::cmp::Ordering {
match *other {
Self {
b1: ref __self_1_0,
b2: ref __self_1_1,
b3: ref __self_1_2,
b4: ref __self_1_3,
b5: ref __self_1_4,
b6: ref __self_1_5,
b7: ref __self_1_6,
b8: ref __self_1_7 } =>
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
match ::core::cmp::Ord::cmp(&(*__self_0_0), &(*__self_1_0))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_1), &(*__self_1_1))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_2), &(*__self_1_2))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_3), &(*__self_1_3))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_4), &(*__self_1_4))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_5), &(*__self_1_5))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_6), &(*__self_1_6))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_7), &(*__self_1_7))
{
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
},
},
}
}
}
// A C-like, fieldless enum.
enum Fieldless {
#[default]
A,
B,
C,
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Fieldless {
#[inline]
fn clone(&self) -> Fieldless { { *self } }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::marker::Copy for Fieldless { }
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Fieldless {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match (&*self,) {
(&Fieldless::A,) => ::core::fmt::Formatter::write_str(f, "A"),
(&Fieldless::B,) => ::core::fmt::Formatter::write_str(f, "B"),
(&Fieldless::C,) => ::core::fmt::Formatter::write_str(f, "C"),
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::default::Default for Fieldless {
#[inline]
fn default() -> Fieldless { Self::A }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Fieldless {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match (&*self,) {
_ => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state)
}
}
}
}
impl ::core::marker::StructuralPartialEq for Fieldless {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Fieldless {
#[inline]
fn eq(&self, other: &Fieldless) -> bool {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) { _ => true, }
} else { false }
}
}
}
impl ::core::marker::StructuralEq for Fieldless {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Fieldless {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { {} }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Fieldless {
#[inline]
fn partial_cmp(&self, other: &Fieldless)
-> ::core::option::Option<::core::cmp::Ordering> {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
_ =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi,
&__arg_1_vi)
}
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Fieldless {
#[inline]
fn cmp(&self, other: &Fieldless) -> ::core::cmp::Ordering {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
_ => ::core::cmp::Ordering::Equal,
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
}
}
// An enum with multiple fieldless and fielded variants.
enum Mixed {
#[default]
P,
Q,
R(u32),
S {
d1: u32,
d2: u32,
},
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Mixed {
#[inline]
fn clone(&self) -> Mixed {
{
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::marker::Copy for Mixed { }
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Mixed {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match (&*self,) {
(&Mixed::P,) => ::core::fmt::Formatter::write_str(f, "P"),
(&Mixed::Q,) => ::core::fmt::Formatter::write_str(f, "Q"),
(&Mixed::R(ref __self_0),) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "R",
&&(*__self_0)),
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },) =>
::core::fmt::Formatter::debug_struct_field2_finish(f, "S",
"d1", &&(*__self_0), "d2", &&(*__self_1)),
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::default::Default for Mixed {
#[inline]
fn default() -> Mixed { Self::P }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Mixed {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match (&*self,) {
(&Mixed::R(ref __self_0),) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&(*__self_0), state)
}
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&(*__self_0), state);
::core::hash::Hash::hash(&(*__self_1), state)
}
_ => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state)
}
}
}
}
impl ::core::marker::StructuralPartialEq for Mixed {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Mixed {
#[inline]
fn eq(&self, other: &Mixed) -> bool {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
(*__self_0) == (*__arg_1_0),
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
(*__self_0) == (*__arg_1_0) && (*__self_1) == (*__arg_1_1),
_ => true,
}
} else { false }
}
}
#[inline]
fn ne(&self, other: &Mixed) -> bool {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
(*__self_0) != (*__arg_1_0),
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
(*__self_0) != (*__arg_1_0) || (*__self_1) != (*__arg_1_1),
_ => false,
}
} else { true }
}
}
}
impl ::core::marker::StructuralEq for Mixed {}
// `derive(Eq)` expansion for `Mixed`. `Eq` has no methods of its own; this
// hidden helper only forces each field type (here three `u32` fields across
// the data-carrying variants) to implement `Eq` via `AssertParamIsEq`.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Mixed {
    #[inline]
    #[doc(hidden)]
    #[no_coverage]
    fn assert_receiver_is_total_eq(&self) -> () {
        {
            let _: ::core::cmp::AssertParamIsEq<u32>;
            let _: ::core::cmp::AssertParamIsEq<u32>;
            let _: ::core::cmp::AssertParamIsEq<u32>;
        }
    }
}
// `derive(PartialOrd)` expansion for `Mixed`. Equal discriminants: compare
// fields lexicographically via nested matches that short-circuit on the
// first non-`Equal` result; fieldless variants fall to the `_` arm and are
// `Some(Equal)`. Unequal discriminants: order by the discriminant values
// themselves. NOTE(review): snapshot of generated code — keep byte-identical.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Mixed {
    #[inline]
    fn partial_cmp(&self, other: &Mixed)
        -> ::core::option::Option<::core::cmp::Ordering> {
        {
            let __self_vi = ::core::intrinsics::discriminant_value(&*self);
            let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
            if __self_vi == __arg_1_vi {
                match (&*self, &*other) {
                    (&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
                        match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
                                &(*__arg_1_0)) {
                            ::core::option::Option::Some(::core::cmp::Ordering::Equal)
                                =>
                                ::core::option::Option::Some(::core::cmp::Ordering::Equal),
                            cmp => cmp,
                        },
                    (&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
                        &Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
                        match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
                                &(*__arg_1_0)) {
                            ::core::option::Option::Some(::core::cmp::Ordering::Equal)
                                =>
                                match ::core::cmp::PartialOrd::partial_cmp(&(*__self_1),
                                        &(*__arg_1_1)) {
                                    ::core::option::Option::Some(::core::cmp::Ordering::Equal)
                                        =>
                                        ::core::option::Option::Some(::core::cmp::Ordering::Equal),
                                    cmp => cmp,
                                },
                            cmp => cmp,
                        },
                    _ =>
                        ::core::option::Option::Some(::core::cmp::Ordering::Equal),
                }
            } else {
                ::core::cmp::PartialOrd::partial_cmp(&__self_vi,
                    &__arg_1_vi)
            }
        }
    }
}
// `derive(Ord)` expansion for `Mixed`. Mirrors the `PartialOrd` impl but
// with total `Ordering` values: equal discriminants compare fields
// lexicographically, fieldless variants (the `_` arm) are `Equal`, and
// unequal discriminants compare by discriminant value. NOTE(review):
// snapshot of generated code — keep byte-identical.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Mixed {
    #[inline]
    fn cmp(&self, other: &Mixed) -> ::core::cmp::Ordering {
        {
            let __self_vi = ::core::intrinsics::discriminant_value(&*self);
            let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
            if __self_vi == __arg_1_vi {
                match (&*self, &*other) {
                    (&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
                        match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
                            ::core::cmp::Ordering::Equal =>
                                ::core::cmp::Ordering::Equal,
                            cmp => cmp,
                        },
                    (&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
                        &Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
                        match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
                            ::core::cmp::Ordering::Equal =>
                                match ::core::cmp::Ord::cmp(&(*__self_1), &(*__arg_1_1)) {
                                    ::core::cmp::Ordering::Equal =>
                                        ::core::cmp::Ordering::Equal,
                                    cmp => cmp,
                                },
                            cmp => cmp,
                        },
                    _ => ::core::cmp::Ordering::Equal,
                }
            } else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
        }
    }
}
// Test enum in which every variant carries data ("no fieldless variants");
// because there is no data-free variant to pick as a default, `Default`
// cannot be derived for it.
enum Fielded {
    X(u32),
    Y(bool),
    Z(Option<i32>),
}
// `derive(Clone)` expansion for `Fielded`: one arm per variant, rebuilding
// the same variant with a clone of its single field. The match is
// exhaustive with no `_` arm because every variant carries data.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Fielded {
    #[inline]
    fn clone(&self) -> Fielded {
        match (&*self,) {
            (&Fielded::X(ref __self_0),) =>
                Fielded::X(::core::clone::Clone::clone(&(*__self_0))),
            (&Fielded::Y(ref __self_0),) =>
                Fielded::Y(::core::clone::Clone::clone(&(*__self_0))),
            (&Fielded::Z(ref __self_0),) =>
                Fielded::Z(::core::clone::Clone::clone(&(*__self_0))),
        }
    }
}
// `derive(Debug)` expansion for `Fielded`: each one-field tuple variant is
// formatted through the `debug_tuple_field1_finish` helper with its variant
// name and a reference to its field.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Fielded {
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match (&*self,) {
            (&Fielded::X(ref __self_0),) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f, "X",
                    &&(*__self_0)),
            (&Fielded::Y(ref __self_0),) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Y",
                    &&(*__self_0)),
            (&Fielded::Z(ref __self_0),) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Z",
                    &&(*__self_0)),
        }
    }
}
// `derive(Hash)` expansion for `Fielded`: every arm hashes the discriminant
// and then the variant's field. Exhaustive without a `_` arm since all
// variants carry data.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Fielded {
    fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
        match (&*self,) {
            (&Fielded::X(ref __self_0),) => {
                ::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
                    state);
                ::core::hash::Hash::hash(&(*__self_0), state)
            }
            (&Fielded::Y(ref __self_0),) => {
                ::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
                    state);
                ::core::hash::Hash::hash(&(*__self_0), state)
            }
            (&Fielded::Z(ref __self_0),) => {
                ::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
                    state);
                ::core::hash::Hash::hash(&(*__self_0), state)
            }
        }
    }
}
impl ::core::marker::StructuralPartialEq for Fielded {}
// `derive(PartialEq)` expansion for `Fielded`. Unlike `Mixed`, the inner
// match's `_` arm is `unsafe { unreachable() }`: it is only entered after
// the discriminants have compared equal, so a mismatched-variant pair is
// impossible. NOTE(review): snapshot of generated code — keep byte-identical.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Fielded {
    #[inline]
    fn eq(&self, other: &Fielded) -> bool {
        {
            let __self_vi = ::core::intrinsics::discriminant_value(&*self);
            let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
            if __self_vi == __arg_1_vi {
                match (&*self, &*other) {
                    (&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
                        (*__self_0) == (*__arg_1_0),
                    (&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
                        (*__self_0) == (*__arg_1_0),
                    (&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
                        (*__self_0) == (*__arg_1_0),
                    _ => unsafe { ::core::intrinsics::unreachable() }
                }
            } else { false }
        }
    }
    #[inline]
    fn ne(&self, other: &Fielded) -> bool {
        {
            let __self_vi = ::core::intrinsics::discriminant_value(&*self);
            let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
            if __self_vi == __arg_1_vi {
                match (&*self, &*other) {
                    (&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
                        (*__self_0) != (*__arg_1_0),
                    (&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
                        (*__self_0) != (*__arg_1_0),
                    (&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
                        (*__self_0) != (*__arg_1_0),
                    _ => unsafe { ::core::intrinsics::unreachable() }
                }
            } else { true }
        }
    }
}
impl ::core::marker::StructuralEq for Fielded {}
// `derive(Eq)` expansion for `Fielded`: the hidden helper asserts each
// variant's field type (`u32`, `bool`, `Option<i32>`) implements `Eq`.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Fielded {
    #[inline]
    #[doc(hidden)]
    #[no_coverage]
    fn assert_receiver_is_total_eq(&self) -> () {
        {
            let _: ::core::cmp::AssertParamIsEq<u32>;
            let _: ::core::cmp::AssertParamIsEq<bool>;
            let _: ::core::cmp::AssertParamIsEq<Option<i32>>;
        }
    }
}
// `derive(PartialOrd)` expansion for `Fielded`. Equal discriminants: compare
// the single field of each variant, short-circuiting on non-`Equal`. The `_`
// arm is unreachable (discriminants already matched, every variant is
// covered) and compiled to `unsafe { unreachable() }`. Unequal
// discriminants: order by discriminant value. NOTE(review): snapshot of
// generated code — keep byte-identical.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Fielded {
    #[inline]
    fn partial_cmp(&self, other: &Fielded)
        -> ::core::option::Option<::core::cmp::Ordering> {
        {
            let __self_vi = ::core::intrinsics::discriminant_value(&*self);
            let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
            if __self_vi == __arg_1_vi {
                match (&*self, &*other) {
                    (&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
                        match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
                                &(*__arg_1_0)) {
                            ::core::option::Option::Some(::core::cmp::Ordering::Equal)
                                =>
                                ::core::option::Option::Some(::core::cmp::Ordering::Equal),
                            cmp => cmp,
                        },
                    (&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
                        match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
                                &(*__arg_1_0)) {
                            ::core::option::Option::Some(::core::cmp::Ordering::Equal)
                                =>
                                ::core::option::Option::Some(::core::cmp::Ordering::Equal),
                            cmp => cmp,
                        },
                    (&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
                        match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
                                &(*__arg_1_0)) {
                            ::core::option::Option::Some(::core::cmp::Ordering::Equal)
                                =>
                                ::core::option::Option::Some(::core::cmp::Ordering::Equal),
                            cmp => cmp,
                        },
                    _ => unsafe { ::core::intrinsics::unreachable() }
                }
            } else {
                ::core::cmp::PartialOrd::partial_cmp(&__self_vi,
                    &__arg_1_vi)
            }
        }
    }
}
// `derive(Ord)` expansion for `Fielded`. Same structure as `PartialOrd`
// above but with total `Ordering` values; the `_` arm is again
// `unsafe { unreachable() }` because the discriminants already compared
// equal. NOTE(review): snapshot of generated code — keep byte-identical.
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Fielded {
    #[inline]
    fn cmp(&self, other: &Fielded) -> ::core::cmp::Ordering {
        {
            let __self_vi = ::core::intrinsics::discriminant_value(&*self);
            let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
            if __self_vi == __arg_1_vi {
                match (&*self, &*other) {
                    (&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
                        match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
                            ::core::cmp::Ordering::Equal =>
                                ::core::cmp::Ordering::Equal,
                            cmp => cmp,
                        },
                    (&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
                        match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
                            ::core::cmp::Ordering::Equal =>
                                ::core::cmp::Ordering::Equal,
                            cmp => cmp,
                        },
                    (&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
                        match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
                            ::core::cmp::Ordering::Equal =>
                                ::core::cmp::Ordering::Equal,
                            cmp => cmp,
                        },
                    _ => unsafe { ::core::intrinsics::unreachable() }
                }
            } else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
        }
    }
}