1 // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Some code that abstracts away much of the boilerplate of writing
12 //! `derive` instances for traits. Among other things it manages getting
13 //! access to the fields of the 4 different sorts of structs and enum
14 //! variants, as well as creating the method and impl ast instances.
16 //! Supported features (fairly exhaustive):
18 //! - Methods taking any number of parameters of any type, and returning
19 //! any type, other than vectors, bottom and closures.
20 //! - Generating `impl`s for types with type parameters and lifetimes
21 //! (e.g. `Option<T>`), the parameters are automatically given the
22 //! current trait as a bound. (This includes separate type parameters
23 //! and lifetimes for methods.)
24 //! - Additional bounds on the type parameters (`TraitDef.additional_bounds`)
26 //! The most important thing for implementers is the `Substructure` and
27 //! `SubstructureFields` objects. The latter groups 5 possibilities of the
30 //! - `Struct`, when `Self` is a struct (including tuple structs, e.g
31 //! `struct T(i32, char)`).
32 //! - `EnumMatching`, when `Self` is an enum and all the arguments are the
33 //! same variant of the enum (e.g. `Some(1)`, `Some(3)` and `Some(4)`)
34 //! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments
35 //! are not the same variant (e.g. `None`, `Some(1)` and `None`).
36 //! - `StaticEnum` and `StaticStruct` for static methods, where the type
37 //! being derived upon is either an enum or struct respectively. (Any
38 //! argument with type Self is just grouped among the non-self
41 //! In the first two cases, the values from the corresponding fields in
42 //! all the arguments are grouped together. For `EnumNonMatchingCollapsed`
43 //! this isn't possible (different variants have different fields), so the
44 //! fields are inaccessible. (Previous versions of the deriving infrastructure
45 //! had a way to expand into code that could access them, at the cost of
46 //! generating exponential amounts of code; see issue #15375). There are no
47 //! fields with values in the static cases, so these are treated entirely
50 //! The non-static cases have `Option<ident>` in several places associated
51 //! with field `expr`s. This represents the name of the field it is
52 //! associated with. It is only not `None` when the associated field has
53 //! an identifier in the source code. For example, the `x`s in the
57 //! struct A { x : i32 }
67 //! The `i32`s in `B` and `C0` don't have an identifier, so the
68 //! `Option<ident>`s would be `None` for them.
70 //! In the static cases, the structure is summarised, either into the just
71 //! spans of the fields or a list of spans and the field idents (for tuple
72 //! structs and record structs, respectively), or a list of these, for
73 //! enums (one for each variant). For empty struct and empty enum
74 //! variants, it is represented as a count of 0.
76 //! # "`cs`" functions
78 //! The `cs_...` functions ("combine substructure") are designed to
79 //! make life easier by providing some pre-made recipes for common
80 //! threads; mostly calling the function being derived on all the
81 //! arguments and then combining them back together in some way (or
82 //! letting the user choose that). They are not meant to be the only
83 //! way to handle the structures that this code creates.
87 //! The following simplified `PartialEq` is used for in-code examples:
91 //! fn eq(&self, other: &Self);
93 //! impl PartialEq for i32 {
94 //! fn eq(&self, other: &i32) -> bool {
100 //! Some examples of the values of `SubstructureFields` follow, using the
101 //! above `PartialEq`, `A`, `B` and `C`.
105 //! When generating the `expr` for the `A` impl, the `SubstructureFields` is
108 //! Struct(vec![FieldInfo {
109 //! span: <span of x>
110 //! name: Some(<ident of x>),
111 //! self_: <expr for &self.x>,
112 //! other: vec![<expr for &other.x]
116 //! For the `B` impl, called with `B(a)` and `B(b)`,
119 //! Struct(vec![FieldInfo {
120 //! span: <span of `i32`>,
122 //! self_: <expr for &a>
123 //! other: vec![<expr for &b>]
129 //! When generating the `expr` for a call with `self == C0(a)` and `other
130 //! == C0(b)`, the SubstructureFields is
133 //! EnumMatching(0, <ast::Variant for C0>,
135 //! span: <span of i32>
137 //! self_: <expr for &a>,
138 //! other: vec![<expr for &b>]
142 //! For `C1 {x}` and `C1 {x}`,
145 //! EnumMatching(1, <ast::Variant for C1>,
147 //! span: <span of x>
148 //! name: Some(<ident of x>),
149 //! self_: <expr for &self.x>,
150 //! other: vec![<expr for &other.x>]
154 //! For `C0(a)` and `C1 {x}` ,
157 //! EnumNonMatchingCollapsed(
158 //! vec![<ident of self>, <ident of __arg_1>],
159 //! &[<ast::Variant for C0>, <ast::Variant for C1>],
160 //! &[<ident for self index value>, <ident of __arg_1 index value>])
163 //! It is the same for when the arguments are flipped to `C1 {x}` and
164 //! `C0(a)`; the only difference is what the values of the identifiers
165 //! <ident for self index value> and <ident of __arg_1 index value> will
166 //! be in the generated code.
168 //! `EnumNonMatchingCollapsed` deliberately provides far less information
169 //! than is generally available for a given pair of variants; see #15375
174 //! A static method on the types above would result in,
177 //! StaticStruct(<ast::StructDef of A>, Named(vec![(<ident of x>, <span of x>)]))
179 //! StaticStruct(<ast::StructDef of B>, Unnamed(vec![<span of x>]))
181 //! StaticEnum(<ast::EnumDef of C>,
182 //! vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of i32>])),
183 //! (<ident of C1>, <span of C1>, Named(vec![(<ident of x>, <span of x>)]))])
186 pub use self::StaticFields
::*;
187 pub use self::SubstructureFields
::*;
188 use self::StructType
::*;
190 use std
::cell
::RefCell
;
191 use std
::collections
::HashSet
;
197 use ast
::{EnumDef, Expr, Ident, Generics, StructDef}
;
200 use attr
::AttrMetaMethods
;
201 use ext
::base
::{ExtCtxt, Annotatable}
;
202 use ext
::build
::AstBuilder
;
203 use codemap
::{self, DUMMY_SP}
;
205 use diagnostic
::SpanHandler
;
207 use owned_slice
::OwnedSlice
;
208 use parse
::token
::InternedString
;
209 use parse
::token
::special_idents
;
212 use self::ty
::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty}
;
216 pub struct TraitDef
<'a
> {
217 /// The span for the current #[derive(Foo)] header.
220 pub attributes
: Vec
<ast
::Attribute
>,
222 /// Path of the trait, including any type parameters
225 /// Additional bounds required of any type parameters of the type,
226 /// other than the current trait
227 pub additional_bounds
: Vec
<Ty
<'a
>>,
229 /// Any extra lifetimes and/or bounds, e.g. `D: serialize::Decoder`
230 pub generics
: LifetimeBounds
<'a
>,
232 pub methods
: Vec
<MethodDef
<'a
>>,
234 pub associated_types
: Vec
<(ast
::Ident
, Ty
<'a
>)>,
238 pub struct MethodDef
<'a
> {
239 /// name of the method
241 /// List of generics, e.g. `R: rand::Rng`
242 pub generics
: LifetimeBounds
<'a
>,
244 /// Whether there is a self argument (outer Option) i.e. whether
245 /// this is a static function, and whether it is a pointer (inner
247 pub explicit_self
: Option
<Option
<PtrTy
<'a
>>>,
249 /// Arguments other than the self argument
250 pub args
: Vec
<Ty
<'a
>>,
255 pub attributes
: Vec
<ast
::Attribute
>,
257 // Is it an `unsafe fn`?
260 pub combine_substructure
: RefCell
<CombineSubstructureFunc
<'a
>>,
263 /// All the data about the data structure/method being derived upon.
264 pub struct Substructure
<'a
> {
266 pub type_ident
: Ident
,
267 /// ident of the method
268 pub method_ident
: Ident
,
269 /// dereferenced access to any `Self_` or `Ptr(Self_, _)` arguments
270 pub self_args
: &'a
[P
<Expr
>],
271 /// verbatim access to any other arguments
272 pub nonself_args
: &'a
[P
<Expr
>],
273 pub fields
: &'a SubstructureFields
<'a
>
276 /// Summary of the relevant parts of a struct/enum field.
277 pub struct FieldInfo
<'a
> {
279 /// None for tuple structs/normal enum variants, Some for normal
280 /// structs/struct enum variants.
281 pub name
: Option
<Ident
>,
282 /// The expression corresponding to this field of `self`
283 /// (specifically, a reference to it).
285 /// The expressions corresponding to references to this field in
286 /// the other `Self` arguments.
287 pub other
: Vec
<P
<Expr
>>,
288 /// The attributes on the field
289 pub attrs
: &'a
[ast
::Attribute
],
292 /// Fields for a static method
293 pub enum StaticFields
{
294 /// Tuple structs/enum variants like this.
296 /// Normal structs/struct variants.
297 Named(Vec
<(Ident
, Span
)>),
300 /// A summary of the possible sets of fields.
301 pub enum SubstructureFields
<'a
> {
302 Struct(Vec
<FieldInfo
<'a
>>),
303 /// Matching variants of the enum: variant index, ast::Variant,
304 /// fields: the field name is only non-`None` in the case of a struct
306 EnumMatching(usize, &'a ast
::Variant
, Vec
<FieldInfo
<'a
>>),
308 /// Non-matching variants of the enum, but with all state hidden from
309 /// the consequent code. The first component holds `Ident`s for all of
310 /// the `Self` arguments; the second component is a slice of all of the
311 /// variants for the enum itself, and the third component is a list of
312 /// `Ident`s bound to the variant index values for each of the actual
313 /// input `Self` arguments.
314 EnumNonMatchingCollapsed(Vec
<Ident
>, &'a
[P
<ast
::Variant
>], &'a
[Ident
]),
316 /// A static method where `Self` is a struct.
317 StaticStruct(&'a ast
::StructDef
, StaticFields
),
318 /// A static method where `Self` is an enum.
319 StaticEnum(&'a ast
::EnumDef
, Vec
<(Ident
, Span
, StaticFields
)>),
324 /// Combine the values of all the fields together. The last argument is
325 /// all the fields of all the structures.
326 pub type CombineSubstructureFunc
<'a
> =
327 Box
<FnMut(&mut ExtCtxt
, Span
, &Substructure
) -> P
<Expr
> + 'a
>;
329 /// Deal with non-matching enum variants. The tuple is a list of
330 /// identifiers (one for each `Self` argument, which could be any of the
331 /// variants since they have been collapsed together) and the identifiers
332 /// holding the variant index value for each of the `Self` arguments. The
333 /// last argument is all the non-`Self` args of the method being derived.
334 pub type EnumNonMatchCollapsedFunc
<'a
> =
335 Box
<FnMut(&mut ExtCtxt
, Span
, (&[Ident
], &[Ident
]), &[P
<Expr
>]) -> P
<Expr
> + 'a
>;
337 pub fn combine_substructure
<'a
>(f
: CombineSubstructureFunc
<'a
>)
338 -> RefCell
<CombineSubstructureFunc
<'a
>> {
342 /// This method helps to extract all the type parameters referenced from a
343 /// type. For a type parameter `<T>`, it looks for either a `TyPath` that
344 /// is not global and starts with `T`, or a `TyQPath`.
345 fn find_type_parameters(ty
: &ast
::Ty
, ty_param_names
: &[ast
::Name
]) -> Vec
<P
<ast
::Ty
>> {
349 ty_param_names
: &'a
[ast
::Name
],
350 types
: Vec
<P
<ast
::Ty
>>,
353 impl<'a
> visit
::Visitor
<'a
> for Visitor
<'a
> {
354 fn visit_ty(&mut self, ty
: &'a ast
::Ty
) {
356 ast
::TyPath(_
, ref path
) if !path
.global
=> {
357 match path
.segments
.first() {
359 if self.ty_param_names
.contains(&segment
.identifier
.name
) {
360 self.types
.push(P(ty
.clone()));
369 visit
::walk_ty(self, ty
)
373 let mut visitor
= Visitor
{
374 ty_param_names
: ty_param_names
,
378 visit
::Visitor
::visit_ty(&mut visitor
, ty
);
383 impl<'a
> TraitDef
<'a
> {
386 mitem
: &ast
::MetaItem
,
387 item
: &'a Annotatable
,
388 push
: &mut FnMut(Annotatable
))
391 Annotatable
::Item(ref item
) => {
392 let newitem
= match item
.node
{
393 ast
::ItemStruct(ref struct_def
, ref generics
) => {
394 self.expand_struct_def(cx
,
399 ast
::ItemEnum(ref enum_def
, ref generics
) => {
400 self.expand_enum_def(cx
,
407 cx
.span_err(mitem
.span
,
408 "`derive` may only be applied to structs and enums");
412 // Keep the lint attributes of the previous item to control how the
413 // generated implementations are linted
414 let mut attrs
= newitem
.attrs
.clone();
415 attrs
.extend(item
.attrs
.iter().filter(|a
| {
416 match &a
.name()[..] {
417 "allow" | "warn" | "deny" | "forbid" => true,
421 push(Annotatable
::Item(P(ast
::Item
{
427 cx
.span_err(mitem
.span
, "`derive` may only be applied to structs and enums");
432 /// Given that we are deriving a trait `DerivedTrait` for a type like:
435 /// struct Struct<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> where C: WhereTrait {
438 /// b1: <B as DeclaredTrait>::Item,
439 /// c1: <C as WhereTrait>::Item,
440 /// c2: Option<<C as WhereTrait>::Item>,
445 /// create an impl like:
448 /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where
450 /// A: DerivedTrait + B1 + ... + BN,
451 /// B: DerivedTrait + B1 + ... + BN,
452 /// C: DerivedTrait + B1 + ... + BN,
453 /// B::Item: DerivedTrait + B1 + ... + BN,
454 /// <C as WhereTrait>::Item: DerivedTrait + B1 + ... + BN,
461 /// where B1, ..., BN are the bounds given by `bounds_paths`.'. Z is a phantom type, and
462 /// therefore does not get bound by the derived trait.
463 fn create_derived_impl(&self,
467 field_tys
: Vec
<P
<ast
::Ty
>>,
468 methods
: Vec
<P
<ast
::ImplItem
>>) -> P
<ast
::Item
> {
469 let trait_path
= self.path
.to_path(cx
, self.span
, type_ident
, generics
);
471 // Transform associated types from `deriving::ty::Ty` into `ast::ImplItem`
472 let associated_types
= self.associated_types
.iter().map(|&(ident
, ref type_def
)| {
474 id
: ast
::DUMMY_NODE_ID
,
479 node
: ast
::TypeImplItem(type_def
.to_ty(cx
,
487 let Generics { mut lifetimes, ty_params, mut where_clause }
=
488 self.generics
.to_generics(cx
, self.span
, type_ident
, generics
);
489 let mut ty_params
= ty_params
.into_vec();
491 // Copy the lifetimes
492 lifetimes
.extend(generics
.lifetimes
.iter().cloned());
494 // Create the type parameters.
495 ty_params
.extend(generics
.ty_params
.iter().map(|ty_param
| {
496 // I don't think this can be moved out of the loop, since
497 // a TyParamBound requires an ast id
498 let mut bounds
: Vec
<_
> =
499 // extra restrictions on the generics parameters to the type being derived upon
500 self.additional_bounds
.iter().map(|p
| {
501 cx
.typarambound(p
.to_path(cx
, self.span
,
502 type_ident
, generics
))
505 // require the current trait
506 bounds
.push(cx
.typarambound(trait_path
.clone()));
508 // also add in any bounds from the declaration
509 for declared_bound
in ty_param
.bounds
.iter() {
510 bounds
.push((*declared_bound
).clone());
513 cx
.typaram(self.span
,
515 OwnedSlice
::from_vec(bounds
),
519 // and similarly for where clauses
520 where_clause
.predicates
.extend(generics
.where_clause
.predicates
.iter().map(|clause
| {
522 ast
::WherePredicate
::BoundPredicate(ref wb
) => {
523 ast
::WherePredicate
::BoundPredicate(ast
::WhereBoundPredicate
{
525 bound_lifetimes
: wb
.bound_lifetimes
.clone(),
526 bounded_ty
: wb
.bounded_ty
.clone(),
527 bounds
: OwnedSlice
::from_vec(wb
.bounds
.iter().cloned().collect())
530 ast
::WherePredicate
::RegionPredicate(ref rb
) => {
531 ast
::WherePredicate
::RegionPredicate(ast
::WhereRegionPredicate
{
533 lifetime
: rb
.lifetime
,
534 bounds
: rb
.bounds
.iter().cloned().collect()
537 ast
::WherePredicate
::EqPredicate(ref we
) => {
538 ast
::WherePredicate
::EqPredicate(ast
::WhereEqPredicate
{
539 id
: ast
::DUMMY_NODE_ID
,
541 path
: we
.path
.clone(),
548 if !ty_params
.is_empty() {
549 let ty_param_names
: Vec
<ast
::Name
> = ty_params
.iter()
550 .map(|ty_param
| ty_param
.ident
.name
)
553 let mut processed_field_types
= HashSet
::new();
554 for field_ty
in field_tys
{
555 let tys
= find_type_parameters(&*field_ty
, &ty_param_names
);
558 // if we have already handled this type, skip it
559 if let ast
::TyPath(_
, ref p
) = ty
.node
{
560 if p
.segments
.len() == 1
561 && ty_param_names
.contains(&p
.segments
[0].identifier
.name
)
562 || processed_field_types
.contains(&p
.segments
) {
565 processed_field_types
.insert(p
.segments
.clone());
567 let mut bounds
: Vec
<_
> = self.additional_bounds
.iter().map(|p
| {
568 cx
.typarambound(p
.to_path(cx
, self.span
, type_ident
, generics
))
571 // require the current trait
572 bounds
.push(cx
.typarambound(trait_path
.clone()));
574 let predicate
= ast
::WhereBoundPredicate
{
576 bound_lifetimes
: vec
![],
578 bounds
: OwnedSlice
::from_vec(bounds
),
581 let predicate
= ast
::WherePredicate
::BoundPredicate(predicate
);
582 where_clause
.predicates
.push(predicate
);
587 let trait_generics
= Generics
{
588 lifetimes
: lifetimes
,
589 ty_params
: OwnedSlice
::from_vec(ty_params
),
590 where_clause
: where_clause
593 // Create the reference to the trait.
594 let trait_ref
= cx
.trait_ref(trait_path
);
596 // Create the type parameters on the `self` path.
597 let self_ty_params
= generics
.ty_params
.map(|ty_param
| {
598 cx
.ty_ident(self.span
, ty_param
.ident
)
601 let self_lifetimes
: Vec
<ast
::Lifetime
> =
604 .map(|ld
| ld
.lifetime
)
607 // Create the type of `self`.
608 let self_type
= cx
.ty_path(
609 cx
.path_all(self.span
, false, vec
!( type_ident
), self_lifetimes
,
610 self_ty_params
.into_vec(), Vec
::new()));
612 let attr
= cx
.attribute(
614 cx
.meta_word(self.span
,
615 InternedString
::new("automatically_derived")));
616 // Just mark it now since we know that it'll end up used downstream
617 attr
::mark_used(&attr
);
618 let opt_trait_ref
= Some(trait_ref
);
619 let ident
= ast_util
::impl_pretty_name(&opt_trait_ref
, Some(&*self_type
));
620 let mut a
= vec
![attr
];
621 a
.extend(self.attributes
.iter().cloned());
626 ast
::ItemImpl(ast
::Unsafety
::Normal
,
627 ast
::ImplPolarity
::Positive
,
631 methods
.into_iter().chain(associated_types
).collect()))
634 fn expand_struct_def(&self,
636 struct_def
: &'a StructDef
,
638 generics
: &Generics
) -> P
<ast
::Item
> {
639 let field_tys
: Vec
<P
<ast
::Ty
>> = struct_def
.fields
.iter()
640 .map(|field
| field
.node
.ty
.clone())
643 let methods
= self.methods
.iter().map(|method_def
| {
644 let (explicit_self
, self_args
, nonself_args
, tys
) =
645 method_def
.split_self_nonself_args(
646 cx
, self, type_ident
, generics
);
648 let body
= if method_def
.is_static() {
649 method_def
.expand_static_struct_method_body(
657 method_def
.expand_struct_method_body(cx
,
665 method_def
.create_method(cx
,
675 self.create_derived_impl(cx
, type_ident
, generics
, field_tys
, methods
)
678 fn expand_enum_def(&self,
680 enum_def
: &'a EnumDef
,
681 type_attrs
: &[ast
::Attribute
],
683 generics
: &Generics
) -> P
<ast
::Item
> {
684 let mut field_tys
= Vec
::new();
686 for variant
in &enum_def
.variants
{
687 match variant
.node
.kind
{
688 ast
::VariantKind
::TupleVariantKind(ref args
) => {
689 field_tys
.extend(args
.iter()
690 .map(|arg
| arg
.ty
.clone()));
692 ast
::VariantKind
::StructVariantKind(ref args
) => {
693 field_tys
.extend(args
.fields
.iter()
694 .map(|field
| field
.node
.ty
.clone()));
699 let methods
= self.methods
.iter().map(|method_def
| {
700 let (explicit_self
, self_args
, nonself_args
, tys
) =
701 method_def
.split_self_nonself_args(cx
, self,
702 type_ident
, generics
);
704 let body
= if method_def
.is_static() {
705 method_def
.expand_static_enum_method_body(
713 method_def
.expand_enum_method_body(cx
,
722 method_def
.create_method(cx
,
732 self.create_derived_impl(cx
, type_ident
, generics
, field_tys
, methods
)
736 fn find_repr_type_name(diagnostic
: &SpanHandler
,
737 type_attrs
: &[ast
::Attribute
]) -> &'
static str {
738 let mut repr_type_name
= "i32";
739 for a
in type_attrs
{
740 for r
in &attr
::find_repr_attrs(diagnostic
, a
) {
741 repr_type_name
= match *r
{
742 attr
::ReprAny
| attr
::ReprPacked
=> continue,
743 attr
::ReprExtern
=> "i32",
745 attr
::ReprInt(_
, attr
::SignedInt(ast
::TyIs
)) => "isize",
746 attr
::ReprInt(_
, attr
::SignedInt(ast
::TyI8
)) => "i8",
747 attr
::ReprInt(_
, attr
::SignedInt(ast
::TyI16
)) => "i16",
748 attr
::ReprInt(_
, attr
::SignedInt(ast
::TyI32
)) => "i32",
749 attr
::ReprInt(_
, attr
::SignedInt(ast
::TyI64
)) => "i64",
751 attr
::ReprInt(_
, attr
::UnsignedInt(ast
::TyUs
)) => "usize",
752 attr
::ReprInt(_
, attr
::UnsignedInt(ast
::TyU8
)) => "u8",
753 attr
::ReprInt(_
, attr
::UnsignedInt(ast
::TyU16
)) => "u16",
754 attr
::ReprInt(_
, attr
::UnsignedInt(ast
::TyU32
)) => "u32",
755 attr
::ReprInt(_
, attr
::UnsignedInt(ast
::TyU64
)) => "u64",
762 impl<'a
> MethodDef
<'a
> {
763 fn call_substructure_method(&self,
767 self_args
: &[P
<Expr
>],
768 nonself_args
: &[P
<Expr
>],
769 fields
: &SubstructureFields
)
771 let substructure
= Substructure
{
772 type_ident
: type_ident
,
773 method_ident
: cx
.ident_of(self.name
),
774 self_args
: self_args
,
775 nonself_args
: nonself_args
,
778 let mut f
= self.combine_substructure
.borrow_mut();
779 let f
: &mut CombineSubstructureFunc
= &mut *f
;
780 f(cx
, trait_
.span
, &substructure
)
789 self.ret_ty
.to_ty(cx
, trait_
.span
, type_ident
, generics
)
792 fn is_static(&self) -> bool
{
793 self.explicit_self
.is_none()
796 fn split_self_nonself_args(&self,
801 -> (ast
::ExplicitSelf
, Vec
<P
<Expr
>>, Vec
<P
<Expr
>>, Vec
<(Ident
, P
<ast
::Ty
>)>) {
803 let mut self_args
= Vec
::new();
804 let mut nonself_args
= Vec
::new();
805 let mut arg_tys
= Vec
::new();
806 let mut nonstatic
= false;
808 let ast_explicit_self
= match self.explicit_self
{
809 Some(ref self_ptr
) => {
810 let (self_expr
, explicit_self
) =
811 ty
::get_explicit_self(cx
, trait_
.span
, self_ptr
);
813 self_args
.push(self_expr
);
818 None
=> codemap
::respan(trait_
.span
, ast
::SelfStatic
),
821 for (i
, ty
) in self.args
.iter().enumerate() {
822 let ast_ty
= ty
.to_ty(cx
, trait_
.span
, type_ident
, generics
);
823 let ident
= cx
.ident_of(&format
!("__arg_{}", i
));
824 arg_tys
.push((ident
, ast_ty
));
826 let arg_expr
= cx
.expr_ident(trait_
.span
, ident
);
829 // for static methods, just treat any Self
830 // arguments as a normal arg
831 Self_
if nonstatic
=> {
832 self_args
.push(arg_expr
);
834 Ptr(ref ty
, _
) if **ty
== Self_
&& nonstatic
=> {
835 self_args
.push(cx
.expr_deref(trait_
.span
, arg_expr
))
838 nonself_args
.push(arg_expr
);
843 (ast_explicit_self
, self_args
, nonself_args
, arg_tys
)
846 fn create_method(&self,
852 explicit_self
: ast
::ExplicitSelf
,
853 arg_types
: Vec
<(Ident
, P
<ast
::Ty
>)> ,
854 body
: P
<Expr
>) -> P
<ast
::ImplItem
> {
855 // create the generics that aren't for Self
856 let fn_generics
= self.generics
.to_generics(cx
, trait_
.span
, type_ident
, generics
);
858 let self_arg
= match explicit_self
.node
{
859 ast
::SelfStatic
=> None
,
860 // creating fresh self id
861 _
=> Some(ast
::Arg
::new_self(trait_
.span
, ast
::MutImmutable
, special_idents
::self_
))
864 let args
= arg_types
.into_iter().map(|(name
, ty
)| {
865 cx
.arg(trait_
.span
, name
, ty
)
867 self_arg
.into_iter().chain(args
).collect()
870 let ret_type
= self.get_ret_ty(cx
, trait_
, generics
, type_ident
);
872 let method_ident
= cx
.ident_of(self.name
);
873 let fn_decl
= cx
.fn_decl(args
, ret_type
);
874 let body_block
= cx
.block_expr(body
);
876 let unsafety
= if self.is_unsafe
{
877 ast
::Unsafety
::Unsafe
879 ast
::Unsafety
::Normal
882 // Create the method.
884 id
: ast
::DUMMY_NODE_ID
,
885 attrs
: self.attributes
.clone(),
889 node
: ast
::MethodImplItem(ast
::MethodSig
{
890 generics
: fn_generics
,
892 explicit_self
: explicit_self
,
894 constness
: ast
::Constness
::NotConst
,
901 /// #[derive(PartialEq)]
902 /// struct A { x: i32, y: i32 }
904 /// // equivalent to:
905 /// impl PartialEq for A {
906 /// fn eq(&self, __arg_1: &A) -> bool {
908 /// A {x: ref __self_0_0, y: ref __self_0_1} => {
910 /// A {x: ref __self_1_0, y: ref __self_1_1} => {
911 /// __self_0_0.eq(__self_1_0) && __self_0_1.eq(__self_1_1)
919 fn expand_struct_method_body
<'b
>(&self,
921 trait_
: &TraitDef
<'b
>,
922 struct_def
: &'b StructDef
,
924 self_args
: &[P
<Expr
>],
925 nonself_args
: &[P
<Expr
>])
928 let mut raw_fields
= Vec
::new(); // Vec<[fields of self],
929 // [fields of next Self arg], [etc]>
930 let mut patterns
= Vec
::new();
931 for i
in 0..self_args
.len() {
932 let struct_path
= cx
.path(DUMMY_SP
, vec
!( type_ident
));
933 let (pat
, ident_expr
) =
934 trait_
.create_struct_pattern(cx
,
937 &format
!("__self_{}",
941 raw_fields
.push(ident_expr
);
944 // transpose raw_fields
945 let fields
= if !raw_fields
.is_empty() {
946 let mut raw_fields
= raw_fields
.into_iter().map(|v
| v
.into_iter());
947 let first_field
= raw_fields
.next().unwrap();
948 let mut other_fields
: Vec
<vec
::IntoIter
<_
>>
949 = raw_fields
.collect();
950 first_field
.map(|(span
, opt_id
, field
, attrs
)| {
955 other
: other_fields
.iter_mut().map(|l
| {
956 match l
.next().unwrap() {
964 cx
.span_bug(trait_
.span
,
965 "no self arguments to non-static method in generic \
969 // body of the inner most destructuring match
970 let mut body
= self.call_substructure_method(
978 // make a series of nested matches, to destructure the
979 // structs. This is actually right-to-left, but it shouldn't
981 for (arg_expr
, pat
) in self_args
.iter().zip(patterns
) {
982 body
= cx
.expr_match(trait_
.span
, arg_expr
.clone(),
983 vec
!( cx
.arm(trait_
.span
, vec
!(pat
.clone()), body
) ))
988 fn expand_static_struct_method_body(&self,
991 struct_def
: &StructDef
,
993 self_args
: &[P
<Expr
>],
994 nonself_args
: &[P
<Expr
>])
996 let summary
= trait_
.summarise_struct(cx
, struct_def
);
998 self.call_substructure_method(cx
,
1001 self_args
, nonself_args
,
1002 &StaticStruct(struct_def
, summary
))
1006 /// #[derive(PartialEq)]
1012 /// // is equivalent to
1014 /// impl PartialEq for A {
1015 /// fn eq(&self, __arg_1: &A) -> ::bool {
1016 /// match (&*self, &*__arg_1) {
1017 /// (&A1, &A1) => true,
1018 /// (&A2(ref __self_0),
1019 /// &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)),
1021 /// let __self_vi = match *self { A1(..) => 0, A2(..) => 1 };
1022 /// let __arg_1_vi = match *__arg_1 { A1(..) => 0, A2(..) => 1 };
1030 /// (Of course `__self_vi` and `__arg_1_vi` are unused for
1031 /// `PartialEq`, and those subcomputations will hopefully be removed
1032 /// as their results are unused. The point of `__self_vi` and
1033 /// `__arg_1_vi` is for `PartialOrd`; see #15503.)
1034 fn expand_enum_method_body
<'b
>(&self,
1036 trait_
: &TraitDef
<'b
>,
1037 enum_def
: &'b EnumDef
,
1038 type_attrs
: &[ast
::Attribute
],
1040 self_args
: Vec
<P
<Expr
>>,
1041 nonself_args
: &[P
<Expr
>])
1043 self.build_enum_match_tuple(
1044 cx
, trait_
, enum_def
, type_attrs
, type_ident
, self_args
, nonself_args
)
1048 /// Creates a match for a tuple of all `self_args`, where either all
1049 /// variants match, or it falls into a catch-all for when one variant
1052 /// There are N + 1 cases because is a case for each of the N
1053 /// variants where all of the variants match, and one catch-all for
1054 /// when one does not match.
1056 /// As an optimization we generate code which checks whether all variants
1057 /// match first which makes llvm see that C-like enums can be compiled into
1058 /// a simple equality check (for PartialEq).
1060 /// The catch-all handler is provided access the variant index values
1061 /// for each of the self-args, carried in precomputed variables.
1064 /// let __self0_vi = unsafe {
1065 /// std::intrinsics::discriminant_value(&self) } as i32;
1066 /// let __self1_vi = unsafe {
1067 /// std::intrinsics::discriminant_value(&__arg1) } as i32;
1068 /// let __self2_vi = unsafe {
1069 /// std::intrinsics::discriminant_value(&__arg2) } as i32;
1071 /// if __self0_vi == __self1_vi && __self0_vi == __self2_vi && ... {
1073 /// (Variant1, Variant1, ...) => Body1
1074 /// (Variant2, Variant2, ...) => Body2,
1076 /// _ => ::core::intrinsics::unreachable()
1080 /// ... // catch-all remainder can inspect above variant index values.
1083 fn build_enum_match_tuple
<'b
>(
1086 trait_
: &TraitDef
<'b
>,
1087 enum_def
: &'b EnumDef
,
1088 type_attrs
: &[ast
::Attribute
],
1090 self_args
: Vec
<P
<Expr
>>,
1091 nonself_args
: &[P
<Expr
>]) -> P
<Expr
> {
1093 let sp
= trait_
.span
;
1094 let variants
= &enum_def
.variants
;
1096 let self_arg_names
= self_args
.iter().enumerate()
1097 .map(|(arg_count
, _self_arg
)| {
1099 "__self".to_string()
1101 format
!("__arg_{}", arg_count
)
1104 .collect
::<Vec
<String
>>();
1106 let self_arg_idents
= self_arg_names
.iter()
1107 .map(|name
|cx
.ident_of(&name
[..]))
1108 .collect
::<Vec
<ast
::Ident
>>();
1110 // The `vi_idents` will be bound, solely in the catch-all, to
1111 // a series of let statements mapping each self_arg to an int
1112 // value corresponding to its discriminant.
1113 let vi_idents
: Vec
<ast
::Ident
> = self_arg_names
.iter()
1114 .map(|name
| { let vi_suffix = format!("{}_vi
", &name[..]);
1115 cx.ident_of(&vi_suffix[..]) })
1116 .collect::<Vec<ast::Ident>>();
1118 // Builds, via callback to call_substructure_method, the
1119 // delegated expression that handles the catch-all case,
1120 // using `__variants_tuple` to drive logic if necessary.
1121 let catch_all_substructure = EnumNonMatchingCollapsed(
1122 self_arg_idents, &variants[..], &vi_idents[..]);
1124 // These arms are of the form:
1125 // (Variant1, Variant1, ...) => Body1
1126 // (Variant2, Variant2, ...) => Body2
1128 // where each tuple has length = self_args.len()
1129 let mut match_arms: Vec<ast::Arm> = variants.iter().enumerate()
1130 .map(|(index, variant)| {
1131 let mk_self_pat = |cx: &mut ExtCtxt, self_arg_name: &str| {
1132 let (p, idents) = trait_.create_enum_variant_pattern(cx, type_ident,
1136 (cx.pat(sp, ast::PatRegion(p, ast::MutImmutable)), idents)
1139 // A single arm has form (&VariantK, &VariantK, ...) => BodyK
1140 // (see "Final wrinkle
" note below for why.)
1141 let mut subpats = Vec::with_capacity(self_arg_names.len());
1142 let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1);
1143 let first_self_pat_idents = {
1144 let (p, idents) = mk_self_pat(cx, &self_arg_names[0]);
1148 for self_arg_name in &self_arg_names[1..] {
1149 let (p, idents) = mk_self_pat(cx, &self_arg_name[..]);
1151 self_pats_idents.push(idents);
1154 // Here is the pat = `(&VariantK, &VariantK, ...)`
1155 let single_pat = cx.pat_tuple(sp, subpats);
1157 // For the BodyK, we need to delegate to our caller,
1158 // passing it an EnumMatching to indicate which case
1161 // All of the Self args have the same variant in these
1162 // cases. So we transpose the info in self_pats_idents
1163 // to gather the getter expressions together, in the
1164 // form that EnumMatching expects.
1166 // The transposition is driven by walking across the
1167 // arg fields of the variant for the first self pat.
1168 let field_tuples = first_self_pat_idents.into_iter().enumerate()
1169 // For each arg field of self, pull out its getter expr ...
1170 .map(|(field_index, (sp, opt_ident, self_getter_expr, attrs))| {
1171 // ... but FieldInfo also wants getter expr
1172 // for matching other arguments of Self type;
1173 // so walk across the *other* self_pats_idents
1174 // and pull out getter for same field in each
1175 // of them (using `field_index` tracked above).
1176 // That is the heart of the transposition.
1177 let others = self_pats_idents.iter().map(|fields| {
1178 let (_, _opt_ident, ref other_getter_expr, _) =
1179 fields[field_index];
1181 // All Self args have same variant, so
1182 // opt_idents are the same. (Assert
1183 // here to make it self-evident that
1184 // it is okay to ignore `_opt_ident`.)
1185 assert!(opt_ident == _opt_ident);
1187 other_getter_expr.clone()
1188 }).collect::<Vec<P<Expr>>>();
1190 FieldInfo { span: sp,
1192 self_: self_getter_expr,
1196 }).collect::<Vec<FieldInfo>>();
1198 // Now, for some given VariantK, we have built up
1199 // expressions for referencing every field of every
1200 // Self arg, assuming all are instances of VariantK.
1201 // Build up code associated with such a case.
1202 let substructure = EnumMatching(index,
1205 let arm_expr = self.call_substructure_method(
1206 cx, trait_, type_ident, &self_args[..], nonself_args,
1209 cx.arm(sp, vec![single_pat], arm_expr)
1211 // We will usually need the catch-all after matching the
1212 // tuples `(VariantK, VariantK, ...)` for each VariantK of the
1215 // * when there is only one Self arg, the arms above suffice
1216 // (and the deriving we call back into may not be prepared to
1217 // handle EnumNonMatchCollapsed), and,
1219 // * when the enum has only one variant, the single arm that
1220 // is already present always suffices.
1222 // * In either of the two cases above, if we *did* add a
1223 // catch-all `_` match, it would trigger the
1224 // unreachable-pattern error.
1226 if variants.len() > 1 && self_args.len() > 1 {
1227 // Build a series of let statements mapping each self_arg
1228 // to its discriminant value. If this is a C-style enum
1229 // with a specific repr type, then casts the values to
1230 // that type. Otherwise casts to `i32` (the default repr
1233 // i.e. for `enum E<T> { A, B(1), C(T, T) }`, and a deriving
1234 // with three Self args, builds three statements:
1237 // let __self0_vi = unsafe {
1238 // std::intrinsics::discriminant_value(&self) } as i32;
1239 // let __self1_vi = unsafe {
1240 // std::intrinsics::discriminant_value(&__arg1) } as i32;
1241 // let __self2_vi = unsafe {
1242 // std::intrinsics::discriminant_value(&__arg2) } as i32;
1244 let mut index_let_stmts: Vec<P<ast::Stmt>> = Vec::new();
1246 //We also build an expression which checks whether all discriminants are equal
1247 // discriminant_test = __self0_vi == __self1_vi && __self0_vi == __self2_vi && ...
1248 let mut discriminant_test = cx.expr_bool(sp, true);
1250 let target_type_name =
1251 find_repr_type_name(&cx.parse_sess.span_diagnostic, type_attrs);
1253 let mut first_ident = None;
1254 for (&ident, self_arg) in vi_idents.iter().zip(&self_args) {
1255 let path = vec![cx.ident_of_std("core
"),
1256 cx.ident_of("intrinsics
"),
1257 cx.ident_of("discriminant_value
")];
1258 let call = cx.expr_call_global(
1259 sp, path, vec![cx.expr_addr_of(sp, self_arg.clone())]);
1260 let variant_value = cx.expr_block(P(ast::Block {
1263 id: ast::DUMMY_NODE_ID,
1264 rules: ast::UnsafeBlock(ast::CompilerGenerated),
1267 let target_ty = cx.ty_ident(sp, cx.ident_of(target_type_name));
1268 let variant_disr = cx.expr_cast(sp, variant_value, target_ty);
1269 let let_stmt = cx.stmt_let(sp, false, ident, variant_disr);
1270 index_let_stmts.push(let_stmt);
1274 let first_expr = cx.expr_ident(sp, first);
1275 let id = cx.expr_ident(sp, ident);
1276 let test = cx.expr_binary(sp, ast::BiEq, first_expr, id);
1277 discriminant_test = cx.expr_binary(sp, ast::BiAnd, discriminant_test, test)
1280 first_ident = Some(ident);
1285 let arm_expr = self.call_substructure_method(
1286 cx, trait_, type_ident, &self_args[..], nonself_args,
1287 &catch_all_substructure);
1289 //Since we know that all the arguments will match if we reach the match expression we
1290 //add the unreachable intrinsics as the result of the catch all which should help llvm
1292 let path = vec![cx.ident_of_std("core
"),
1293 cx.ident_of("intrinsics
"),
1294 cx.ident_of("unreachable
")];
1295 let call = cx.expr_call_global(
1297 let unreachable = cx.expr_block(P(ast::Block {
1300 id: ast::DUMMY_NODE_ID,
1301 rules: ast::UnsafeBlock(ast::CompilerGenerated),
1303 match_arms.push(cx.arm(sp, vec![cx.pat_wild(sp)], unreachable));
1305 // Final wrinkle: the self_args are expressions that deref
1306 // down to desired l-values, but we cannot actually deref
1307 // them when they are fed as r-values into a tuple
1308 // expression; here add a layer of borrowing, turning
1309 // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`.
1310 let borrowed_self_args = self_args.move_map(|self_arg| cx.expr_addr_of(sp, self_arg));
1311 let match_arg = cx.expr(sp, ast::ExprTup(borrowed_self_args));
1313 //Lastly we create an expression which branches on all discriminants being equal
1314 // if discriminant_test {
1316 // (Variant1, Variant1, ...) => Body1
1317 // (Variant2, Variant2, ...) => Body2,
1319 // _ => ::core::intrinsics::unreachable()
1323 // <delegated expression referring to __self0_vi, et al.>
1325 let all_match = cx.expr_match(sp, match_arg, match_arms);
1326 let arm_expr = cx.expr_if(sp, discriminant_test, all_match, Some(arm_expr));
1328 cx.block_all(sp, index_let_stmts, Some(arm_expr)))
1329 } else if variants.is_empty() {
1330 // As an additional wrinkle, For a zero-variant enum A,
1331 // currently the compiler
1332 // will accept `fn (a: &Self) { match *a { } }`
1333 // but rejects `fn (a: &Self) { match (&*a,) { } }`
1334 // as well as `fn (a: &Self) { match ( *a,) { } }`
1336 // This means that the strategy of building up a tuple of
1337 // all Self arguments fails when Self is a zero variant
1338 // enum: rustc rejects the expanded program, even though
1339 // the actual code tends to be impossible to execute (at
1340 // least safely), according to the type system.
1342 // The most expedient fix for this is to just let the
1343 // code fall through to the catch-all. But even this is
1344 // error-prone, since the catch-all as defined above would
1345 // generate code like this:
1347 // _ => { let __self0 = match *self { };
1348 // let __self1 = match *__arg_0 { };
1349 // <catch-all-expr> }
1351 // Which yields bindings for variables which type
1352 // inference cannot resolve to unique types.
1354 // One option to the above might be to add explicit type
1355 // annotations. But the *only* reason to go down that path
1356 // would be to try to make the expanded output consistent
1357 // with the case when the number of enum variants >= 1.
1359 // That just isn't worth it. In fact, trying to generate
1360 // sensible code for *any* deriving on a zero-variant enum
1361 // does not make sense. But at the same time, for now, we
1362 // do not want to cause a compile failure just because the
1363 // user happened to attach a deriving to their
1364 // zero-variant enum.
1366 // Instead, just generate a failing expression for the
1367 // zero variant case, skipping matches and also skipping
1368 // delegating back to the end user code entirely.
1370 // (See also #4499 and #12609; note that some of the
1371 // discussions there influence what choice we make here;
1372 // e.g. if we feature-gate `match x { ... }` when x refers
1373 // to an uninhabited type (e.g. a zero-variant enum or a
1374 // type holding such an enum), but do not feature-gate
1375 // zero-variant enums themselves, then attempting to
1376 // derive Debug on such a type could here generate code
1377 // that needs the feature gate enabled.)
1379 cx.expr_unreachable(sp)
1383 // Final wrinkle: the self_args are expressions that deref
1384 // down to desired l-values, but we cannot actually deref
1385 // them when they are fed as r-values into a tuple
1386 // expression; here add a layer of borrowing, turning
1387 // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`.
1388 let borrowed_self_args = self_args.move_map(|self_arg| cx.expr_addr_of(sp, self_arg));
1389 let match_arg = cx.expr(sp, ast::ExprTup(borrowed_self_args));
1390 cx.expr_match(sp, match_arg, match_arms)
// Builds the body of a derived *static* method (one taking no
// Self-typed value arguments) when `Self` is an enum: summarise every
// variant's fields and delegate to the per-trait callback as a
// `StaticEnum` substructure.
// NOTE(review): this extract is missing several original lines
// (remaining parameters, match-arm and closing braces) — confirm
// against the full file before editing.
1394 fn expand_static_enum_method_body(&self,
1399                                       self_args: &[P<Expr>],
1400                                       nonself_args: &[P<Expr>])
// Each summary pairs a variant's name and span with a description of
// its fields: `Unnamed` spans for tuple variants, a struct summary
// (named or unnamed) for struct variants.
1402         let summary = enum_def.variants.iter().map(|v| {
1403             let ident = v.node.name;
1404             let summary = match v.node.kind {
1405                 ast::TupleVariantKind(ref args) => {
1406                     Unnamed(args.iter().map(|va| trait_.set_expn_info(cx, va.ty.span)).collect())
1408                 ast::StructVariantKind(ref struct_def) => {
1409                     trait_.summarise_struct(cx, &**struct_def)
1412             (ident, v.span, summary)
// Hand the collected variant summaries to the trait-specific
// combine function.
1414         self.call_substructure_method(cx, trait_, type_ident,
1415                                       self_args, nonself_args,
1416                                       &StaticEnum(enum_def, summary))
// Rough classification of a struct's field style: all named fields
// (Record), all positional fields (Tuple), or not yet determined
// (Unknown). `PartialEq` lets the classifier compare states directly.
// NOTE(review): the `enum` header line itself is missing from this
// extract.
1420 #[derive(PartialEq)] // dogfooding!
1422     Unknown, Record, Tuple
1425 // general helper methods.
1426 impl<'a> TraitDef<'a> {
/// Tags `to_set` with expansion info naming this derive, so that
/// diagnostics for generated code are attributed to the
/// `#[derive(Trait)]` attribute rather than to synthetic AST nodes.
// NOTE(review): lines between the shown ones are missing from this
// extract (e.g. the `Some(..)` arm and the call-site/closing fields of
// the ExpnInfo) — verify against the full file.
1427     fn set_expn_info(&self,
1429                      mut to_set: Span) -> Span {
// The last path segment is the trait's name; an empty path is a
// compiler bug, reported via span_bug.
1430         let trait_name = match self.path.path.last() {
1431             None => cx.span_bug(self.span, "trait with empty path
in generic `derive`
"),
// Record a fresh expansion whose callee reads "derive(Trait)" and
// whose span is the derive attribute itself.
1434         to_set.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
1436             callee: codemap::NameAndSpan {
1437                 name: format!("derive({}
)", trait_name),
1438                 format: codemap::MacroAttribute,
1439                 span: Some(self.span),
1440                 allow_internal_unstable: false,
/// Classifies a struct's fields into `StaticFields`: `Named` when the
/// fields carry identifiers, `Unnamed` (spans only) for tuple structs
/// and empty structs, and a compiler bug if both kinds appear.
1446     fn summarise_struct(&self,
1448                         struct_def: &StructDef) -> StaticFields {
1449         let mut named_idents = Vec::new();
1450         let mut just_spans = Vec::new();
// Bucket every field by whether it has a name, tagging each span with
// this derive's expansion info.
1451         for field in struct_def.fields.iter(){
1452             let sp = self.set_expn_info(cx, field.span);
1453             match field.node.kind {
1454                 ast::NamedField(ident, _) => named_idents.push((ident, sp)),
1455                 ast::UnnamedField(..) => just_spans.push(sp),
// A struct may be all-named or all-unnamed, never a mix.
1459         match (just_spans.is_empty(), named_idents.is_empty()) {
1460             (false, false) => cx.span_bug(self.span,
1461                                           "a
struct with named and unnamed
\
1462                                           fields
in generic `derive`
"),
1464             (_, false) => Named(named_idents),
1465             // tuple structs (includes empty structs)
1466             (_, _) => Unnamed(just_spans)
/// Turns each spanned ident into a by-reference binding pattern
/// (`ref`/`ref mut` per `mutbl`), for use inside a struct or variant
/// destructuring pattern.
// NOTE(review): some lines are missing from this extract (the pattern
// constructor call and the trailing collect/closing braces).
1470     fn create_subpatterns(&self,
1472                           field_paths: Vec<ast::SpannedIdent> ,
1473                           mutbl: ast::Mutability)
1474                           -> Vec<P<ast::Pat>> {
1475         field_paths.iter().map(|path| {
1477                                          ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None))
/// Builds a destructuring pattern for a struct (named or tuple style)
/// plus, for each field: its span, its identifier if named, a getter
/// expression referring to the bound name, and its attributes.
/// Bound names are synthesized as `{prefix}_{index}`.
1481     fn create_struct_pattern(&self,
1483                              struct_path: ast::Path,
1484                              struct_def: &'a StructDef,
1486                              mutbl: ast::Mutability)
1487                              -> (P<ast::Pat>, Vec<(Span, Option<Ident>,
1489                                                    &'a [ast::Attribute])>) {
// Fieldless structs get a bare path pattern and no field info.
1490         if struct_def.fields.is_empty() {
1491             return (cx.pat_enum(self.span, struct_path, vec![]), vec![]);
1494         let mut paths = Vec::new();
1495         let mut ident_expr = Vec::new();
1496         let mut struct_type = Unknown;
// Walk the fields, tracking whether this is a record or tuple struct;
// mixing the two kinds is a compiler bug.
1498         for (i, struct_field) in struct_def.fields.iter().enumerate() {
1499             let sp = self.set_expn_info(cx, struct_field.span);
1500             let opt_id = match struct_field.node.kind {
1501                 ast::NamedField(ident, _) if (struct_type == Unknown ||
1502                                               struct_type == Record) => {
1503                     struct_type = Record;
1506                 ast::UnnamedField(..) if (struct_type == Unknown ||
1507                                           struct_type == Tuple) => {
1508                     struct_type = Tuple;
1512                     cx.span_bug(sp, "a
struct with named and unnamed fields
in `derive`
");
// Synthesize the binding name and a getter expression `(*name)` that
// derefs the by-ref binding back to the field value.
1515             let ident = cx.ident_of(&format!("{}_{}
", prefix, i));
1516             paths.push(codemap::Spanned{span: sp, node: ident});
1518                 sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident)))));
1519             ident_expr.push((sp, opt_id, val, &struct_field.node.attrs[..]));
// One by-ref subpattern per field, then assemble the overall pattern
// in whichever style matches the struct.
1522         let subpats = self.create_subpatterns(cx, paths, mutbl);
1524         // struct_type is definitely not Unknown, since struct_def.fields
1525         // must be nonempty to reach here
1526         let pattern = if struct_type == Record {
1527             let field_pats = subpats.into_iter().zip(&ident_expr)
1528                                     .map(|(pat, &(_, id, _, _))| {
1529                 // id is guaranteed to be Some
1532                     node: ast::FieldPat { ident: id.unwrap(), pat: pat, is_shorthand: false },
1535             cx.pat_struct(self.span, struct_path, field_pats)
1537             cx.pat_enum(self.span, struct_path, subpats)
1540         (pattern, ident_expr)
/// Builds a destructuring pattern for one enum variant, mirroring
/// `create_struct_pattern`: tuple variants are handled inline here,
/// struct variants delegate to `create_struct_pattern`.
1543     fn create_enum_variant_pattern(&self,
1545                                    enum_ident: ast::Ident,
1546                                    variant: &'a ast::Variant,
1548                                    mutbl: ast::Mutability)
1549         -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) {
// Patterns are written with the enum-qualified path `Enum::Variant`.
1550         let variant_ident = variant.node.name;
1551         let variant_path = cx.path(variant.span, vec![enum_ident, variant_ident]);
1552         match variant.node.kind {
1553             ast::TupleVariantKind(ref variant_args) => {
// Nullary variants get a bare path pattern and no field info.
1554                 if variant_args.is_empty() {
1555                     return (cx.pat_enum(variant.span, variant_path, vec![]), vec![]);
1558                 let mut paths = Vec::new();
1559                 let mut ident_expr: Vec<(_, _, _, &'a [ast::Attribute])> = Vec::new();
// For each positional argument, synthesize a `{prefix}_{index}`
// binding and a `(*name)` getter expression; tuple-variant fields
// have no identifier and no attributes.
1560                 for (i, va) in variant_args.iter().enumerate() {
1561                     let sp = self.set_expn_info(cx, va.ty.span);
1562                     let ident = cx.ident_of(&format!("{}_{}
", prefix, i));
1563                     let path1 = codemap::Spanned{span: sp, node: ident};
1565                     let expr_path = cx.expr_path(cx.path_ident(sp, ident));
1566                     let val = cx.expr(sp, ast::ExprParen(cx.expr_deref(sp, expr_path)));
1567                     ident_expr.push((sp, None, val, &[]));
1570                 let subpats = self.create_subpatterns(cx, paths, mutbl);
1572                 (cx.pat_enum(variant.span, variant_path, subpats),
// Struct variants are just structs with a variant path.
1575             ast::StructVariantKind(ref struct_def) => {
1576                 self.create_struct_pattern(cx, variant_path, &**struct_def,
1583 /* helpful premade recipes */
1585 /// Fold the fields. `use_foldl` controls whether this is done
1586 /// left-to-right (`true`) or right-to-left (`false`).
// NOTE(review): this extract is missing lines (remaining parameters,
// the `use_foldl` branch structure, the other-field arguments to `f`,
// and closing braces) — verify against the full file.
1587 pub fn cs_fold<F>(use_foldl: bool,
1590                   mut enum_nonmatch_f: EnumNonMatchCollapsedFunc,
1593                   substructure: &Substructure)
1595     F: FnMut(&mut ExtCtxt, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>,
// Matching-variant and struct cases fold `f` over every field's
// getter expressions; direction depends on `use_foldl`.
1597     match *substructure.fields {
1598         EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
1600                 all_fields.iter().fold(base, |old, field| {
1604                           field.self_.clone(),
1608                 all_fields.iter().rev().fold(base, |old, field| {
1612                           field.self_.clone(),
// Mismatched enum variants delegate to the caller-supplied handler.
1617         EnumNonMatchingCollapsed(ref all_args, _, tuple) =>
1618             enum_nonmatch_f(cx, trait_span, (&all_args[..], tuple),
1619                             substructure.nonself_args),
// Static methods have no field values to fold over: compiler bug.
1620         StaticEnum(..) | StaticStruct(..) => {
1621             cx.span_bug(trait_span, "static function
in `derive`
")
1627 /// Call the method that is being derived on all the fields, and then
1628 /// process the collected results. i.e.
1631 /// f(cx, span, vec![self_1.method(__arg_1_1, __arg_2_1),
1632 ///                  self_2.method(__arg_1_2, __arg_2_2)])
// NOTE(review): lines are missing from this extract (remaining
// parameters, the method-call argument list, and closing braces).
1635 pub fn cs_same_method<F>(f: F,
1636                          mut enum_nonmatch_f: EnumNonMatchCollapsedFunc,
1639                          substructure: &Substructure)
1641     F: FnOnce(&mut ExtCtxt, Span, Vec<P<Expr>>) -> P<Expr>,
1643     match *substructure.fields {
1644         EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
1645             // call self_n.method(other_1_n, other_2_n, ...)
// For each field, build `self_field.method(...)`; the other-Self
// arguments are passed by reference (expr_addr_of).
1646             let called = all_fields.iter().map(|field| {
1647                 cx.expr_method_call(field.span,
1648                                     field.self_.clone(),
1649                                     substructure.method_ident,
1651                                        .map(|e| cx.expr_addr_of(field.span, e.clone()))
// Hand the per-field call expressions to the combiner `f`.
1655             f(cx, trait_span, called)
// Mismatched enum variants delegate to the caller-supplied handler.
1657         EnumNonMatchingCollapsed(ref all_self_args, _, tuple) =>
1658             enum_nonmatch_f(cx, trait_span, (&all_self_args[..], tuple),
1659                             substructure.nonself_args),
// Static methods have no receiver fields to call through: compiler bug.
1660         StaticEnum(..) | StaticStruct(..) => {
1661             cx.span_bug(trait_span, "static function
in `derive`
")
1666 /// Fold together the results of calling the derived method on all the
1667 /// fields. `use_foldl` controls whether this is done left-to-right
1668 /// (`true`) or right-to-left (`false`).
// Thin wrapper over `cs_same_method`: the combiner folds the per-field
// call expressions pairwise with `f`, starting from `base`.
// NOTE(review): lines are missing from this extract (remaining
// parameters, the `use_foldl` branch, and the `cs_same_method` call
// head) — verify against the full file.
1670 pub fn cs_same_method_fold<F>(use_foldl: bool,
1673                               enum_nonmatch_f: EnumNonMatchCollapsedFunc,
1676                               substructure: &Substructure)
1678     F: FnMut(&mut ExtCtxt, Span, P<Expr>, P<Expr>) -> P<Expr>,
1683             vals.into_iter().fold(base.clone(), |old, new| {
1684                 f(cx, span, old, new)
1687             vals.into_iter().rev().fold(base.clone(), |old, new| {
1688                 f(cx, span, old, new)
1693     cx, trait_span, substructure)
1696 /// Use a given binop to combine the result of calling the derived method
1697 /// on all the fields.
// Left-folds the per-field method calls with `binop`, starting from
// `base` (e.g. `true` for `&&`, `false` for `||`).
1699 pub fn cs_binop(binop: ast::BinOp_, base: P<Expr>,
1700                 enum_nonmatch_f: EnumNonMatchCollapsedFunc,
1701                 cx: &mut ExtCtxt, trait_span: Span,
1702                 substructure: &Substructure) -> P<Expr> {
1703     cs_same_method_fold(
1704         true, // foldl is good enough
1705         |cx, span, old, new| {
1706             cx.expr_binary(span,
1713         cx, trait_span, substructure)
1716 /// cs_binop with binop == or
// Combines per-field results with `||`; identity element is `false`.
1718 pub fn cs_or(enum_nonmatch_f: EnumNonMatchCollapsedFunc,
1719              cx: &mut ExtCtxt, span: Span,
1720              substructure: &Substructure) -> P<Expr> {
1721     cs_binop(ast::BiOr, cx.expr_bool(span, false),
1723              cx, span, substructure)
1726 /// cs_binop with binop == and
1728 pub fn cs_and(enum_nonmatch_f: EnumNonMatchCollapsedFunc,
1729 cx: &mut ExtCtxt, span: Span,
1730 substructure: &Substructure) -> P<Expr> {
1731 cs_binop(ast::BiAnd, cx.expr_bool(span, true),
1733 cx, span, substructure)