1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! This file infers the variance of type and lifetime parameters. The
12 //! algorithm is taken from Section 4 of the paper "Taming the Wildcards:
13 //! Combining Definition- and Use-Site Variance" published in PLDI'11 and
14 //! written by Altidor et al., and hereafter referred to as The Paper.
15 //!
16 //! This inference is explicitly designed *not* to consider the uses of
17 //! types within code. To determine the variance of type parameters
18 //! defined on type `X`, we only consider the definition of the type `X`
19 //! and the definitions of any types it references.
20 //!
21 //! We only infer variance for type parameters found on *data types*
22 //! like structs and enums. In these cases, there is a fairly straightforward
23 //! explanation for what variance means. The variance of the type
24 //! or lifetime parameters defines whether `T<A>` is a subtype of `T<B>`
25 //! (resp. `T<'a>` and `T<'b>`) based on the relationship of `A` and `B`
26 //! (resp. `'a` and `'b`).
27 //!
28 //! We do not infer variance for type parameters found on traits, fns,
29 //! or impls. Variance on trait parameters can indeed make sense
30 //! (and we used to compute it) but it is actually rather subtle in
31 //! meaning and not that useful in practice, so we removed it. See the
32 //! addendum for some details. Variances on fn/impl parameters, on the
33 //! other hand, don't make sense because these parameters are
34 //! instantiated and then forgotten; they don't persist in types or
35 //! compiled byproducts.
36 //!
37 //! ### The algorithm
38 //!
39 //! The basic idea is quite straightforward. We iterate over the types
40 //! defined and, for each use of a type parameter X, accumulate a
41 //! constraint indicating that the variance of X must be valid for the
42 //! variance of that use site. We then iteratively refine the variance of
43 //! X until all constraints are met. There is *always* a solution, because at
44 //! the limit we can declare all type parameters to be invariant and all
45 //! constraints will be satisfied.
46 //!
47 //! As a simple example, consider:
48 //!
49 //! enum Option<A> { Some(A), None }
50 //! enum OptionalFn<B> { Some(fn(B)), None }
51 //! enum OptionalMap<C> { Some(fn(C) -> C), None }
52 //!
53 //! Here, we will generate the constraints:
54 //!
55 //! 1. V(A) <= +
56 //! 2. V(B) <= -
57 //! 3. V(C) <= +
58 //! 4. V(C) <= -
59 //!
60 //! These indicate that (1) the variance of A must be at most covariant;
61 //! (2) the variance of B must be at most contravariant; and (3, 4) the
62 //! variance of C must be at most covariant *and* contravariant. All of these
63 //! results are based on a variance lattice defined as follows:
64 //!
65 //! * Top (bivariant)
66 //! - +
67 //! o Bottom (invariant)
68 //!
69 //! Based on this lattice, the solution V(A)=+, V(B)=-, V(C)=o is the
70 //! optimal solution. Note that there is always a naive solution which
71 //! just declares all variables to be invariant.
72 //!
73 //! You may be wondering why fixed-point iteration is required. The reason
74 //! is that the variance of a use site may itself be a function of the
75 //! variance of other type parameters. In full generality, our constraints
76 //! take the form:
77 //!
78 //! V(X) <= Term
79 //! Term := + | - | * | o | V(X) | Term x Term
80 //!
81 //! Here the notation V(X) indicates the variance of a type/region
82 //! parameter `X` with respect to its defining class. `Term x Term`
83 //! represents the "variance transform" as defined in the paper:
84 //!
85 //! If the variance of a type variable `X` in type expression `E` is `V2`
86 //! and the definition-site variance of the [corresponding] type parameter
87 //! of a class `C` is `V1`, then the variance of `X` in the type expression
88 //! `C<E>` is `V3 = V1.xform(V2)`.
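//!
//! As a quick sketch of how the transform composes (an illustrative
//! example, not from The Paper; `Wrap` is a made-up type):
//!
//!     struct Wrap<T> { f: fn(Vec<T>) }
//!
//! Here `T` appears covariantly inside `Vec<T>` (`Vec` is covariant in
//! its parameter), but the whole `Vec<T>` sits in a fn-argument position,
//! which applies a contravariant transform. The accumulated constraint is
//! therefore `V(T) <= - x + = -`, and `T` is inferred contravariant in
//! `Wrap`.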
89 //!
90 //! ### Constraints
91 //!
92 //! If I have a struct or enum with where clauses:
93 //!
94 //! struct Foo<T:Bar> { ... }
95 //!
96 //! you might wonder whether the variance of `T` with respect to `Bar`
97 //! affects the variance of `T` with respect to `Foo`. I claim no. The
98 //! reason: assume that `T` is invariant w/r/t `Bar` but covariant w/r/t
99 //! `Foo`. And then we have a `Foo<X>` that is upcast to `Foo<Y>`, where
100 //! `X <: Y`. However, suppose that `X : Bar` holds while `Y : Bar` does not.
101 //! In that case, the upcast will be illegal, not because of a variance
102 //! failure, but rather because the target type `Foo<Y>` is itself just
103 //! not well-formed. Basically we get to assume well-formedness of all
104 //! types involved before considering variance.
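//!
//! As a minimal illustration (not from the original text), in
//!
//!     struct Foo<T: Clone> { field: T }
//!
//! `T` is inferred covariant exactly as it would be without the `Clone`
//! bound; the bound affects only well-formedness, not variance.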
105 //!
106 //! ### Addendum: Variance on traits
107 //!
108 //! As mentioned above, we used to permit variance on traits. This was
109 //! computed based on the appearance of trait type parameters in
110 //! method signatures and was used to represent the compatibility of
111 //! vtables in trait objects (and also "virtual" vtables or dictionaries
112 //! in trait bounds). One complication was that variance for
113 //! associated types is less obvious, since they can be projected out
114 //! and put to myriad uses, so it's not clear when it is safe to allow
115 //! `X<A>::Bar` to vary (or indeed just what that means). Moreover (as
116 //! covered below) all inputs on any trait with an associated type had
117 //! to be invariant, limiting the applicability. Finally, the
118 //! annotations (`MarkerTrait`, `PhantomFn`) needed to ensure that all
119 //! trait type parameters had a variance were confusing and annoying
120 //! for little benefit.
121 //!
122 //! Just for historical reference, I am going to preserve some text indicating
123 //! how one could interpret variance and trait matching.
124 //!
125 //! #### Variance and object types
126 //!
127 //! Just as with structs and enums, we can decide the subtyping
128 //! relationship between two object types `&Trait<A>` and `&Trait<B>`
129 //! based on the relationship of `A` and `B`. Note that for object
130 //! types we ignore the `Self` type parameter -- it is unknown, and
131 //! the nature of dynamic dispatch ensures that we will always call a
132 //! function that is expecting the appropriate `Self` type. However, we
133 //! must be careful with the other type parameters, or else we could
134 //! end up calling a function that is expecting one type but provided
135 //! another.
136 //!
137 //! To see what I mean, consider a trait like so:
138 //!
139 //! trait ConvertTo<A> {
140 //! fn convertTo(&self) -> A;
141 //! }
142 //!
143 //! Intuitively, if we had one object `O=&ConvertTo<Object>` and another
144 //! `S=&ConvertTo<String>`, then `S <: O` because `String <: Object`
145 //! (presuming Java-like "string" and "object" types, my go-to examples
146 //! for subtyping). The actual algorithm would be to compare the
147 //! (explicit) type parameters pairwise respecting their variance: here,
148 //! the type parameter A is covariant (it appears only in a return
149 //! position), and hence we require that `String <: Object`.
150 //!
151 //! You'll note though that we did not consider the binding for the
152 //! (implicit) `Self` type parameter: in fact, it is unknown, so that's
153 //! good. The reason we can ignore that parameter is precisely because we
154 //! don't need to know its value until a call occurs, and at that time (as
155 //! noted above) the dynamic nature of virtual dispatch means the code we run
156 //! will be correct for whatever value `Self` happens to be bound to for
157 //! the particular object whose method we called. `Self` is thus different
158 //! from `A`, because the caller requires that `A` be known in order to
159 //! know the return type of the method `convertTo()`. (As an aside, we
160 //! have rules preventing methods where `Self` appears outside of the
161 //! receiver position from being called via an object.)
162 //!
163 //! #### Trait variance and vtable resolution
164 //!
165 //! But traits aren't only used with objects. They're also used when
166 //! deciding whether a given impl satisfies a given trait bound. To set the
167 //! scene here, imagine I had a function:
168 //!
169 //! fn convertAll<A,T:ConvertTo<A>>(v: &[T]) {
170 //! ...
171 //! }
172 //!
173 //! Now imagine that I have an implementation of `ConvertTo` for `Object`:
174 //!
175 //! impl ConvertTo<int> for Object { ... }
176 //!
177 //! And I want to call `convertAll` on an array of strings. Suppose
178 //! further that for whatever reason I specifically supply the value of
179 //! `String` for the type parameter `T`:
180 //!
181 //! let mut vector = vec!["string", ...];
182 //! convertAll::<int, String>(vector);
183 //!
184 //! Is this legal? To put it another way, can we apply the `impl` for
185 //! `Object` to the type `String`? The answer is yes, but to see why
186 //! we have to expand out what will happen:
187 //!
188 //! - `convertAll` will create a pointer to one of the entries in the
189 //! vector, which will have type `&String`
190 //! - It will then call the impl of `convertTo()` that is intended
191 //! for use with objects. This has the type:
192 //!
193 //! fn(self: &Object) -> int
194 //!
195 //! It is ok to provide a value for `self` of type `&String` because
196 //! `&String <: &Object`.
197 //!
198 //! OK, so intuitively we want this to be legal, so let's bring this back
199 //! to variance and see whether we are computing the correct result. We
200 //! must first figure out how to phrase the question "is an impl for
201 //! `Object,int` usable where an impl for `String,int` is expected?"
202 //!
203 //! Maybe it's helpful to think of a dictionary-passing implementation of
204 //! type classes. In that case, `convertAll()` takes an implicit parameter
205 //! representing the impl. In short, we *have* an impl of type:
206 //!
207 //! V_O = ConvertTo<int> for Object
208 //!
209 //! and the function prototype expects an impl of type:
210 //!
211 //! V_S = ConvertTo<int> for String
212 //!
213 //! As with any argument, this is legal if the type of the value given
214 //! (`V_O`) is a subtype of the type expected (`V_S`). So is `V_O <: V_S`?
215 //! The answer will depend on the variance of the various parameters. In
216 //! this case, because the `Self` parameter is contravariant and `A` is
217 //! covariant, it means that:
218 //!
219 //! V_O <: V_S iff
220 //! int <: int
221 //! String <: Object
222 //!
223 //! These conditions are satisfied and so we are happy.
224 //!
225 //! #### Variance and associated types
226 //!
227 //! Traits with associated types -- or at minimum projection
228 //! expressions -- must be invariant with respect to all of their
229 //! inputs. To see why this makes sense, consider what subtyping for a
230 //! trait reference means:
231 //!
232 //! <T as Trait> <: <U as Trait>
233 //!
234 //! means that if I know that `T as Trait`, I also know that `U as
235 //! Trait`. Moreover, if you think of it as dictionary passing style,
236 //! it means that a dictionary for `<T as Trait>` is safe to use where
237 //! a dictionary for `<U as Trait>` is expected.
238 //!
239 //! The problem is that when you can project types out from `<T as
240 //! Trait>`, the relationship to types projected out of `<U as Trait>`
241 //! is completely unknown unless `T==U` (see #21726 for more
242 //! details). Making `Trait` invariant ensures that this is true.
243 //!
244 //! Another related reason is that if we didn't make traits with
245 //! associated types invariant, then projection is no longer a
246 //! function with a single result. Consider:
247 //!
248 //! ```
249 //! trait Identity { type Out; fn foo(&self); }
250 //! impl<T> Identity for T { type Out = T; ... }
251 //! ```
252 //!
253 //! Now if I have `<&'static () as Identity>::Out`, this can be
254 //! validly derived as `&'a ()` for any `'a`:
255 //!
256 //! <&'a () as Identity> <: <&'static () as Identity>
257 //! if &'static () <: &'a () -- Identity is contravariant in Self
258 //! if 'static : 'a -- Subtyping rules for regions
259 //!
260 //! This change, on the other hand, means that `<&'static () as Identity>::Out`
261 //! is always `&'static ()` (which might then be upcast to `&'a ()`,
262 //! separately). This was helpful in solving #21750.
263
264 use self::VarianceTerm::*;
265 use self::ParamKind::*;
266
267 use arena;
268 use arena::TypedArena;
269 use middle::resolve_lifetime as rl;
270 use middle::subst;
271 use middle::subst::{ParamSpace, FnSpace, TypeSpace, SelfSpace, VecPerParamSpace};
272 use middle::ty::{self, Ty};
273 use rustc::ast_map;
274 use std::fmt;
275 use std::rc::Rc;
276 use syntax::ast;
277 use syntax::ast_util;
278 use syntax::visit;
279 use syntax::visit::Visitor;
280 use util::nodemap::NodeMap;
281
282 pub fn infer_variance(tcx: &ty::ctxt) {
283 let krate = tcx.map.krate();
284 let mut arena = arena::TypedArena::new();
285 let terms_cx = determine_parameters_to_be_inferred(tcx, &mut arena, krate);
286 let constraints_cx = add_constraints_from_crate(terms_cx, krate);
287 solve_constraints(constraints_cx);
288 tcx.variance_computed.set(true);
289 }
290
291 // Representing terms
292 //
293 // Terms are structured as a straightforward tree. Rather than rely on
294 // GC, we allocate terms out of a bounded arena (the lifetime of this
295 // arena is the lifetime 'a that is threaded around).
296 //
297 // We assign a unique index to each type/region parameter whose variance
298 // is to be inferred. We refer to such variables as "inferreds". An
299 // `InferredIndex` is a newtype'd int representing the index of such
300 // a variable.
301
302 type VarianceTermPtr<'a> = &'a VarianceTerm<'a>;
303
304 #[derive(Copy, Clone, Debug)]
305 struct InferredIndex(usize);
306
307 #[derive(Copy, Clone)]
308 enum VarianceTerm<'a> {
309 ConstantTerm(ty::Variance),
310 TransformTerm(VarianceTermPtr<'a>, VarianceTermPtr<'a>),
311 InferredTerm(InferredIndex),
312 }
313
314 impl<'a> fmt::Debug for VarianceTerm<'a> {
315 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
316 match *self {
317 ConstantTerm(c1) => write!(f, "{:?}", c1),
318 TransformTerm(v1, v2) => write!(f, "({:?} \u{00D7} {:?})", v1, v2),
319 InferredTerm(id) => write!(f, "[{}]", { let InferredIndex(i) = id; i })
320 }
321 }
322 }
323
324 // The first pass over the crate simply builds up the set of inferreds.
325
326 struct TermsContext<'a, 'tcx: 'a> {
327 tcx: &'a ty::ctxt<'tcx>,
328 arena: &'a TypedArena<VarianceTerm<'a>>,
329
330 empty_variances: Rc<ty::ItemVariances>,
331
332 // For marker types, UnsafeCell, and other lang items where
333 // variance is hardcoded, records the item-id and the hardcoded
334 // variance.
335 lang_items: Vec<(ast::NodeId, Vec<ty::Variance>)>,
336
337 // Maps from the node id of a type/generic parameter to the
338 // corresponding inferred index.
339 inferred_map: NodeMap<InferredIndex>,
340
341 // Maps from an InferredIndex to the info for that variable.
342 inferred_infos: Vec<InferredInfo<'a>> ,
343 }
344
345 #[derive(Copy, Clone, Debug, PartialEq)]
346 enum ParamKind {
347 TypeParam,
348 RegionParam,
349 }
350
351 struct InferredInfo<'a> {
352 item_id: ast::NodeId,
353 kind: ParamKind,
354 space: ParamSpace,
355 index: usize,
356 param_id: ast::NodeId,
357 term: VarianceTermPtr<'a>,
358
359 // Initial value to use for this parameter when inferring
360 // variance. For most parameters, this is Bivariant. But for lang
361 // items and input type parameters on traits, it is different.
362 initial_variance: ty::Variance,
363 }
364
365 fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: &'a ty::ctxt<'tcx>,
366 arena: &'a mut TypedArena<VarianceTerm<'a>>,
367 krate: &ast::Crate)
368 -> TermsContext<'a, 'tcx> {
369 let mut terms_cx = TermsContext {
370 tcx: tcx,
371 arena: arena,
372 inferred_map: NodeMap(),
373 inferred_infos: Vec::new(),
374
375 lang_items: lang_items(tcx),
376
377 // cache and share the variance struct used for items with
378 // no type/region parameters
379 empty_variances: Rc::new(ty::ItemVariances {
380 types: VecPerParamSpace::empty(),
381 regions: VecPerParamSpace::empty()
382 })
383 };
384
385 visit::walk_crate(&mut terms_cx, krate);
386
387 terms_cx
388 }
389
390 fn lang_items(tcx: &ty::ctxt) -> Vec<(ast::NodeId,Vec<ty::Variance>)> {
391 let all = vec![
392 (tcx.lang_items.phantom_data(), vec![ty::Covariant]),
393 (tcx.lang_items.unsafe_cell_type(), vec![ty::Invariant]),
394
395 // Deprecated:
396 (tcx.lang_items.covariant_type(), vec![ty::Covariant]),
397 (tcx.lang_items.contravariant_type(), vec![ty::Contravariant]),
398 (tcx.lang_items.invariant_type(), vec![ty::Invariant]),
399 (tcx.lang_items.covariant_lifetime(), vec![ty::Covariant]),
400 (tcx.lang_items.contravariant_lifetime(), vec![ty::Contravariant]),
401 (tcx.lang_items.invariant_lifetime(), vec![ty::Invariant]),
402
403 ];
404
405 all.into_iter()
406 .filter(|&(ref d,_)| d.is_some())
407 .filter(|&(ref d,_)| d.as_ref().unwrap().krate == ast::LOCAL_CRATE)
408 .map(|(d, v)| (d.unwrap().node, v))
409 .collect()
410 }
411
412 impl<'a, 'tcx> TermsContext<'a, 'tcx> {
413 fn add_inferreds_for_item(&mut self,
414 item_id: ast::NodeId,
415 has_self: bool,
416 generics: &ast::Generics)
417 {
418 /*!
419 * Add "inferreds" for the generic parameters declared on this
420 * item. This has a lot of annoying parameters because we are
421 * trying to drive this from the AST, rather than the
422 * ty::Generics, so that we can get span info -- but this
423 * means we must accommodate syntactic distinctions.
424 */
425
426 // NB: In the code below for writing the results back into the
427 // tcx, we rely on the fact that all inferreds for a particular
428 // item are assigned contiguous indices.
429
430 let inferreds_on_entry = self.num_inferred();
431
432 if has_self {
433 self.add_inferred(item_id, TypeParam, SelfSpace, 0, item_id);
434 }
435
436 for (i, p) in generics.lifetimes.iter().enumerate() {
437 let id = p.lifetime.id;
438 self.add_inferred(item_id, RegionParam, TypeSpace, i, id);
439 }
440
441 for (i, p) in generics.ty_params.iter().enumerate() {
442 self.add_inferred(item_id, TypeParam, TypeSpace, i, p.id);
443 }
444
445 // If this item has no type or lifetime parameters,
446 // then there are no variances to infer, so just
447 // insert an empty entry into the variance map.
448 // Arguably we could just leave the map empty in this
449 // case but it seems cleaner to be able to distinguish
450 // "invalid item id" from "item id with no
451 // parameters".
452 if self.num_inferred() == inferreds_on_entry {
453 let newly_added =
454 self.tcx.item_variance_map.borrow_mut().insert(
455 ast_util::local_def(item_id),
456 self.empty_variances.clone()).is_none();
457 assert!(newly_added);
458 }
459 }
460
461 fn add_inferred(&mut self,
462 item_id: ast::NodeId,
463 kind: ParamKind,
464 space: ParamSpace,
465 index: usize,
466 param_id: ast::NodeId) {
467 let inf_index = InferredIndex(self.inferred_infos.len());
468 let term = self.arena.alloc(InferredTerm(inf_index));
469 let initial_variance = self.pick_initial_variance(item_id, space, index);
470 self.inferred_infos.push(InferredInfo { item_id: item_id,
471 kind: kind,
472 space: space,
473 index: index,
474 param_id: param_id,
475 term: term,
476 initial_variance: initial_variance });
477 let newly_added = self.inferred_map.insert(param_id, inf_index).is_none();
478 assert!(newly_added);
479
480 debug!("add_inferred(item_path={}, \
481 item_id={}, \
482 kind={:?}, \
483 space={:?}, \
484 index={}, \
485 param_id={}, \
486 inf_index={:?}, \
487 initial_variance={:?})",
488 self.tcx.item_path_str(ast_util::local_def(item_id)),
489 item_id, kind, space, index, param_id, inf_index,
490 initial_variance);
491 }
492
493 fn pick_initial_variance(&self,
494 item_id: ast::NodeId,
495 space: ParamSpace,
496 index: usize)
497 -> ty::Variance
498 {
499 match space {
500 SelfSpace | FnSpace => {
501 ty::Bivariant
502 }
503
504 TypeSpace => {
505 match self.lang_items.iter().find(|&&(n, _)| n == item_id) {
506 Some(&(_, ref variances)) => variances[index],
507 None => ty::Bivariant
508 }
509 }
510 }
511 }
512
513 fn num_inferred(&self) -> usize {
514 self.inferred_infos.len()
515 }
516 }
517
518 impl<'a, 'tcx, 'v> Visitor<'v> for TermsContext<'a, 'tcx> {
519 fn visit_item(&mut self, item: &ast::Item) {
520 debug!("add_inferreds for item {}", self.tcx.map.node_to_string(item.id));
521
522 match item.node {
523 ast::ItemEnum(_, ref generics) |
524 ast::ItemStruct(_, ref generics) => {
525 self.add_inferreds_for_item(item.id, false, generics);
526 }
527 ast::ItemTrait(_, ref generics, _, _) => {
528 // Note: all inputs for traits are ultimately
529 // constrained to be invariant. See `visit_item` in
530 // the impl for `ConstraintContext` below.
531 self.add_inferreds_for_item(item.id, true, generics);
532 visit::walk_item(self, item);
533 }
534
535 ast::ItemExternCrate(_) |
536 ast::ItemUse(_) |
537 ast::ItemDefaultImpl(..) |
538 ast::ItemImpl(..) |
539 ast::ItemStatic(..) |
540 ast::ItemConst(..) |
541 ast::ItemFn(..) |
542 ast::ItemMod(..) |
543 ast::ItemForeignMod(..) |
544 ast::ItemTy(..) |
545 ast::ItemMac(..) => {
546 visit::walk_item(self, item);
547 }
548 }
549 }
550 }
551
552 // Constraint construction and representation
553 //
554 // The second pass over the AST determines the set of constraints.
555 // We walk the set of items and, for each member, generate new constraints.
556
557 struct ConstraintContext<'a, 'tcx: 'a> {
558 terms_cx: TermsContext<'a, 'tcx>,
559
560 // These are pointers to common `ConstantTerm` instances
561 covariant: VarianceTermPtr<'a>,
562 contravariant: VarianceTermPtr<'a>,
563 invariant: VarianceTermPtr<'a>,
564 bivariant: VarianceTermPtr<'a>,
565
566 constraints: Vec<Constraint<'a>> ,
567 }
568
569 /// Declares that the variable `decl_id` appears in a location with
570 /// variance `variance`.
571 #[derive(Copy, Clone)]
572 struct Constraint<'a> {
573 inferred: InferredIndex,
574 variance: &'a VarianceTerm<'a>,
575 }
576
577 fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>,
578 krate: &ast::Crate)
579 -> ConstraintContext<'a, 'tcx>
580 {
581 let covariant = terms_cx.arena.alloc(ConstantTerm(ty::Covariant));
582 let contravariant = terms_cx.arena.alloc(ConstantTerm(ty::Contravariant));
583 let invariant = terms_cx.arena.alloc(ConstantTerm(ty::Invariant));
584 let bivariant = terms_cx.arena.alloc(ConstantTerm(ty::Bivariant));
585 let mut constraint_cx = ConstraintContext {
586 terms_cx: terms_cx,
587 covariant: covariant,
588 contravariant: contravariant,
589 invariant: invariant,
590 bivariant: bivariant,
591 constraints: Vec::new(),
592 };
593 visit::walk_crate(&mut constraint_cx, krate);
594 constraint_cx
595 }
596
597 impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> {
598 fn visit_item(&mut self, item: &ast::Item) {
599 let did = ast_util::local_def(item.id);
600 let tcx = self.terms_cx.tcx;
601
602 debug!("visit_item item={}", tcx.map.node_to_string(item.id));
603
604 match item.node {
605 ast::ItemEnum(ref enum_definition, _) => {
606 let scheme = tcx.lookup_item_type(did);
607
608 // Not entirely obvious: constraints on structs/enums do not
609 // affect the variance of their type parameters. See discussion
610 // in comment at top of module.
611 //
612 // self.add_constraints_from_generics(&scheme.generics);
613
614 // Hack: If we directly call `ty::enum_variants`, it
615 // annoyingly takes it upon itself to run off and
616 // evaluate the discriminants eagerly (*grumpy* that's
617 // not the typical pattern). This results in double
618 // error messages because typeck goes off and does
619 // this at a later time. All we really care about is
620 // the types of the variant arguments, so we just call
621 // `ty::VariantInfo::from_ast_variant()` ourselves
622 // here, mainly so as to mask the differences between
623 // struct-like enums and so forth.
624 for ast_variant in &enum_definition.variants {
625 let variant =
626 ty::VariantInfo::from_ast_variant(tcx,
627 &**ast_variant,
628 /*discriminant*/ 0);
629 for arg_ty in &variant.args {
630 self.add_constraints_from_ty(&scheme.generics, *arg_ty, self.covariant);
631 }
632 }
633 }
634
635 ast::ItemStruct(..) => {
636 let scheme = tcx.lookup_item_type(did);
637
638 // Not entirely obvious: constraints on structs/enums do not
639 // affect the variance of their type parameters. See discussion
640 // in comment at top of module.
641 //
642 // self.add_constraints_from_generics(&scheme.generics);
643
644 let struct_fields = tcx.lookup_struct_fields(did);
645 for field_info in &struct_fields {
646 assert_eq!(field_info.id.krate, ast::LOCAL_CRATE);
647 let field_ty = tcx.node_id_to_type(field_info.id.node);
648 self.add_constraints_from_ty(&scheme.generics, field_ty, self.covariant);
649 }
650 }
651
652 ast::ItemTrait(..) => {
653 let trait_def = tcx.lookup_trait_def(did);
654 self.add_constraints_from_trait_ref(&trait_def.generics,
655 trait_def.trait_ref,
656 self.invariant);
657 }
658
659 ast::ItemExternCrate(_) |
660 ast::ItemUse(_) |
661 ast::ItemStatic(..) |
662 ast::ItemConst(..) |
663 ast::ItemFn(..) |
664 ast::ItemMod(..) |
665 ast::ItemForeignMod(..) |
666 ast::ItemTy(..) |
667 ast::ItemImpl(..) |
668 ast::ItemDefaultImpl(..) |
669 ast::ItemMac(..) => {
670 }
671 }
672
673 visit::walk_item(self, item);
674 }
675 }
676
677 /// Is `param_id` a lifetime according to `map`?
678 fn is_lifetime(map: &ast_map::Map, param_id: ast::NodeId) -> bool {
679 match map.find(param_id) {
680 Some(ast_map::NodeLifetime(..)) => true, _ => false
681 }
682 }
683
684 impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
685 fn tcx(&self) -> &'a ty::ctxt<'tcx> {
686 self.terms_cx.tcx
687 }
688
689 fn inferred_index(&self, param_id: ast::NodeId) -> InferredIndex {
690 match self.terms_cx.inferred_map.get(&param_id) {
691 Some(&index) => index,
692 None => {
693 self.tcx().sess.bug(&format!(
694 "no inferred index entry for {}",
695 self.tcx().map.node_to_string(param_id)));
696 }
697 }
698 }
699
700 fn find_binding_for_lifetime(&self, param_id: ast::NodeId) -> ast::NodeId {
701 let tcx = self.terms_cx.tcx;
702 assert!(is_lifetime(&tcx.map, param_id));
703 match tcx.named_region_map.get(&param_id) {
704 Some(&rl::DefEarlyBoundRegion(_, _, lifetime_decl_id))
705 => lifetime_decl_id,
706 Some(_) => panic!("should not encounter non early-bound cases"),
707
708 // The lookup should only fail when `param_id` is
709 // itself a lifetime binding: use it as the decl_id.
710 None => param_id,
711 }
712
713 }
714
715 /// Is `param_id` a type parameter for which we infer variance?
716 fn is_to_be_inferred(&self, param_id: ast::NodeId) -> bool {
717 let result = self.terms_cx.inferred_map.contains_key(&param_id);
718
719 // To safeguard against invalid inferred_map constructions,
720 // double-check if variance is inferred at some use of a type
721 // parameter (by inspecting parent of its binding declaration
722 // to see if it is introduced by a type or by a fn/impl).
723
724 let check_result = |this:&ConstraintContext| -> bool {
725 let tcx = this.terms_cx.tcx;
726 let decl_id = this.find_binding_for_lifetime(param_id);
727 // Currently only called on lifetimes; double-checking that.
728 assert!(is_lifetime(&tcx.map, param_id));
729 let parent_id = tcx.map.get_parent(decl_id);
730 let parent = tcx.map.find(parent_id).unwrap_or_else(
731 || panic!("tcx.map missing entry for id: {}", parent_id));
732
733 let is_inferred;
734 macro_rules! cannot_happen { () => { {
735 panic!("invalid parent: {} for {}",
736 tcx.map.node_to_string(parent_id),
737 tcx.map.node_to_string(param_id));
738 } } }
739
740 match parent {
741 ast_map::NodeItem(p) => {
742 match p.node {
743 ast::ItemTy(..) |
744 ast::ItemEnum(..) |
745 ast::ItemStruct(..) |
746 ast::ItemTrait(..) => is_inferred = true,
747 ast::ItemFn(..) => is_inferred = false,
748 _ => cannot_happen!(),
749 }
750 }
751 ast_map::NodeTraitItem(..) => is_inferred = false,
752 ast_map::NodeImplItem(..) => is_inferred = false,
753 _ => cannot_happen!(),
754 }
755
756 return is_inferred;
757 };
758
759 assert_eq!(result, check_result(self));
760
761 return result;
762 }
763
764 /// Returns a variance term representing the declared variance of the type/region parameter
765 /// with the given id.
766 fn declared_variance(&self,
767 param_def_id: ast::DefId,
768 item_def_id: ast::DefId,
769 kind: ParamKind,
770 space: ParamSpace,
771 index: usize)
772 -> VarianceTermPtr<'a> {
773 assert_eq!(param_def_id.krate, item_def_id.krate);
774
775 if param_def_id.krate == ast::LOCAL_CRATE {
776 // Parameter on an item defined within current crate:
777 // variance not yet inferred, so return a symbolic
778 // variance.
779 let InferredIndex(index) = self.inferred_index(param_def_id.node);
780 self.terms_cx.inferred_infos[index].term
781 } else {
782 // Parameter on an item defined within another crate:
783 // variance already inferred, just look it up.
784 let variances = self.tcx().item_variances(item_def_id);
785 let variance = match kind {
786 TypeParam => *variances.types.get(space, index),
787 RegionParam => *variances.regions.get(space, index),
788 };
789 self.constant_term(variance)
790 }
791 }
792
793 fn add_constraint(&mut self,
794 InferredIndex(index): InferredIndex,
795 variance: VarianceTermPtr<'a>) {
796 debug!("add_constraint(index={}, variance={:?})",
797 index, variance);
798 self.constraints.push(Constraint { inferred: InferredIndex(index),
799 variance: variance });
800 }
801
802 fn contravariant(&mut self,
803 variance: VarianceTermPtr<'a>)
804 -> VarianceTermPtr<'a> {
805 self.xform(variance, self.contravariant)
806 }
807
808 fn invariant(&mut self,
809 variance: VarianceTermPtr<'a>)
810 -> VarianceTermPtr<'a> {
811 self.xform(variance, self.invariant)
812 }
813
814 fn constant_term(&self, v: ty::Variance) -> VarianceTermPtr<'a> {
815 match v {
816 ty::Covariant => self.covariant,
817 ty::Invariant => self.invariant,
818 ty::Contravariant => self.contravariant,
819 ty::Bivariant => self.bivariant,
820 }
821 }
822
823 fn xform(&mut self,
824 v1: VarianceTermPtr<'a>,
825 v2: VarianceTermPtr<'a>)
826 -> VarianceTermPtr<'a> {
827 match (*v1, *v2) {
828 (_, ConstantTerm(ty::Covariant)) => {
829 // Applying a "covariant" transform is always a no-op
830 v1
831 }
832
833 (ConstantTerm(c1), ConstantTerm(c2)) => {
834 self.constant_term(c1.xform(c2))
835 }
836
837 _ => {
838 &*self.terms_cx.arena.alloc(TransformTerm(v1, v2))
839 }
840 }
841 }
842
843 fn add_constraints_from_trait_ref(&mut self,
844 generics: &ty::Generics<'tcx>,
845 trait_ref: ty::TraitRef<'tcx>,
846 variance: VarianceTermPtr<'a>) {
847 debug!("add_constraints_from_trait_ref: trait_ref={:?} variance={:?}",
848 trait_ref,
849 variance);
850
851 let trait_def = self.tcx().lookup_trait_def(trait_ref.def_id);
852
853 self.add_constraints_from_substs(
854 generics,
855 trait_ref.def_id,
856 trait_def.generics.types.as_slice(),
857 trait_def.generics.regions.as_slice(),
858 trait_ref.substs,
859 variance);
860 }
861
862 /// Adds constraints appropriate for an instance of `ty` appearing
863 /// in a context with the generics defined in `generics` and
864 /// ambient variance `variance`
865 fn add_constraints_from_ty(&mut self,
866 generics: &ty::Generics<'tcx>,
867 ty: Ty<'tcx>,
868 variance: VarianceTermPtr<'a>) {
869 debug!("add_constraints_from_ty(ty={:?}, variance={:?})",
870 ty,
871 variance);
872
873 match ty.sty {
874 ty::TyBool |
875 ty::TyChar | ty::TyInt(_) | ty::TyUint(_) |
876 ty::TyFloat(_) | ty::TyStr => {
877 /* leaf type -- noop */
878 }
879
880 ty::TyClosure(..) => {
881 self.tcx().sess.bug("Unexpected closure type in variance computation");
882 }
883
884 ty::TyRef(region, ref mt) => {
885 let contra = self.contravariant(variance);
886 self.add_constraints_from_region(generics, *region, contra);
887 self.add_constraints_from_mt(generics, mt, variance);
888 }
889
890 ty::TyBox(typ) | ty::TyArray(typ, _) | ty::TySlice(typ) => {
891 self.add_constraints_from_ty(generics, typ, variance);
892 }
893
894
895 ty::TyRawPtr(ref mt) => {
896 self.add_constraints_from_mt(generics, mt, variance);
897 }
898
899 ty::TyTuple(ref subtys) => {
900 for &subty in subtys {
901 self.add_constraints_from_ty(generics, subty, variance);
902 }
903 }
904
905 ty::TyEnum(def_id, substs) |
906 ty::TyStruct(def_id, substs) => {
907 let item_type = self.tcx().lookup_item_type(def_id);
908
909 // All type parameters on enums and structs should be
910 // in the TypeSpace.
911 assert!(item_type.generics.types.is_empty_in(subst::SelfSpace));
912 assert!(item_type.generics.types.is_empty_in(subst::FnSpace));
913 assert!(item_type.generics.regions.is_empty_in(subst::SelfSpace));
914 assert!(item_type.generics.regions.is_empty_in(subst::FnSpace));
915
916 self.add_constraints_from_substs(
917 generics,
918 def_id,
919 item_type.generics.types.get_slice(subst::TypeSpace),
920 item_type.generics.regions.get_slice(subst::TypeSpace),
921 substs,
922 variance);
923 }
924
925 ty::TyProjection(ref data) => {
926 let trait_ref = &data.trait_ref;
927 let trait_def = self.tcx().lookup_trait_def(trait_ref.def_id);
928 self.add_constraints_from_substs(
929 generics,
930 trait_ref.def_id,
931 trait_def.generics.types.as_slice(),
932 trait_def.generics.regions.as_slice(),
933 trait_ref.substs,
934 variance);
935 }
936
937 ty::TyTrait(ref data) => {
938 let poly_trait_ref =
939 data.principal_trait_ref_with_self_ty(self.tcx(),
940 self.tcx().types.err);
941
942 // The type `Foo<T+'a>` is contravariant w/r/t `'a`:
943 let contra = self.contravariant(variance);
944 self.add_constraints_from_region(generics, data.bounds.region_bound, contra);
945
946 // Ignore the SelfSpace, it is erased.
947 self.add_constraints_from_trait_ref(generics, poly_trait_ref.0, variance);
948
949 let projections = data.projection_bounds_with_self_ty(self.tcx(),
950 self.tcx().types.err);
951 for projection in &projections {
952 self.add_constraints_from_ty(generics, projection.0.ty, self.invariant);
953 }
954 }
955
956 ty::TyParam(ref data) => {
957 let def_id = generics.types.get(data.space, data.idx as usize).def_id;
958 assert_eq!(def_id.krate, ast::LOCAL_CRATE);
959 match self.terms_cx.inferred_map.get(&def_id.node) {
960 Some(&index) => {
961 self.add_constraint(index, variance);
962 }
963 None => {
964 // We do not infer variance for type parameters
965 // declared on methods. They will not be present
966 // in the inferred_map.
967 }
968 }
969 }
970
971 ty::TyBareFn(_, &ty::BareFnTy { ref sig, .. }) => {
972 self.add_constraints_from_sig(generics, sig, variance);
973 }
974
975 ty::TyError => {
976 // we encounter this when walking the trait references for object
977 // types, where we use TyError as the Self type
978 }
979
980 ty::TyInfer(..) => {
981 self.tcx().sess.bug(
982 &format!("unexpected type encountered in \
983 variance inference: {}", ty));
984 }
985 }
986 }
987
988
989 /// Adds constraints appropriate for a nominal type (enum, struct,
990 /// object, etc) appearing in a context with ambient variance `variance`
991 fn add_constraints_from_substs(&mut self,
992 generics: &ty::Generics<'tcx>,
993 def_id: ast::DefId,
994 type_param_defs: &[ty::TypeParameterDef<'tcx>],
995 region_param_defs: &[ty::RegionParameterDef],
996 substs: &subst::Substs<'tcx>,
997 variance: VarianceTermPtr<'a>) {
998 debug!("add_constraints_from_substs(def_id={:?}, substs={:?}, variance={:?})",
999 def_id,
1000 substs,
1001 variance);
1002
1003 for p in type_param_defs {
1004 let variance_decl =
1005 self.declared_variance(p.def_id, def_id, TypeParam,
1006 p.space, p.index as usize);
1007 let variance_i = self.xform(variance, variance_decl);
1008 let substs_ty = *substs.types.get(p.space, p.index as usize);
1009 debug!("add_constraints_from_substs: variance_decl={:?} variance_i={:?}",
1010 variance_decl, variance_i);
1011 self.add_constraints_from_ty(generics, substs_ty, variance_i);
1012 }
1013
1014 for p in region_param_defs {
1015 let variance_decl =
1016 self.declared_variance(p.def_id, def_id,
1017 RegionParam, p.space, p.index as usize);
1018 let variance_i = self.xform(variance, variance_decl);
1019 let substs_r = *substs.regions().get(p.space, p.index as usize);
1020 self.add_constraints_from_region(generics, substs_r, variance_i);
1021 }
1022 }
1023
1024 /// Adds constraints appropriate for a function with signature
1025 /// `sig` appearing in a context with ambient variance `variance`
1026 fn add_constraints_from_sig(&mut self,
1027 generics: &ty::Generics<'tcx>,
1028 sig: &ty::PolyFnSig<'tcx>,
1029 variance: VarianceTermPtr<'a>) {
1030 let contra = self.contravariant(variance);
1031 for &input in &sig.0.inputs {
1032 self.add_constraints_from_ty(generics, input, contra);
1033 }
1034 if let ty::FnConverging(result_type) = sig.0.output {
1035 self.add_constraints_from_ty(generics, result_type, variance);
1036 }
1037 }
1038
1039 /// Adds constraints appropriate for a region appearing in a
1040 /// context with ambient variance `variance`
1041 fn add_constraints_from_region(&mut self,
1042 _generics: &ty::Generics<'tcx>,
1043 region: ty::Region,
1044 variance: VarianceTermPtr<'a>) {
1045 match region {
1046 ty::ReEarlyBound(ref data) => {
1047 if self.is_to_be_inferred(data.param_id) {
1048 let index = self.inferred_index(data.param_id);
1049 self.add_constraint(index, variance);
1050 }
1051 }
1052
1053 ty::ReStatic => { }
1054
1055 ty::ReLateBound(..) => {
1056 // We do not infer variance for region parameters on
1057 // methods or in fn types.
1058 }
1059
1060 ty::ReFree(..) | ty::ReScope(..) | ty::ReInfer(..) |
1061 ty::ReEmpty => {
1062 // We don't expect to see anything but 'static or bound
1063 // regions when visiting member types or method types.
1064 self.tcx()
1065 .sess
1066 .bug(&format!("unexpected region encountered in variance \
1067 inference: {:?}",
1068 region));
1069 }
1070 }
1071 }
1072
1073 /// Adds constraints appropriate for a mutability-type pair
1074 /// appearing in a context with ambient variance `variance`
1075 fn add_constraints_from_mt(&mut self,
1076 generics: &ty::Generics<'tcx>,
1077 mt: &ty::TypeAndMut<'tcx>,
1078 variance: VarianceTermPtr<'a>) {
1079 match mt.mutbl {
1080 ast::MutMutable => {
1081 let invar = self.invariant(variance);
1082 self.add_constraints_from_ty(generics, mt.ty, invar);
1083 }
1084
1085 ast::MutImmutable => {
1086 self.add_constraints_from_ty(generics, mt.ty, variance);
1087 }
1088 }
1089 }
1090 }
1091
1092 // Constraint solving
1093 //
1094 // The final phase iterates over the constraints, refining the variance
1095 // for each inferred until a fixed point is reached. This will be the
1096 // optimal solution to the constraints. The final variance for each
1097 // inferred is then written into the `variance_map` in the tcx.
1098
1099 struct SolveContext<'a, 'tcx: 'a> {
1100 terms_cx: TermsContext<'a, 'tcx>,
1101 constraints: Vec<Constraint<'a>> ,
1102
1103 // Maps from an InferredIndex to the inferred value for that variable.
1104 solutions: Vec<ty::Variance> }
1105
1106 fn solve_constraints(constraints_cx: ConstraintContext) {
1107 let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
1108
1109 let solutions =
1110 terms_cx.inferred_infos.iter()
1111 .map(|ii| ii.initial_variance)
1112 .collect();
1113
1114 let mut solutions_cx = SolveContext {
1115 terms_cx: terms_cx,
1116 constraints: constraints,
1117 solutions: solutions
1118 };
1119 solutions_cx.solve();
1120 solutions_cx.write();
1121 }
1122
1123 impl<'a, 'tcx> SolveContext<'a, 'tcx> {
1124 fn solve(&mut self) {
1125 // Propagate constraints until a fixed point is reached. Note
1126 // that the maximum number of iterations is 2C where C is the
1127 // number of constraints (each variable can change values at most
1128 // twice). Since the number of constraints is linear in the size of the
1129 // input, so is the inference process.
1130 let mut changed = true;
1131 while changed {
1132 changed = false;
1133
1134 for constraint in &self.constraints {
1135 let Constraint { inferred, variance: term } = *constraint;
1136 let InferredIndex(inferred) = inferred;
1137 let variance = self.evaluate(term);
1138 let old_value = self.solutions[inferred];
1139 let new_value = glb(variance, old_value);
1140 if old_value != new_value {
1141 debug!("Updating inferred {} (node {}) \
1142 from {:?} to {:?} due to {:?}",
1143 inferred,
1144 self.terms_cx
1145 .inferred_infos[inferred]
1146 .param_id,
1147 old_value,
1148 new_value,
1149 term);
1150
1151 self.solutions[inferred] = new_value;
1152 changed = true;
1153 }
1154 }
1155 }
1156 }
1157
1158 fn write(&self) {
1159 // Collect all the variances for a particular item and stick
1160 // them into the variance map. We rely on the fact that we
1161 // generate all the inferreds for a particular item
1162 // consecutively (that is, we collect solutions for an item
1163 // until we see a new item id, and we assume (1) the solutions
1164 // are in the same order as the type parameters were declared
1165 // and (2) all solutions for a given item appear before a new
1166 // item id).
1167
1168 let tcx = self.terms_cx.tcx;
1169 let solutions = &self.solutions;
1170 let inferred_infos = &self.terms_cx.inferred_infos;
1171 let mut index = 0;
1172 let num_inferred = self.terms_cx.num_inferred();
1173 while index < num_inferred {
1174 let item_id = inferred_infos[index].item_id;
1175 let mut types = VecPerParamSpace::empty();
1176 let mut regions = VecPerParamSpace::empty();
1177
1178 while index < num_inferred && inferred_infos[index].item_id == item_id {
1179 let info = &inferred_infos[index];
1180 let variance = solutions[index];
1181 debug!("Index {} Info {} / {:?} / {:?} Variance {:?}",
1182 index, info.index, info.kind, info.space, variance);
1183 match info.kind {
1184 TypeParam => { types.push(info.space, variance); }
1185 RegionParam => { regions.push(info.space, variance); }
1186 }
1187
1188 index += 1;
1189 }
1190
1191 let item_variances = ty::ItemVariances {
1192 types: types,
1193 regions: regions
1194 };
1195 debug!("item_id={} item_variances={:?}",
1196 item_id,
1197 item_variances);
1198
1199 let item_def_id = ast_util::local_def(item_id);
1200
1201 // For unit testing: check for a special "rustc_variance"
1202 // attribute and report an error with various results if found.
1203 if tcx.has_attr(item_def_id, "rustc_variance") {
1204 span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{:?}", item_variances);
1205 }
1206
1207 let newly_added = tcx.item_variance_map.borrow_mut()
1208 .insert(item_def_id, Rc::new(item_variances)).is_none();
1209 assert!(newly_added);
1210 }
1211 }
1212
1213 fn evaluate(&self, term: VarianceTermPtr<'a>) -> ty::Variance {
1214 match *term {
1215 ConstantTerm(v) => {
1216 v
1217 }
1218
1219 TransformTerm(t1, t2) => {
1220 let v1 = self.evaluate(t1);
1221 let v2 = self.evaluate(t2);
1222 v1.xform(v2)
1223 }
1224
1225 InferredTerm(InferredIndex(index)) => {
1226 self.solutions[index]
1227 }
1228 }
1229 }
1230 }
1231
1232 // Miscellaneous transformations on variance
1233
1234 trait Xform {
1235 fn xform(self, v: Self) -> Self;
1236 }
1237
1238 impl Xform for ty::Variance {
1239 fn xform(self, v: ty::Variance) -> ty::Variance {
1240 // "Variance transformation", Figure 1 of The Paper
1241 match (self, v) {
1242 // Figure 1, column 1.
1243 (ty::Covariant, ty::Covariant) => ty::Covariant,
1244 (ty::Covariant, ty::Contravariant) => ty::Contravariant,
1245 (ty::Covariant, ty::Invariant) => ty::Invariant,
1246 (ty::Covariant, ty::Bivariant) => ty::Bivariant,
1247
1248 // Figure 1, column 2.
1249 (ty::Contravariant, ty::Covariant) => ty::Contravariant,
1250 (ty::Contravariant, ty::Contravariant) => ty::Covariant,
1251 (ty::Contravariant, ty::Invariant) => ty::Invariant,
1252 (ty::Contravariant, ty::Bivariant) => ty::Bivariant,
1253
1254 // Figure 1, column 3.
1255 (ty::Invariant, _) => ty::Invariant,
1256
1257 // Figure 1, column 4.
1258 (ty::Bivariant, _) => ty::Bivariant,
1259 }
1260 }
1261 }
1262
1263 fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance {
1264 // Greatest lower bound of the variance lattice as
1265 // defined in The Paper:
1266 //
1267 // *
1268 // - +
1269 // o
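// For example, glb(Covariant, Contravariant) = Invariant, and
// glb(Bivariant, v) = v for any v.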
1270 match (v1, v2) {
1271 (ty::Invariant, _) | (_, ty::Invariant) => ty::Invariant,
1272
1273 (ty::Covariant, ty::Contravariant) => ty::Invariant,
1274 (ty::Contravariant, ty::Covariant) => ty::Invariant,
1275
1276 (ty::Covariant, ty::Covariant) => ty::Covariant,
1277
1278 (ty::Contravariant, ty::Contravariant) => ty::Contravariant,
1279
1280 (x, ty::Bivariant) | (ty::Bivariant, x) => x,
1281 }
1282 }