|
203 | 203 | //! failure, but rather because the target type `Foo<Y>` is itself just
204 | 204 | //! not well-formed. Basically we get to assume well-formedness of all
205 | 205 | //! types involved before considering variance.
| 206 | +//!
| 207 | +//! ### Associated types
| 208 | +//!
| 209 | +//! Any trait with an associated type is invariant with respect to all
| 210 | +//! of its inputs. To see why this makes sense, consider what
| 211 | +//! subtyping for a trait reference means:
| 212 | +//!
| 213 | +//!     <T as Trait> <: <U as Trait>
| 214 | +//!
| 215 | +//! means that if I know that `T as Trait`, I
| 216 | +//! also know that `U as Trait`. Moreover, if
| 217 | +//! you think of it as dictionary passing
| 218 | +//! style, it means that a dictionary for
| 219 | +//! `<T as Trait>` is safe to use where a
| 220 | +//! dictionary for `<U as Trait>` is
| 221 | +//! expected.
| 222 | +//!
| 223 | +//! The problem is that when you can
| 224 | +//! project types out from `<T as Trait>`,
| 225 | +//! the relationship to types projected out
| 226 | +//! of `<U as Trait>` is completely unknown
| 227 | +//! unless `T==U` (see #21726 for more
| 228 | +//! details). Making `Trait` invariant
| 229 | +//! ensures that this is true.
| 230 | +//!
| 231 | +//! *Historical note: we used to preserve this invariant another way,
| 232 | +//! by tweaking the subtyping rules and requiring that when a type `T`
| 233 | +//! appeared as part of a projection, that was considered an invariant
| 234 | +//! location, but this version does away with the need for those
| 235 | +//! somewhat "special-case-feeling" rules.*
| 236 | +//!
| 237 | +//! Another related reason is that if we didn't make traits with
| 238 | +//! associated types invariant, then projection would no longer be a
| 239 | +//! function with a single result. Consider:
| 240 | +//!
| 241 | +//! ```
| 242 | +//! trait Identity { type Out; fn foo(&self); }
| 243 | +//! impl<T> Identity for T { type Out = T; fn foo(&self) { } }
| 244 | +//! ```
| 245 | +//!
| 246 | +//! Now if I have `<&'static () as Identity>::Out`, without invariance
| 247 | +//! this could be validly derived as `&'a ()` for any `'a`:
| 248 | +//!
| 249 | +//!     <&'a () as Identity> <: <&'static () as Identity>
| 250 | +//!     if &'static () <: &'a ()    -- Identity is contravariant in Self
| 251 | +//!     if 'static : 'a             -- region subtyping rules
| 252 | +//!
| 253 | +//! This change, on the other hand, means that
| 254 | +//! `<&'static () as Identity>::Out` is always `&'static ()` (which might
| 255 | +//! then be upcast to `&'a ()`, separately). This was helpful in solving #21750.
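To make the point about unrelated projections concrete, here is a small standalone sketch. It is an editorial illustration, not part of this patch: the trait `Project`, its method `get`, and the two impls are made-up names chosen only for the example. It shows that knowing two types both implement a trait says nothing about how their associated types relate, which is why `<T as Trait>` and `<U as Trait>` may only be treated as interchangeable when `T == U`.

```
trait Project {
    type Out;
    fn get(self) -> Self::Out;
}

// Two impls whose `Out` types have no subtyping relationship at all.
impl Project for u8 {
    type Out = String;
    fn get(self) -> String { self.to_string() }
}

impl Project for bool {
    type Out = Vec<u32>;
    fn get(self) -> Vec<u32> { vec![self as u32] }
}

fn main() {
    // Each projection is fixed entirely by the chosen `Self` type; knowing
    // `u8: Project` tells us nothing about `<bool as Project>::Out`.
    let a: <u8 as Project>::Out = 7u8.get();
    let b: <bool as Project>::Out = true.get();
    println!("{} {:?}", a, b);
}
```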
206 | 256 |
207 | 257 | use self::VarianceTerm::*;
208 | 258 | use self::ParamKind::*;
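A companion sketch of the `Identity` example above, again an editorial illustration rather than part of the patch (the helper `shorten` is a made-up name, and the elided method body is filled in). With `Identity` invariant in `Self`, the projection has a single result: `<&'static () as Identity>::Out` is exactly `&'static ()`, and the step down to `&'a ()` happens afterwards, as ordinary subtyping of the projected type.

```
trait Identity {
    type Out;
    fn foo(&self);
}

impl<T> Identity for T {
    type Out = T;
    fn foo(&self) {}
}

// The parameter type normalizes to `&'static ()`; returning it as the
// shorter-lived `&'a ()` is a separate, ordinary subtyping step.
fn shorten<'a>(x: <&'static () as Identity>::Out) -> &'a () {
    x
}

fn main() {
    static UNIT: () = ();
    let short: &() = shorten(&UNIT);
    short.foo();
}
```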
|
@@ -613,7 +663,18 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> {
|
613 | 663 |                                                   &method.fty.sig,
614 | 664 |                                                   self.covariant);
615 | 665 |                 }
616 |     | -                ty::TypeTraitItem(_) => {}
| 666 | +                ty::TypeTraitItem(ref data) => {
| 667 | +                    // Any trait with an associated type is
| 668 | +                    // invariant with respect to all of its
| 669 | +                    // inputs. See the lengthy discussion in the
| 670 | +                    // comment on this module.
| 671 | +                    let projection_ty = ty::mk_projection(tcx,
| 672 | +                                                           trait_def.trait_ref.clone(),
| 673 | +                                                           data.name);
| 674 | +                    self.add_constraints_from_ty(&trait_def.generics,
| 675 | +                                                 projection_ty,
| 676 | +                                                 self.invariant);
| 677 | +                }
617 | 678 |             }
618 | 679 |         }
619 | 680 |     }
|
@@ -893,7 +954,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
893 | 954 |                                                  trait_def.generics.types.as_slice(),
894 | 955 |                                                  trait_def.generics.regions.as_slice(),
895 | 956 |                                                  trait_ref.substs,
896 |     | -                                               self.invariant);
    | 957 | +                                               variance);
897 | 958 |             }
898 | 959 |
899 | 960 |             ty::ty_trait(ref data) => {
|
|