// compiler/rustc_middle/src/ty/util.rs (rustc 1.52.1)
//! Miscellaneous type-system utilities that are too small to deserve their own modules.

use crate::ich::NodeIdHashingMode;
use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use crate::ty::fold::TypeFolder;
use crate::ty::layout::IntegerExt;
use crate::ty::query::TyCtxtAt;
use crate::ty::subst::{GenericArgKind, Subst, SubstsRef};
use crate::ty::TyKind::*;
use crate::ty::{self, DefIdTree, List, Ty, TyCtxt, TypeFoldable};
use rustc_apfloat::Float as _;
use rustc_ast as ast;
use rustc_attr::{self as attr, SignedInt, UnsignedInt};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorReported;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{Integer, Size, TargetDataLayout};
use smallvec::SmallVec;
use std::{cmp, fmt};

#[derive(Copy, Clone, Debug)]
pub struct Discr<'tcx> {
    /// Bit representation of the discriminant (e.g., `-1i8` is `0xFF_u128`).
    pub val: u128,
    pub ty: Ty<'tcx>,
}

impl<'tcx> fmt::Display for Discr<'tcx> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self.ty.kind() {
            ty::Int(ity) => {
                let size = ty::tls::with(|tcx| Integer::from_int_ty(&tcx, ity).size());
                let x = self.val;
                // sign extend the raw representation to be an i128
                let x = size.sign_extend(x) as i128;
                write!(fmt, "{}", x)
            }
            _ => write!(fmt, "{}", self.val),
        }
    }
}

fn signed_min(size: Size) -> i128 {
    size.sign_extend(1_u128 << (size.bits() - 1)) as i128
}

fn signed_max(size: Size) -> i128 {
    i128::MAX >> (128 - size.bits())
}

fn unsigned_max(size: Size) -> u128 {
    u128::MAX >> (128 - size.bits())
}

fn int_size_and_signed<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (Size, bool) {
    let (int, signed) = match *ty.kind() {
        Int(ity) => (Integer::from_int_ty(&tcx, ity), true),
        Uint(uty) => (Integer::from_uint_ty(&tcx, uty), false),
        _ => bug!("non integer discriminant"),
    };
    (int.size(), signed)
}

impl<'tcx> Discr<'tcx> {
    /// Adds `1` to the value and wraps around if the maximum for the type is reached.
    pub fn wrap_incr(self, tcx: TyCtxt<'tcx>) -> Self {
        self.checked_add(tcx, 1).0
    }
    pub fn checked_add(self, tcx: TyCtxt<'tcx>, n: u128) -> (Self, bool) {
        let (size, signed) = int_size_and_signed(tcx, self.ty);
        let (val, oflo) = if signed {
            let min = signed_min(size);
            let max = signed_max(size);
            let val = size.sign_extend(self.val) as i128;
            assert!(n < (i128::MAX as u128));
            let n = n as i128;
            let oflo = val > max - n;
            let val = if oflo { min + (n - (max - val) - 1) } else { val + n };
            // zero the upper bits
            let val = val as u128;
            let val = size.truncate(val);
            (val, oflo)
        } else {
            let max = unsigned_max(size);
            let val = self.val;
            let oflo = val > max - n;
            let val = if oflo { n - (max - val) - 1 } else { val + n };
            (val, oflo)
        };
        (Self { val, ty: self.ty }, oflo)
    }
}
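
// Illustrative sketch (not part of the upstream source): how `checked_add` reports
// wraparound for a signed discriminant. Assumes some `tcx: TyCtxt<'tcx>` is in scope,
// e.g. inside a query provider.
//
//     let d = Discr { val: 127, ty: tcx.types.i8 };
//     let (next, overflowed) = d.checked_add(tcx, 1);
//     assert!(overflowed);
//     // The raw bits are truncated to the type's size: -128i8 is stored as 0x80.
//     assert_eq!(next.val, 0x80);
//     // `wrap_incr` is the same operation with the overflow flag discarded.
//     assert_eq!(d.wrap_incr(tcx).val, 0x80);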

pub trait IntTypeExt {
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
    fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>>;
    fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx>;
}

impl IntTypeExt for attr::IntType {
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        match *self {
            SignedInt(ast::IntTy::I8) => tcx.types.i8,
            SignedInt(ast::IntTy::I16) => tcx.types.i16,
            SignedInt(ast::IntTy::I32) => tcx.types.i32,
            SignedInt(ast::IntTy::I64) => tcx.types.i64,
            SignedInt(ast::IntTy::I128) => tcx.types.i128,
            SignedInt(ast::IntTy::Isize) => tcx.types.isize,
            UnsignedInt(ast::UintTy::U8) => tcx.types.u8,
            UnsignedInt(ast::UintTy::U16) => tcx.types.u16,
            UnsignedInt(ast::UintTy::U32) => tcx.types.u32,
            UnsignedInt(ast::UintTy::U64) => tcx.types.u64,
            UnsignedInt(ast::UintTy::U128) => tcx.types.u128,
            UnsignedInt(ast::UintTy::Usize) => tcx.types.usize,
        }
    }

    fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx> {
        Discr { val: 0, ty: self.to_ty(tcx) }
    }

    fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>> {
        if let Some(val) = val {
            assert_eq!(self.to_ty(tcx), val.ty);
            let (new, oflo) = val.checked_add(tcx, 1);
            if oflo { None } else { Some(new) }
        } else {
            Some(self.initial_discriminant(tcx))
        }
    }
}
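
// Illustrative sketch (not part of the upstream source): `disr_incr` is how successive
// enum discriminants are produced. Starting from `None` yields the initial discriminant
// (`0`), and each subsequent call adds one until the representation overflows, at which
// point `None` signals that no further discriminant is available. Assumes a
// `tcx: TyCtxt<'tcx>` and an `int_ty: attr::IntType` are in scope.
//
//     let first = int_ty.disr_incr(tcx, None).unwrap();         // Discr { val: 0, .. }
//     let second = int_ty.disr_incr(tcx, Some(first)).unwrap(); // Discr { val: 1, .. }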

/// Describes whether a type is representable. For types that are not
/// representable, `SelfRecursive` and `ContainsRecursive` are used to
/// distinguish between types that are recursive with themselves and types that
/// contain a different recursive type. These cases can therefore be treated
/// differently when reporting errors.
///
/// The ordering of the cases is significant. They are sorted so that `cmp::max`
/// will keep the "more erroneous" of two values.
#[derive(Clone, PartialOrd, Ord, Eq, PartialEq, Debug)]
pub enum Representability {
    Representable,
    ContainsRecursive,
    SelfRecursive(Vec<Span>),
}

impl<'tcx> TyCtxt<'tcx> {
    /// Creates a hash of the type `Ty` which will be the same no matter what crate
    /// context it's calculated within. This is used by the `type_id` intrinsic.
    pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
        let mut hasher = StableHasher::new();
        let mut hcx = self.create_stable_hashing_context();

        // We want the type_id to be independent of the type's free regions, so we
        // erase them. The erase_regions() call will also anonymize bound
        // regions, which is desirable too.
        let ty = self.erase_regions(ty);

        hcx.while_hashing_spans(false, |hcx| {
            hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
                ty.hash_stable(hcx, &mut hasher);
            });
        });
        hasher.finish()
    }

    pub fn has_error_field(self, ty: Ty<'tcx>) -> bool {
        if let ty::Adt(def, substs) = *ty.kind() {
            for field in def.all_fields() {
                let field_ty = field.ty(self, substs);
                if let Error(_) = field_ty.kind() {
                    return true;
                }
            }
        }
        false
    }

    /// Attempts to return the deeply last field of nested structures, but
    /// does not apply any normalization in its search. Returns the same type
    /// if input `ty` is not a structure at all.
    pub fn struct_tail_without_normalization(self, ty: Ty<'tcx>) -> Ty<'tcx> {
        let tcx = self;
        tcx.struct_tail_with_normalize(ty, |ty| ty)
    }

    /// Returns the deeply last field of nested structures, or the same type if
    /// not a structure at all. Corresponds to the only possible unsized field,
    /// and its type can be used to determine unsizing strategy.
    ///
    /// Should only be called if `ty` has no inference variables and does not
    /// need its lifetimes preserved (e.g., as part of codegen); otherwise the
    /// normalization attempt may cause compiler bugs.
    pub fn struct_tail_erasing_lifetimes(
        self,
        ty: Ty<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> Ty<'tcx> {
        let tcx = self;
        tcx.struct_tail_with_normalize(ty, |ty| tcx.normalize_erasing_regions(param_env, ty))
    }

    /// Returns the deeply last field of nested structures, or the same type if
    /// not a structure at all. Corresponds to the only possible unsized field,
    /// and its type can be used to determine unsizing strategy.
    ///
    /// This is parameterized over the normalization strategy (i.e., how to
    /// handle `<T as Trait>::Assoc` and `impl Trait`); pass the identity
    /// function to indicate no normalization should take place.
    ///
    /// See also `struct_tail_erasing_lifetimes`, which is suitable for use
    /// during codegen.
    pub fn struct_tail_with_normalize(
        self,
        mut ty: Ty<'tcx>,
        normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
    ) -> Ty<'tcx> {
        for iteration in 0.. {
            if !self.sess.recursion_limit().value_within_limit(iteration) {
                return self.ty_error_with_message(
                    DUMMY_SP,
                    &format!("reached the recursion limit finding the struct tail for {}", ty),
                );
            }
            match *ty.kind() {
                ty::Adt(def, substs) => {
                    if !def.is_struct() {
                        break;
                    }
                    match def.non_enum_variant().fields.last() {
                        Some(f) => ty = f.ty(self, substs),
                        None => break,
                    }
                }

                ty::Tuple(tys) => {
                    if let Some((&last_ty, _)) = tys.split_last() {
                        ty = last_ty.expect_ty();
                    } else {
                        break;
                    }
                }

                ty::Projection(_) | ty::Opaque(..) => {
                    let normalized = normalize(ty);
                    if ty == normalized {
                        return ty;
                    } else {
                        ty = normalized;
                    }
                }

                _ => {
                    break;
                }
            }
        }
        ty
    }
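
    // Illustrative sketch (not part of the upstream source): for a nested newtype chain
    // such as
    //
    //     struct A(B);
    //     struct B(C);
    //     struct C([u8]);
    //
    // calling `struct_tail_with_normalize` on the type `A` with the identity closure
    // follows the last field of each struct and ends at `[u8]`, the only field that
    // could be unsized. Types that are not structs or tuples (e.g. `u32` or `&str`)
    // are returned unchanged.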

    /// Same as applying `struct_tail` on `source` and `target`, but only
    /// keeps going as long as the two types are instances of the same
    /// structure definitions.
    /// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
    /// whereas `struct_tail` produces `T` and `Trait`, respectively.
    ///
    /// Should only be called if the types have no inference variables and do
    /// not need their lifetimes preserved (e.g., as part of codegen); otherwise,
    /// the normalization attempt may cause compiler bugs.
    pub fn struct_lockstep_tails_erasing_lifetimes(
        self,
        source: Ty<'tcx>,
        target: Ty<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> (Ty<'tcx>, Ty<'tcx>) {
        let tcx = self;
        tcx.struct_lockstep_tails_with_normalize(source, target, |ty| {
            tcx.normalize_erasing_regions(param_env, ty)
        })
    }

    /// Same as applying `struct_tail` on `source` and `target`, but only
    /// keeps going as long as the two types are instances of the same
    /// structure definitions.
    /// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
    /// whereas `struct_tail` produces `T` and `Trait`, respectively.
    ///
    /// See also `struct_lockstep_tails_erasing_lifetimes`, which is suitable for use
    /// during codegen.
    pub fn struct_lockstep_tails_with_normalize(
        self,
        source: Ty<'tcx>,
        target: Ty<'tcx>,
        normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
    ) -> (Ty<'tcx>, Ty<'tcx>) {
        let (mut a, mut b) = (source, target);
        loop {
            match (&a.kind(), &b.kind()) {
                (&Adt(a_def, a_substs), &Adt(b_def, b_substs))
                    if a_def == b_def && a_def.is_struct() =>
                {
                    if let Some(f) = a_def.non_enum_variant().fields.last() {
                        a = f.ty(self, a_substs);
                        b = f.ty(self, b_substs);
                    } else {
                        break;
                    }
                }
                (&Tuple(a_tys), &Tuple(b_tys)) if a_tys.len() == b_tys.len() => {
                    if let Some(a_last) = a_tys.last() {
                        a = a_last.expect_ty();
                        b = b_tys.last().unwrap().expect_ty();
                    } else {
                        break;
                    }
                }
                (ty::Projection(_) | ty::Opaque(..), _)
                | (_, ty::Projection(_) | ty::Opaque(..)) => {
                    // If either side is a projection, attempt to
                    // progress via normalization. (Should be safe to
                    // apply to both sides as normalization is
                    // idempotent.)
                    let a_norm = normalize(a);
                    let b_norm = normalize(b);
                    if a == a_norm && b == b_norm {
                        break;
                    } else {
                        a = a_norm;
                        b = b_norm;
                    }
                }

                _ => break,
            }
        }
        (a, b)
    }
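
    // Illustrative sketch (not part of the upstream source): given
    //
    //     struct Foo<T>(T);
    //
    // walking `Foo<Foo<u8>>` and `Foo<dyn Trait>` in lockstep steps into `Foo` once on
    // both sides and stops as soon as the definitions diverge, returning
    // `(Foo<u8>, dyn Trait)`; `struct_tail` applied to each type separately would keep
    // going and yield `u8` and `dyn Trait` instead.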

    /// Calculate the destructor of a given type.
    pub fn calculate_dtor(
        self,
        adt_did: DefId,
        validate: impl Fn(Self, DefId) -> Result<(), ErrorReported>,
    ) -> Option<ty::Destructor> {
        let drop_trait = self.lang_items().drop_trait()?;
        self.ensure().coherent_trait(drop_trait);

        let ty = self.type_of(adt_did);
        let dtor_did = self.find_map_relevant_impl(drop_trait, ty, |impl_did| {
            if let Some(item) = self.associated_items(impl_did).in_definition_order().next() {
                if validate(self, impl_did).is_ok() {
                    return Some(item.def_id);
                }
            }
            None
        });

        Some(ty::Destructor { did: dtor_did? })
    }

    /// Returns the set of types that are required to be alive in
    /// order to run the destructor of `def` (see RFCs 769 and
    /// 1238).
    ///
    /// Note that this returns only the constraints for the
    /// destructor of `def` itself. For the destructors of the
    /// contents, you need `adt_dtorck_constraint`.
    pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) -> Vec<ty::subst::GenericArg<'tcx>> {
        let dtor = match def.destructor(self) {
            None => {
                debug!("destructor_constraints({:?}) - no dtor", def.did);
                return vec![];
            }
            Some(dtor) => dtor.did,
        };

        let impl_def_id = self.associated_item(dtor).container.id();
        let impl_generics = self.generics_of(impl_def_id);

        // We have a destructor - all the parameters that are not
        // pure_wrt_drop (i.e., don't have a #[may_dangle] attribute)
        // must be live.

        // We need to return the list of parameters from the ADT's
        // generics/substs that correspond to impure parameters on the
        // impl's generics. This is a bit ugly, but conceptually simple:
        //
        // Suppose our ADT looks like the following
        //
        //     struct S<X, Y, Z>(X, Y, Z);
        //
        // and the impl is
        //
        //     impl<#[may_dangle] P0, P1, P2> Drop for S<P1, P2, P0>
        //
        // We want to return the parameters (X, Y). For that, we match
        // up the item-substs <X, Y, Z> with the substs on the impl ADT,
        // <P1, P2, P0>, and then look up which of the impl substs refer to
        // parameters marked as pure.

        let impl_substs = match *self.type_of(impl_def_id).kind() {
            ty::Adt(def_, substs) if def_ == def => substs,
            _ => bug!(),
        };

        let item_substs = match *self.type_of(def.did).kind() {
            ty::Adt(def_, substs) if def_ == def => substs,
            _ => bug!(),
        };

        let result = item_substs
            .iter()
            .zip(impl_substs.iter())
            .filter(|&(_, k)| {
                match k.unpack() {
                    GenericArgKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => {
                        !impl_generics.region_param(ebr, self).pure_wrt_drop
                    }
                    GenericArgKind::Type(&ty::TyS { kind: ty::Param(ref pt), .. }) => {
                        !impl_generics.type_param(pt, self).pure_wrt_drop
                    }
                    GenericArgKind::Const(&ty::Const {
                        val: ty::ConstKind::Param(ref pc), ..
                    }) => !impl_generics.const_param(pc, self).pure_wrt_drop,
                    GenericArgKind::Lifetime(_)
                    | GenericArgKind::Type(_)
                    | GenericArgKind::Const(_) => {
                        // Not a type, const or region param: this should be reported
                        // as an error.
                        false
                    }
                }
            })
            .map(|(item_param, _)| item_param)
            .collect();
        debug!("destructor_constraint({:?}) = {:?}", def.did, result);
        result
    }

    /// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). Note
    /// that closures have a `DefId`, but the closure *expression* also
    /// has a `HirId` that is located within the context where the
    /// closure appears (and, sadly, a corresponding `NodeId`, since
    /// those are not yet phased out). The parent of the closure's
    /// `DefId` will also be the context where it appears.
    pub fn is_closure(self, def_id: DefId) -> bool {
        matches!(self.def_kind(def_id), DefKind::Closure | DefKind::Generator)
    }

    /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`).
    pub fn is_trait(self, def_id: DefId) -> bool {
        self.def_kind(def_id) == DefKind::Trait
    }

    /// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`),
    /// and `false` otherwise.
    pub fn is_trait_alias(self, def_id: DefId) -> bool {
        self.def_kind(def_id) == DefKind::TraitAlias
    }

    /// Returns `true` if this `DefId` refers to the implicit constructor for
    /// a tuple struct like `struct Foo(u32)`, and `false` otherwise.
    pub fn is_constructor(self, def_id: DefId) -> bool {
        matches!(self.def_kind(def_id), DefKind::Ctor(..))
    }

    /// Given the def-ID of a fn or closure, returns the def-ID of
    /// the innermost fn item that the closure is contained within.
    /// This is a significant `DefId` because, when we do
    /// type-checking, we type-check this fn item and all of its
    /// (transitive) closures together. Therefore, when we fetch the
    /// `typeck` of the closure, for example, we really wind up
    /// fetching the `typeck` of the enclosing fn item.
    pub fn closure_base_def_id(self, def_id: DefId) -> DefId {
        let mut def_id = def_id;
        while self.is_closure(def_id) {
            def_id = self.parent(def_id).unwrap_or_else(|| {
                bug!("closure {:?} has no parent", def_id);
            });
        }
        def_id
    }
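
    // Illustrative sketch (not part of the upstream source): for nested closures such as
    //
    //     fn outer() {
    //         let f = || {
    //             let g = || 1;
    //             g()
    //         };
    //     }
    //
    // `closure_base_def_id` applied to the `DefId` of either `f`'s or `g`'s closure
    // expression walks up through the closure parents and returns the `DefId` of `outer`,
    // the fn item whose `typeck` results also cover both closures.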

    /// Given the `DefId` and substs of a closure, creates the type of
    /// the `self` argument that the closure expects. For example, for a
    /// `Fn` closure, this would return a reference type `&T` where
    /// `T = closure_ty`.
    ///
    /// Returns `None` if this closure's kind has not yet been inferred.
    /// This should only be possible during type checking.
    ///
    /// Note that the return value is a late-bound region and hence
    /// wrapped in a binder.
    pub fn closure_env_ty(
        self,
        closure_def_id: DefId,
        closure_substs: SubstsRef<'tcx>,
    ) -> Option<ty::Binder<Ty<'tcx>>> {
        let closure_ty = self.mk_closure(closure_def_id, closure_substs);
        let br = ty::BoundRegion { kind: ty::BrEnv };
        let env_region = ty::ReLateBound(ty::INNERMOST, br);
        let closure_kind_ty = closure_substs.as_closure().kind_ty();
        let closure_kind = closure_kind_ty.to_opt_closure_kind()?;
        let env_ty = match closure_kind {
            ty::ClosureKind::Fn => self.mk_imm_ref(self.mk_region(env_region), closure_ty),
            ty::ClosureKind::FnMut => self.mk_mut_ref(self.mk_region(env_region), closure_ty),
            ty::ClosureKind::FnOnce => closure_ty,
        };
        Some(ty::Binder::bind(env_ty))
    }
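
    // Illustrative summary (not part of the upstream source) of the environment type
    // produced per inferred closure kind, for a closure type `C`:
    //
    //     ty::ClosureKind::Fn     => for<'env> &'env C
    //     ty::ClosureKind::FnMut  => for<'env> &'env mut C
    //     ty::ClosureKind::FnOnce => C (taken by value)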

    /// Returns `true` if the node pointed to by `def_id` is a `static` item.
    pub fn is_static(self, def_id: DefId) -> bool {
        self.static_mutability(def_id).is_some()
    }

    /// Returns `true` if this is a `static` item with the `#[thread_local]` attribute.
    pub fn is_thread_local_static(self, def_id: DefId) -> bool {
        self.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL)
    }

    /// Returns `true` if the node pointed to by `def_id` is a mutable `static` item.
    pub fn is_mutable_static(self, def_id: DefId) -> bool {
        self.static_mutability(def_id) == Some(hir::Mutability::Mut)
    }

    /// Get the type of the pointer to the static that we use in MIR.
    pub fn static_ptr_ty(self, def_id: DefId) -> Ty<'tcx> {
        // Make sure that any constants in the static's type are evaluated.
        let static_ty = self.normalize_erasing_regions(ty::ParamEnv::empty(), self.type_of(def_id));

        // Make sure that accesses to unsafe statics end up using raw pointers.
        // For thread-locals, this needs to be kept in sync with `Rvalue::ty`.
        if self.is_mutable_static(def_id) {
            self.mk_mut_ptr(static_ty)
        } else if self.is_foreign_item(def_id) {
            self.mk_imm_ptr(static_ty)
        } else {
            self.mk_imm_ref(self.lifetimes.re_erased, static_ty)
        }
    }

    /// Expands the given impl trait type, stopping if the type is recursive.
    pub fn try_expand_impl_trait_type(
        self,
        def_id: DefId,
        substs: SubstsRef<'tcx>,
    ) -> Result<Ty<'tcx>, Ty<'tcx>> {
        let mut visitor = OpaqueTypeExpander {
            seen_opaque_tys: FxHashSet::default(),
            expanded_cache: FxHashMap::default(),
            primary_def_id: Some(def_id),
            found_recursion: false,
            check_recursion: true,
            tcx: self,
        };

        let expanded_type = visitor.expand_opaque_ty(def_id, substs).unwrap();
        if visitor.found_recursion { Err(expanded_type) } else { Ok(expanded_type) }
    }
}
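
// Illustrative sketch (not part of the upstream source): `try_expand_impl_trait_type`
// reports recursion for opaque types that expand into themselves. For
//
//     fn recursive() -> impl Sized {
//         (recursive(),)
//     }
//
// expanding the opaque return type reaches the same opaque type again, so the call
// returns `Err(expanded_type)`; a non-recursive `impl Trait` yields `Ok` with the
// underlying concrete type substituted in.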

struct OpaqueTypeExpander<'tcx> {
    // Contains the DefIds of the opaque types that are currently being
    // expanded. When we expand an opaque type we insert the DefId of
    // that type, and when we finish expanding that type we remove its
    // DefId.
    seen_opaque_tys: FxHashSet<DefId>,
    // Cache of all expansions we've seen so far. This is a critical
    // optimization for some large types produced by async fn trees.
    expanded_cache: FxHashMap<(DefId, SubstsRef<'tcx>), Ty<'tcx>>,
    primary_def_id: Option<DefId>,
    found_recursion: bool,
    /// Whether or not to check for recursive opaque types.
    /// This is `true` when we're explicitly checking for opaque type
    /// recursion, and `false` otherwise to avoid unnecessary work.
    check_recursion: bool,
    tcx: TyCtxt<'tcx>,
}

impl<'tcx> OpaqueTypeExpander<'tcx> {
    fn expand_opaque_ty(&mut self, def_id: DefId, substs: SubstsRef<'tcx>) -> Option<Ty<'tcx>> {
        if self.found_recursion {
            return None;
        }
        let substs = substs.fold_with(self);
        if !self.check_recursion || self.seen_opaque_tys.insert(def_id) {
            let expanded_ty = match self.expanded_cache.get(&(def_id, substs)) {
                Some(expanded_ty) => expanded_ty,
                None => {
                    let generic_ty = self.tcx.type_of(def_id);
                    let concrete_ty = generic_ty.subst(self.tcx, substs);
                    let expanded_ty = self.fold_ty(concrete_ty);
                    self.expanded_cache.insert((def_id, substs), expanded_ty);
                    expanded_ty
                }
            };
            if self.check_recursion {
                self.seen_opaque_tys.remove(&def_id);
            }
            Some(expanded_ty)
        } else {
            // If another opaque type that we contain is recursive, then it
            // will report the error, so we don't have to.
            self.found_recursion = def_id == *self.primary_def_id.as_ref().unwrap();
            None
        }
    }
}

impl<'tcx> TypeFolder<'tcx> for OpaqueTypeExpander<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
        if let ty::Opaque(def_id, substs) = t.kind {
            self.expand_opaque_ty(def_id, substs).unwrap_or(t)
        } else if t.has_opaque_types() {
            t.super_fold_with(self)
        } else {
            t
        }
    }
}

impl<'tcx> ty::TyS<'tcx> {
    /// Returns the maximum value for the given numeric type (including `char`s)
    /// or returns `None` if the type is not numeric.
    pub fn numeric_max_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
        let val = match self.kind() {
            ty::Int(_) | ty::Uint(_) => {
                let (size, signed) = int_size_and_signed(tcx, self);
                let val = if signed { signed_max(size) as u128 } else { unsigned_max(size) };
                Some(val)
            }
            ty::Char => Some(std::char::MAX as u128),
            ty::Float(fty) => Some(match fty {
                ty::FloatTy::F32 => rustc_apfloat::ieee::Single::INFINITY.to_bits(),
                ty::FloatTy::F64 => rustc_apfloat::ieee::Double::INFINITY.to_bits(),
            }),
            _ => None,
        };
        val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
    }

    /// Returns the minimum value for the given numeric type (including `char`s)
    /// or returns `None` if the type is not numeric.
    pub fn numeric_min_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
        let val = match self.kind() {
            ty::Int(_) | ty::Uint(_) => {
                let (size, signed) = int_size_and_signed(tcx, self);
                let val = if signed { size.truncate(signed_min(size) as u128) } else { 0 };
                Some(val)
            }
            ty::Char => Some(0),
            ty::Float(fty) => Some(match fty {
                ty::FloatTy::F32 => (-::rustc_apfloat::ieee::Single::INFINITY).to_bits(),
                ty::FloatTy::F64 => (-::rustc_apfloat::ieee::Double::INFINITY).to_bits(),
            }),
            _ => None,
        };
        val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
    }
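
    // Illustrative sketch (not part of the upstream source), assuming a `tcx` is in
    // scope: for `i8` the two methods produce constants whose bit representations are
    // the truncated two's-complement values.
    //
    //     let max = tcx.types.i8.numeric_max_val(tcx).unwrap(); // bits 0x7F (127)
    //     let min = tcx.types.i8.numeric_min_val(tcx).unwrap(); // bits 0x80 (-128)
    //
    // Non-numeric types such as `&str` return `None`.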

    /// Checks whether values of this type `T` are *moved* or *copied*
    /// when referenced -- this amounts to a check for whether `T:
    /// Copy`, but note that we **don't** consider lifetimes when
    /// doing this check. This means that we may generate MIR which
    /// does copies even when the type actually doesn't satisfy the
    /// full requirements for the `Copy` trait (cc #29149) -- this
    /// winds up being reported as an error during NLL borrow check.
    pub fn is_copy_modulo_regions(
        &'tcx self,
        tcx_at: TyCtxtAt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> bool {
        tcx_at.is_copy_raw(param_env.and(self))
    }

    /// Checks whether values of this type `T` have a size known at
    /// compile time (i.e., whether `T: Sized`). Lifetimes are ignored
    /// for the purposes of this check, so it can be an
    /// over-approximation in generic contexts, where one can have
    /// strange rules like `<T as Foo<'static>>::Bar: Sized` that
    /// actually carry lifetime requirements.
    pub fn is_sized(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        self.is_trivially_sized(tcx_at.tcx) || tcx_at.is_sized_raw(param_env.and(self))
    }

    /// Checks whether values of this type `T` implement the `Freeze`
    /// trait -- frozen types are those that do not contain a
    /// `UnsafeCell` anywhere. This is a language concept used to
    /// distinguish "true immutability", which is relevant to
    /// optimization as well as the rules around static values. Note
    /// that the `Freeze` trait is not exposed to end users and is
    /// effectively an implementation detail.
    // FIXME: use `TyCtxtAt` instead of separate `Span`.
    pub fn is_freeze(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        self.is_trivially_freeze() || tcx_at.is_freeze_raw(param_env.and(self))
    }

    /// Fast path helper for testing if a type is `Freeze`.
    ///
    /// Returning `true` means the type is known to be `Freeze`. Returning
    /// `false` means nothing -- could be `Freeze`, might not be.
    fn is_trivially_freeze(&self) -> bool {
        match self.kind() {
            ty::Int(_)
            | ty::Uint(_)
            | ty::Float(_)
            | ty::Bool
            | ty::Char
            | ty::Str
            | ty::Never
            | ty::Ref(..)
            | ty::RawPtr(_)
            | ty::FnDef(..)
            | ty::Error(_)
            | ty::FnPtr(_) => true,
            ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_freeze),
            ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_freeze(),
            ty::Adt(..)
            | ty::Bound(..)
            | ty::Closure(..)
            | ty::Dynamic(..)
            | ty::Foreign(_)
            | ty::Generator(..)
            | ty::GeneratorWitness(_)
            | ty::Infer(_)
            | ty::Opaque(..)
            | ty::Param(_)
            | ty::Placeholder(_)
            | ty::Projection(_) => false,
        }
    }

    /// If `ty.needs_drop(...)` returns `true`, then `ty` is definitely
    /// non-copy and *might* have a destructor attached; if it returns
    /// `false`, then `ty` definitely has no destructor (i.e., no drop glue).
    ///
    /// (Note that this implies that if `ty` has a destructor attached,
    /// then `needs_drop` will definitely return `true` for `ty`.)
    ///
    /// Note that this method is used to check eligible types in unions.
    #[inline]
    pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Avoid querying in simple cases.
        match needs_drop_components(self, &tcx.data_layout) {
            Err(AlwaysRequiresDrop) => true,
            Ok(components) => {
                let query_ty = match *components {
                    [] => return false,
                    // If we've got a single component, call the query with that
                    // to increase the chance that we hit the query cache.
                    [component_ty] => component_ty,
                    _ => self,
                };
                // This doesn't depend on regions, so try to minimize distinct
                // query keys used.
                let erased = tcx.normalize_erasing_regions(param_env, query_ty);
                tcx.needs_drop_raw(param_env.and(erased))
            }
        }
    }
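
    // Illustrative sketch (not part of the upstream source), assuming a `tcx` and
    // `param_env` are available: `needs_drop` is a conservative "has drop glue" check.
    //
    //     tcx.types.u32.needs_drop(tcx, param_env) // false: no drop glue at all
    //     // A `Vec<u8>` type, by contrast, owns heap memory and reports true.
    //
    // The fast path through `needs_drop_components` (defined later in this file) answers
    // trivial cases without running the `needs_drop_raw` query at all.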

    /// Returns `true` if equality for this type is both reflexive and structural.
    ///
    /// Reflexive equality for a type is indicated by an `Eq` impl for that type.
    ///
    /// Primitive types (`u32`, `str`) have structural equality by definition. For composite data
    /// types, equality for the type as a whole is structural when it is the same as equality
    /// between all components (fields, array elements, etc.) of that type. For ADTs, structural
    /// equality is indicated by an implementation of `PartialStructuralEq` and `StructuralEq` for
    /// that type.
    ///
    /// This function is "shallow" because it may return `true` for a composite type whose fields
    /// are not `StructuralEq`. For example, `[T; 4]` has structural equality regardless of `T`
    /// because equality for arrays is determined by the equality of each array element. If you
    /// want to know whether a given call to `PartialEq::eq` will proceed structurally all the way
    /// down, you will need to use a type visitor.
    #[inline]
    pub fn is_structural_eq_shallow(&'tcx self, tcx: TyCtxt<'tcx>) -> bool {
        match self.kind() {
            // Look for an impl of both `PartialStructuralEq` and `StructuralEq`.
            Adt(..) => tcx.has_structural_eq_impls(self),

            // Primitive types that satisfy `Eq`.
            Bool | Char | Int(_) | Uint(_) | Str | Never => true,

            // Composite types that satisfy `Eq` when all of their fields do.
            //
            // Because this function is "shallow", we return `true` for these composites regardless
            // of the type(s) contained within.
            Ref(..) | Array(..) | Slice(_) | Tuple(..) => true,

            // Raw pointers use bitwise comparison.
            RawPtr(_) | FnPtr(_) => true,

            // Floating point numbers are not `Eq`.
            Float(_) => false,

            // Conservatively return `false` for all others...

            // Anonymous function types
            FnDef(..) | Closure(..) | Dynamic(..) | Generator(..) => false,

            // Generic or inferred types
            //
            // FIXME(ecstaticmorse): Maybe we should `bug` here? This should probably only be
            // called for known, fully-monomorphized types.
            Projection(_) | Opaque(..) | Param(_) | Bound(..) | Placeholder(_) | Infer(_) => false,

            Foreign(_) | GeneratorWitness(..) | Error(_) => false,
        }
    }
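
    // Illustrative sketch (not part of the upstream source), assuming a `tcx` is in
    // scope: because the check is shallow, an array type reports structural equality no
    // matter what its element type is, while a float type does not.
    //
    //     tcx.mk_array(tcx.types.f32, 4).is_structural_eq_shallow(tcx) // true: [f32; 4]
    //     tcx.types.f32.is_structural_eq_shallow(tcx)                  // false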

    pub fn same_type(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
        match (&a.kind(), &b.kind()) {
            (&Adt(did_a, substs_a), &Adt(did_b, substs_b)) => {
                if did_a != did_b {
                    return false;
                }

                substs_a.types().zip(substs_b.types()).all(|(a, b)| Self::same_type(a, b))
            }
            _ => a == b,
        }
    }

    /// Check whether a type is representable. This means it cannot contain unboxed
    /// structural recursion. This check is needed for structs and enums.
    pub fn is_representable(&'tcx self, tcx: TyCtxt<'tcx>, sp: Span) -> Representability {
        // Iterate until something non-representable is found
        fn fold_repr<It: Iterator<Item = Representability>>(iter: It) -> Representability {
            iter.fold(Representability::Representable, |r1, r2| match (r1, r2) {
                (Representability::SelfRecursive(v1), Representability::SelfRecursive(v2)) => {
                    Representability::SelfRecursive(v1.into_iter().chain(v2).collect())
                }
                (r1, r2) => cmp::max(r1, r2),
            })
        }

        fn are_inner_types_recursive<'tcx>(
            tcx: TyCtxt<'tcx>,
            sp: Span,
            seen: &mut Vec<Ty<'tcx>>,
            representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
            ty: Ty<'tcx>,
        ) -> Representability {
            match ty.kind() {
                Tuple(..) => {
                    // Find non representable
                    fold_repr(ty.tuple_fields().map(|ty| {
                        is_type_structurally_recursive(tcx, sp, seen, representable_cache, ty)
                    }))
                }
                // Fixed-length vectors.
                // FIXME(#11924) Behavior undecided for zero-length vectors.
                Array(ty, _) => {
                    is_type_structurally_recursive(tcx, sp, seen, representable_cache, ty)
                }
                Adt(def, substs) => {
                    // Find non representable fields with their spans
                    fold_repr(def.all_fields().map(|field| {
                        let ty = field.ty(tcx, substs);
                        let span = match field
                            .did
                            .as_local()
                            .map(|id| tcx.hir().local_def_id_to_hir_id(id))
                            .and_then(|id| tcx.hir().find(id))
                        {
                            Some(hir::Node::Field(field)) => field.ty.span,
                            _ => sp,
                        };
                        match is_type_structurally_recursive(
                            tcx,
                            span,
                            seen,
                            representable_cache,
                            ty,
                        ) {
                            Representability::SelfRecursive(_) => {
                                Representability::SelfRecursive(vec![span])
                            }
                            x => x,
                        }
                    }))
                }
                Closure(..) => {
                    // this check is run on type definitions, so we don't expect
                    // to see closure types
                    bug!("requires check invoked on inapplicable type: {:?}", ty)
                }
                _ => Representability::Representable,
            }
        }

        fn same_struct_or_enum<'tcx>(ty: Ty<'tcx>, def: &'tcx ty::AdtDef) -> bool {
            match *ty.kind() {
                Adt(ty_def, _) => ty_def == def,
                _ => false,
            }
        }

        // Does the type `ty` directly (without indirection through a pointer)
        // contain any types on the stack `seen`?
        fn is_type_structurally_recursive<'tcx>(
            tcx: TyCtxt<'tcx>,
            sp: Span,
            seen: &mut Vec<Ty<'tcx>>,
            representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
            ty: Ty<'tcx>,
        ) -> Representability {
            debug!("is_type_structurally_recursive: {:?} {:?}", ty, sp);
            if let Some(representability) = representable_cache.get(ty) {
                debug!(
                    "is_type_structurally_recursive: {:?} {:?} - (cached) {:?}",
                    ty, sp, representability
                );
                return representability.clone();
            }

            let representability =
                is_type_structurally_recursive_inner(tcx, sp, seen, representable_cache, ty);

            representable_cache.insert(ty, representability.clone());
            representability
        }

        fn is_type_structurally_recursive_inner<'tcx>(
            tcx: TyCtxt<'tcx>,
            sp: Span,
            seen: &mut Vec<Ty<'tcx>>,
            representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
            ty: Ty<'tcx>,
        ) -> Representability {
            match ty.kind() {
                Adt(def, _) => {
                    {
                        // Iterate through stack of previously seen types.
                        let mut iter = seen.iter();

                        // The first item in `seen` is the type we are actually curious about.
                        // We want to return SelfRecursive if this type contains itself.
                        // It is important that we DON'T take generic parameters into account
                        // for this check, so that Bar<T> in this example counts as SelfRecursive:
                        //
                        //     struct Foo;
                        //     struct Bar<T> { x: Bar<Foo> }

                        if let Some(&seen_type) = iter.next() {
                            if same_struct_or_enum(seen_type, *def) {
                                debug!("SelfRecursive: {:?} contains {:?}", seen_type, ty);
                                return Representability::SelfRecursive(vec![sp]);
                            }
                        }

                        // We also need to know whether the first item contains other types
                        // that are structurally recursive. If we don't catch this case, we
                        // will recurse infinitely for some inputs.
                        //
                        // It is important that we DO take generic parameters into account
                        // here, so that code like this is considered SelfRecursive, not
                        // ContainsRecursive:
                        //
                        //     struct Foo { Option<Option<Foo>> }

                        for &seen_type in iter {
                            if ty::TyS::same_type(ty, seen_type) {
                                debug!("ContainsRecursive: {:?} contains {:?}", seen_type, ty);
                                return Representability::ContainsRecursive;
                            }
                        }
                    }

                    // For structs and enums, track all previously seen types by pushing them
                    // onto the `seen` stack.
                    seen.push(ty);
                    let out = are_inner_types_recursive(tcx, sp, seen, representable_cache, ty);
                    seen.pop();
                    out
                }
                _ => {
                    // No need to push in other cases.
                    are_inner_types_recursive(tcx, sp, seen, representable_cache, ty)
                }
            }
        }

        debug!("is_type_representable: {:?}", self);

        // To avoid a stack overflow when checking an enum variant or struct that
        // contains a different, structurally recursive type, maintain a stack
        // of seen types and check recursion for each of them (issues #3008, #3779).
        let mut seen: Vec<Ty<'_>> = Vec::new();
        let mut representable_cache = FxHashMap::default();
        let r = is_type_structurally_recursive(tcx, sp, &mut seen, &mut representable_cache, self);
        debug!("is_type_representable: {:?} is {:?}", self, r);
        r
    }

    /// Peel off all reference types in this type until there are none left.
    ///
    /// This method is idempotent, i.e. `ty.peel_refs().peel_refs() == ty.peel_refs()`.
    ///
    /// # Examples
    ///
    /// - `u8` -> `u8`
    /// - `&'a mut u8` -> `u8`
    /// - `&'a &'b u8` -> `u8`
    /// - `&'a *const &'b u8` -> `*const &'b u8`
    pub fn peel_refs(&'tcx self) -> Ty<'tcx> {
        let mut ty = self;
        while let Ref(_, inner_ty, _) = ty.kind() {
            ty = inner_ty;
        }
        ty
    }
}
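
// Illustrative sketch (not part of the upstream source), assuming a `tcx` is in scope:
// `peel_refs` strips shared and mutable references but stops at raw pointers.
//
//     let re = tcx.lifetimes.re_erased;
//     let ty = tcx.mk_imm_ref(re, tcx.mk_mut_ref(re, tcx.types.u8)); // &&mut u8
//     assert_eq!(ty.peel_refs(), tcx.types.u8);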

pub enum ExplicitSelf<'tcx> {
    ByValue,
    ByReference(ty::Region<'tcx>, hir::Mutability),
    ByRawPointer(hir::Mutability),
    ByBox,
    Other,
}

impl<'tcx> ExplicitSelf<'tcx> {
    /// Categorizes an explicit self declaration like `self: SomeType`
    /// into either `self`, `&self`, `&mut self`, `Box<self>`, or
    /// `Other`.
    /// This is mainly used to require the `arbitrary_self_types` feature
    /// in the case of `Other`, to improve error messages in the common cases,
    /// and to make `Other` non-object-safe.
    ///
    /// Examples:
    ///
    /// ```
    /// impl<'a> Foo for &'a T {
    ///     // Legal declarations:
    ///     fn method1(self: &&'a T); // ExplicitSelf::ByReference
    ///     fn method2(self: &'a T); // ExplicitSelf::ByValue
    ///     fn method3(self: Box<&'a T>); // ExplicitSelf::ByBox
    ///     fn method4(self: Rc<&'a T>); // ExplicitSelf::Other
    ///
    ///     // Invalid cases will be caught by `check_method_receiver`:
    ///     fn method_err1(self: &'a mut T); // ExplicitSelf::Other
    ///     fn method_err2(self: &'static T) // ExplicitSelf::ByValue
    ///     fn method_err3(self: &&T) // ExplicitSelf::ByReference
    /// }
    /// ```
    ///
    pub fn determine<P>(self_arg_ty: Ty<'tcx>, is_self_ty: P) -> ExplicitSelf<'tcx>
    where
        P: Fn(Ty<'tcx>) -> bool,
    {
        use self::ExplicitSelf::*;

        match *self_arg_ty.kind() {
            _ if is_self_ty(self_arg_ty) => ByValue,
            ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl),
            ty::RawPtr(ty::TypeAndMut { ty, mutbl }) if is_self_ty(ty) => ByRawPointer(mutbl),
            ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => ByBox,
            _ => Other,
        }
    }
}

/// Returns a list of types such that the given type needs drop if and only if
/// *any* of the returned types need drop. Returns `Err(AlwaysRequiresDrop)` if
/// this type always needs drop.
pub fn needs_drop_components(
    ty: Ty<'tcx>,
    target_layout: &TargetDataLayout,
) -> Result<SmallVec<[Ty<'tcx>; 2]>, AlwaysRequiresDrop> {
    match ty.kind() {
        ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Bool
        | ty::Int(_)
        | ty::Uint(_)
        | ty::Float(_)
        | ty::Never
        | ty::FnDef(..)
        | ty::FnPtr(_)
        | ty::Char
        | ty::GeneratorWitness(..)
        | ty::RawPtr(_)
        | ty::Ref(..)
        | ty::Str => Ok(SmallVec::new()),

        // Foreign types can never have destructors.
        ty::Foreign(..) => Ok(SmallVec::new()),

        ty::Dynamic(..) | ty::Error(_) => Err(AlwaysRequiresDrop),

        ty::Slice(ty) => needs_drop_components(ty, target_layout),
        ty::Array(elem_ty, size) => {
            match needs_drop_components(elem_ty, target_layout) {
                Ok(v) if v.is_empty() => Ok(v),
                res => match size.val.try_to_bits(target_layout.pointer_size) {
                    // Arrays of size zero don't need drop, even if their element
                    // type does.
                    Some(0) => Ok(SmallVec::new()),
                    Some(_) => res,
                    // We don't know which of the cases above we are in, so
                    // return the whole type and let the caller decide what to
                    // do.
                    None => Ok(smallvec![ty]),
                },
            }
        }
        // If any field needs drop, then the whole tuple does.
        ty::Tuple(..) => ty.tuple_fields().try_fold(SmallVec::new(), move |mut acc, elem| {
            acc.extend(needs_drop_components(elem, target_layout)?);
            Ok(acc)
        }),

        // These require checking for `Copy` bounds or `Adt` destructors.
        ty::Adt(..)
        | ty::Projection(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Opaque(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::Generator(..) => Ok(smallvec![ty]),
    }
}

// Does the equivalent of
// ```
// let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
// folder.tcx().intern_*(&v)
// ```
pub fn fold_list<'tcx, F, T>(
    list: &'tcx ty::List<T>,
    folder: &mut F,
    intern: impl FnOnce(TyCtxt<'tcx>, &[T]) -> &'tcx ty::List<T>,
) -> &'tcx ty::List<T>
where
    F: TypeFolder<'tcx>,
    T: TypeFoldable<'tcx> + PartialEq + Copy,
{
    let mut iter = list.iter();
    // Look for the first element that changed
    if let Some((i, new_t)) = iter.by_ref().enumerate().find_map(|(i, t)| {
        let new_t = t.fold_with(folder);
        if new_t == t { None } else { Some((i, new_t)) }
    }) {
        // An element changed, prepare to intern the resulting list
        let mut new_list = SmallVec::<[_; 8]>::with_capacity(list.len());
        new_list.extend_from_slice(&list[..i]);
        new_list.push(new_t);
        new_list.extend(iter.map(|t| t.fold_with(folder)));
        intern(folder.tcx(), &new_list)
    } else {
        list
    }
}
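
// Illustrative sketch (not part of the upstream source): a typical caller passes the
// matching interner for the list being folded, e.g. for a substitution list something like
//
//     fold_list(substs, folder, |tcx, v| tcx.intern_substs(v))
//
// The early-exit scan means the original interned list is returned unchanged (no new
// allocation or interning) whenever the folder leaves every element as it was.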

#[derive(Copy, Clone, Debug, HashStable, TyEncodable, TyDecodable)]
pub struct AlwaysRequiresDrop;

/// Normalizes all opaque types in the given value, replacing them
/// with their underlying types.
pub fn normalize_opaque_types(
    tcx: TyCtxt<'tcx>,
    val: &'tcx List<ty::Predicate<'tcx>>,
) -> &'tcx List<ty::Predicate<'tcx>> {
    let mut visitor = OpaqueTypeExpander {
        seen_opaque_tys: FxHashSet::default(),
        expanded_cache: FxHashMap::default(),
        primary_def_id: None,
        found_recursion: false,
        check_recursion: false,
        tcx,
    };
    val.fold_with(&mut visitor)
}

pub fn provide(providers: &mut ty::query::Providers) {
    *providers = ty::query::Providers { normalize_opaque_types, ..*providers }
}