]> git.proxmox.com Git - rustc.git/blame - src/librustc_middle/ty/util.rs
New upstream version 1.44.1+dfsg1
[rustc.git] / src / librustc_middle / ty / util.rs
CommitLineData
9fa01778
XL
1//! Miscellaneous type-system utilities that are too small to deserve their own modules.
2
9fa01778 3use crate::ich::NodeIdHashingMode;
dfeec247 4use crate::mir::interpret::{sign_extend, truncate};
ba9703b0 5use crate::ty::layout::IntegerExt;
9fa01778 6use crate::ty::query::TyCtxtAt;
dfeec247 7use crate::ty::subst::{GenericArgKind, InternalSubsts, Subst, SubstsRef};
9fa01778 8use crate::ty::TyKind::*;
dfeec247 9use crate::ty::{self, DefIdTree, GenericParamDefKind, Ty, TyCtxt, TypeFoldable};
dfeec247 10use rustc_apfloat::Float as _;
74b04a01
XL
11use rustc_ast::ast;
12use rustc_attr::{self as attr, SignedInt, UnsignedInt};
0731742a 13use rustc_data_structures::fx::{FxHashMap, FxHashSet};
dfeec247 14use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
ba9703b0 15use rustc_errors::ErrorReported;
dfeec247
XL
16use rustc_hir as hir;
17use rustc_hir::def::DefKind;
18use rustc_hir::def_id::DefId;
ba9703b0 19use rustc_hir::definitions::DefPathData;
532ac7d7 20use rustc_macros::HashStable;
dfeec247 21use rustc_span::Span;
ba9703b0 22use rustc_target::abi::{Integer, Size, TargetDataLayout};
74b04a01 23use smallvec::SmallVec;
0531ce1d 24use std::{cmp, fmt};
e9174d1e 25
0531ce1d
XL
26#[derive(Copy, Clone, Debug)]
27pub struct Discr<'tcx> {
9fa01778 28 /// Bit representation of the discriminant (e.g., `-128i8` is `0xFF_u128`).
0531ce1d 29 pub val: u128,
dfeec247 30 pub ty: Ty<'tcx>,
0531ce1d 31}
8bb4bdeb 32
0531ce1d 33impl<'tcx> fmt::Display for Discr<'tcx> {
0bf4aa26 34 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
e74abb32 35 match self.ty.kind {
b7449926 36 ty::Int(ity) => {
dfeec247 37 let size = ty::tls::with(|tcx| Integer::from_attr(&tcx, SignedInt(ity)).size());
532ac7d7 38 let x = self.val;
0531ce1d 39 // sign extend the raw representation to be an i128
532ac7d7 40 let x = sign_extend(x, size) as i128;
0531ce1d 41 write!(fmt, "{}", x)
dfeec247 42 }
0531ce1d
XL
43 _ => write!(fmt, "{}", self.val),
44 }
45 }
cc61c64b 46}
8bb4bdeb 47
dfeec247
XL
48fn signed_min(size: Size) -> i128 {
49 sign_extend(1_u128 << (size.bits() - 1), size) as i128
50}
51
52fn signed_max(size: Size) -> i128 {
74b04a01 53 i128::MAX >> (128 - size.bits())
dfeec247
XL
54}
55
56fn unsigned_max(size: Size) -> u128 {
74b04a01 57 u128::MAX >> (128 - size.bits())
dfeec247
XL
58}
59
60fn int_size_and_signed<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (Size, bool) {
61 let (int, signed) = match ty.kind {
62 Int(ity) => (Integer::from_attr(&tcx, SignedInt(ity)), true),
63 Uint(uty) => (Integer::from_attr(&tcx, UnsignedInt(uty)), false),
64 _ => bug!("non integer discriminant"),
65 };
66 (int.size(), signed)
67}
68
0531ce1d 69impl<'tcx> Discr<'tcx> {
9fa01778 70 /// Adds `1` to the value and wraps around if the maximum for the type is reached.
dc9dc135 71 pub fn wrap_incr(self, tcx: TyCtxt<'tcx>) -> Self {
0531ce1d
XL
72 self.checked_add(tcx, 1).0
73 }
dc9dc135 74 pub fn checked_add(self, tcx: TyCtxt<'tcx>, n: u128) -> (Self, bool) {
dfeec247
XL
75 let (size, signed) = int_size_and_signed(tcx, self.ty);
76 let (val, oflo) = if signed {
77 let min = signed_min(size);
78 let max = signed_max(size);
79 let val = sign_extend(self.val, size) as i128;
74b04a01 80 assert!(n < (i128::MAX as u128));
0531ce1d
XL
81 let n = n as i128;
82 let oflo = val > max - n;
dfeec247 83 let val = if oflo { min + (n - (max - val) - 1) } else { val + n };
0531ce1d
XL
84 // zero the upper bits
85 let val = val as u128;
532ac7d7 86 let val = truncate(val, size);
dfeec247 87 (val, oflo)
0531ce1d 88 } else {
dfeec247 89 let max = unsigned_max(size);
0531ce1d
XL
90 let val = self.val;
91 let oflo = val > max - n;
dfeec247
XL
92 let val = if oflo { n - (max - val) - 1 } else { val + n };
93 (val, oflo)
94 };
95 (Self { val, ty: self.ty }, oflo)
8bb4bdeb 96 }
e9174d1e
SL
97}
98
0531ce1d 99pub trait IntTypeExt {
dc9dc135
XL
100 fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
101 fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>>;
102 fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx>;
0531ce1d
XL
103}
104
e9174d1e 105impl IntTypeExt for attr::IntType {
dc9dc135 106 fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
e9174d1e 107 match *self {
dfeec247
XL
108 SignedInt(ast::IntTy::I8) => tcx.types.i8,
109 SignedInt(ast::IntTy::I16) => tcx.types.i16,
110 SignedInt(ast::IntTy::I32) => tcx.types.i32,
111 SignedInt(ast::IntTy::I64) => tcx.types.i64,
112 SignedInt(ast::IntTy::I128) => tcx.types.i128,
113 SignedInt(ast::IntTy::Isize) => tcx.types.isize,
114 UnsignedInt(ast::UintTy::U8) => tcx.types.u8,
115 UnsignedInt(ast::UintTy::U16) => tcx.types.u16,
116 UnsignedInt(ast::UintTy::U32) => tcx.types.u32,
117 UnsignedInt(ast::UintTy::U64) => tcx.types.u64,
118 UnsignedInt(ast::UintTy::U128) => tcx.types.u128,
2c00a5a8 119 UnsignedInt(ast::UintTy::Usize) => tcx.types.usize,
e9174d1e
SL
120 }
121 }
122
dc9dc135 123 fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx> {
dfeec247 124 Discr { val: 0, ty: self.to_ty(tcx) }
e9174d1e
SL
125 }
126
dc9dc135 127 fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>> {
a7813a04 128 if let Some(val) = val {
0531ce1d
XL
129 assert_eq!(self.to_ty(tcx), val.ty);
130 let (new, oflo) = val.checked_add(tcx, 1);
dfeec247 131 if oflo { None } else { Some(new) }
a7813a04
XL
132 } else {
133 Some(self.initial_discriminant(tcx))
134 }
e9174d1e
SL
135 }
136}
137
e9174d1e
SL
138/// Describes whether a type is representable. For types that are not
139/// representable, 'SelfRecursive' and 'ContainsRecursive' are used to
140/// distinguish between types that are recursive with themselves and types that
141/// contain a different recursive type. These cases can therefore be treated
142/// differently when reporting errors.
143///
144/// The ordering of the cases is significant. They are sorted so that cmp::max
145/// will keep the "more erroneous" of two values.
7cac9316 146#[derive(Clone, PartialOrd, Ord, Eq, PartialEq, Debug)]
e9174d1e
SL
147pub enum Representability {
148 Representable,
149 ContainsRecursive,
7cac9316 150 SelfRecursive(Vec<Span>),
e9174d1e
SL
151}
152
dc9dc135 153impl<'tcx> TyCtxt<'tcx> {
cc61c64b
XL
154 /// Creates a hash of the type `Ty` which will be the same no matter what crate
155 /// context it's calculated within. This is used by the `type_id` intrinsic.
156 pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
157 let mut hasher = StableHasher::new();
ea8adc8c 158 let mut hcx = self.create_stable_hashing_context();
cc61c64b 159
3b2f2976
XL
160 // We want the type_id be independent of the types free regions, so we
161 // erase them. The erase_regions() call will also anonymize bound
162 // regions, which is desirable too.
163 let ty = self.erase_regions(&ty);
164
cc61c64b
XL
165 hcx.while_hashing_spans(false, |hcx| {
166 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
167 ty.hash_stable(hcx, &mut hasher);
168 });
169 });
170 hasher.finish()
171 }
172}
173
dc9dc135 174impl<'tcx> TyCtxt<'tcx> {
5bcae85e 175 pub fn has_error_field(self, ty: Ty<'tcx>) -> bool {
e74abb32 176 if let ty::Adt(def, substs) = ty.kind {
0bf4aa26
XL
177 for field in def.all_fields() {
178 let field_ty = field.ty(self, substs);
e74abb32 179 if let Error = field_ty.kind {
0bf4aa26 180 return true;
5bcae85e
SL
181 }
182 }
5bcae85e
SL
183 }
184 false
185 }
186
416331ca
XL
187 /// Attempts to returns the deeply last field of nested structures, but
188 /// does not apply any normalization in its search. Returns the same type
189 /// if input `ty` is not a structure at all.
dfeec247 190 pub fn struct_tail_without_normalization(self, ty: Ty<'tcx>) -> Ty<'tcx> {
416331ca
XL
191 let tcx = self;
192 tcx.struct_tail_with_normalize(ty, |ty| ty)
193 }
194
195 /// Returns the deeply last field of nested structures, or the same type if
196 /// not a structure at all. Corresponds to the only possible unsized field,
197 /// and its type can be used to determine unsizing strategy.
198 ///
199 /// Should only be called if `ty` has no inference variables and does not
200 /// need its lifetimes preserved (e.g. as part of codegen); otherwise
201 /// normalization attempt may cause compiler bugs.
dfeec247
XL
202 pub fn struct_tail_erasing_lifetimes(
203 self,
204 ty: Ty<'tcx>,
205 param_env: ty::ParamEnv<'tcx>,
206 ) -> Ty<'tcx> {
416331ca
XL
207 let tcx = self;
208 tcx.struct_tail_with_normalize(ty, |ty| tcx.normalize_erasing_regions(param_env, ty))
209 }
210
211 /// Returns the deeply last field of nested structures, or the same type if
212 /// not a structure at all. Corresponds to the only possible unsized field,
213 /// and its type can be used to determine unsizing strategy.
214 ///
215 /// This is parameterized over the normalization strategy (i.e. how to
216 /// handle `<T as Trait>::Assoc` and `impl Trait`); pass the identity
217 /// function to indicate no normalization should take place.
218 ///
219 /// See also `struct_tail_erasing_lifetimes`, which is suitable for use
220 /// during codegen.
dfeec247
XL
221 pub fn struct_tail_with_normalize(
222 self,
223 mut ty: Ty<'tcx>,
224 normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
225 ) -> Ty<'tcx> {
7cac9316 226 loop {
e74abb32 227 match ty.kind {
b7449926 228 ty::Adt(def, substs) => {
7cac9316
XL
229 if !def.is_struct() {
230 break;
231 }
2c00a5a8 232 match def.non_enum_variant().fields.last() {
7cac9316
XL
233 Some(f) => ty = f.ty(self, substs),
234 None => break,
235 }
236 }
237
b7449926 238 ty::Tuple(tys) => {
7cac9316 239 if let Some((&last_ty, _)) = tys.split_last() {
48663c56 240 ty = last_ty.expect_ty();
7cac9316
XL
241 } else {
242 break;
243 }
244 }
245
416331ca
XL
246 ty::Projection(_) | ty::Opaque(..) => {
247 let normalized = normalize(ty);
248 if ty == normalized {
249 return ty;
250 } else {
251 ty = normalized;
252 }
253 }
254
7cac9316
XL
255 _ => {
256 break;
257 }
e9174d1e
SL
258 }
259 }
260 ty
261 }
262
60c5eb7d 263 /// Same as applying `struct_tail` on `source` and `target`, but only
e9174d1e
SL
264 /// keeps going as long as the two types are instances of the same
265 /// structure definitions.
a1dfa0c6 266 /// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
e9174d1e 267 /// whereas struct_tail produces `T`, and `Trait`, respectively.
416331ca
XL
268 ///
269 /// Should only be called if the types have no inference variables and do
60c5eb7d 270 /// not need their lifetimes preserved (e.g., as part of codegen); otherwise,
416331ca 271 /// normalization attempt may cause compiler bugs.
dfeec247
XL
272 pub fn struct_lockstep_tails_erasing_lifetimes(
273 self,
274 source: Ty<'tcx>,
275 target: Ty<'tcx>,
276 param_env: ty::ParamEnv<'tcx>,
277 ) -> (Ty<'tcx>, Ty<'tcx>) {
416331ca 278 let tcx = self;
dfeec247
XL
279 tcx.struct_lockstep_tails_with_normalize(source, target, |ty| {
280 tcx.normalize_erasing_regions(param_env, ty)
281 })
416331ca
XL
282 }
283
60c5eb7d 284 /// Same as applying `struct_tail` on `source` and `target`, but only
416331ca
XL
285 /// keeps going as long as the two types are instances of the same
286 /// structure definitions.
287 /// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
288 /// whereas struct_tail produces `T`, and `Trait`, respectively.
289 ///
290 /// See also `struct_lockstep_tails_erasing_lifetimes`, which is suitable for use
291 /// during codegen.
dfeec247
XL
292 pub fn struct_lockstep_tails_with_normalize(
293 self,
294 source: Ty<'tcx>,
295 target: Ty<'tcx>,
296 normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
297 ) -> (Ty<'tcx>, Ty<'tcx>) {
e9174d1e 298 let (mut a, mut b) = (source, target);
041b39d2 299 loop {
e74abb32 300 match (&a.kind, &b.kind) {
b7449926 301 (&Adt(a_def, a_substs), &Adt(b_def, b_substs))
dfeec247
XL
302 if a_def == b_def && a_def.is_struct() =>
303 {
2c00a5a8 304 if let Some(f) = a_def.non_enum_variant().fields.last() {
041b39d2
XL
305 a = f.ty(self, a_substs);
306 b = f.ty(self, b_substs);
307 } else {
308 break;
309 }
dfeec247
XL
310 }
311 (&Tuple(a_tys), &Tuple(b_tys)) if a_tys.len() == b_tys.len() => {
041b39d2 312 if let Some(a_last) = a_tys.last() {
48663c56
XL
313 a = a_last.expect_ty();
314 b = b_tys.last().unwrap().expect_ty();
041b39d2
XL
315 } else {
316 break;
317 }
dfeec247 318 }
ba9703b0
XL
319 (ty::Projection(_) | ty::Opaque(..), _)
320 | (_, ty::Projection(_) | ty::Opaque(..)) => {
416331ca
XL
321 // If either side is a projection, attempt to
322 // progress via normalization. (Should be safe to
323 // apply to both sides as normalization is
324 // idempotent.)
325 let a_norm = normalize(a);
326 let b_norm = normalize(b);
327 if a == a_norm && b == b_norm {
328 break;
329 } else {
330 a = a_norm;
331 b = b_norm;
332 }
333 }
334
cc61c64b 335 _ => break,
e9174d1e
SL
336 }
337 }
338 (a, b)
339 }
340
8bb4bdeb
XL
341 /// Calculate the destructor of a given type.
342 pub fn calculate_dtor(
343 self,
344 adt_did: DefId,
dfeec247 345 validate: &mut dyn FnMut(Self, DefId) -> Result<(), ErrorReported>,
8bb4bdeb 346 ) -> Option<ty::Destructor> {
ba9703b0 347 let drop_trait = self.lang_items().drop_trait()?;
9fa01778 348 self.ensure().coherent_trait(drop_trait);
8bb4bdeb
XL
349
350 let mut dtor_did = None;
7cac9316 351 let ty = self.type_of(adt_did);
041b39d2 352 self.for_each_relevant_impl(drop_trait, ty, |impl_did| {
74b04a01 353 if let Some(item) = self.associated_items(impl_did).in_definition_order().next() {
0bf4aa26 354 if validate(self, impl_did).is_ok() {
8bb4bdeb
XL
355 dtor_did = Some(item.def_id);
356 }
357 }
358 });
359
ff7c6d11 360 Some(ty::Destructor { did: dtor_did? })
cc61c64b
XL
361 }
362
9fa01778 363 /// Returns the set of types that are required to be alive in
cc61c64b
XL
364 /// order to run the destructor of `def` (see RFCs 769 and
365 /// 1238).
366 ///
367 /// Note that this returns only the constraints for the
368 /// destructor of `def` itself. For the destructors of the
369 /// contents, you need `adt_dtorck_constraint`.
dfeec247 370 pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) -> Vec<ty::subst::GenericArg<'tcx>> {
cc61c64b
XL
371 let dtor = match def.destructor(self) {
372 None => {
373 debug!("destructor_constraints({:?}) - no dtor", def.did);
dfeec247 374 return vec![];
cc61c64b 375 }
dfeec247 376 Some(dtor) => dtor.did,
e9174d1e 377 };
b039eaaf 378
cc61c64b 379 let impl_def_id = self.associated_item(dtor).container.id();
7cac9316 380 let impl_generics = self.generics_of(impl_def_id);
cc61c64b
XL
381
382 // We have a destructor - all the parameters that are not
383 // pure_wrt_drop (i.e, don't have a #[may_dangle] attribute)
384 // must be live.
385
386 // We need to return the list of parameters from the ADTs
387 // generics/substs that correspond to impure parameters on the
388 // impl's generics. This is a bit ugly, but conceptually simple:
389 //
390 // Suppose our ADT looks like the following
391 //
392 // struct S<X, Y, Z>(X, Y, Z);
393 //
394 // and the impl is
395 //
396 // impl<#[may_dangle] P0, P1, P2> Drop for S<P1, P2, P0>
397 //
398 // We want to return the parameters (X, Y). For that, we match
399 // up the item-substs <X, Y, Z> with the substs on the impl ADT,
400 // <P1, P2, P0>, and then look up which of the impl substs refer to
401 // parameters marked as pure.
402
e74abb32 403 let impl_substs = match self.type_of(impl_def_id).kind {
b7449926 404 ty::Adt(def_, substs) if def_ == def => substs,
dfeec247 405 _ => bug!(),
cc61c64b
XL
406 };
407
e74abb32 408 let item_substs = match self.type_of(def.did).kind {
b7449926 409 ty::Adt(def_, substs) if def_ == def => substs,
dfeec247 410 _ => bug!(),
cc61c64b
XL
411 };
412
dfeec247
XL
413 let result = item_substs
414 .iter()
415 .zip(impl_substs.iter())
cc61c64b 416 .filter(|&(_, &k)| {
0531ce1d 417 match k.unpack() {
e74abb32 418 GenericArgKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => {
0531ce1d
XL
419 !impl_generics.region_param(ebr, self).pure_wrt_drop
420 }
dfeec247 421 GenericArgKind::Type(&ty::TyS { kind: ty::Param(ref pt), .. }) => {
0531ce1d
XL
422 !impl_generics.type_param(pt, self).pure_wrt_drop
423 }
e74abb32 424 GenericArgKind::Const(&ty::Const {
dfeec247
XL
425 val: ty::ConstKind::Param(ref pc), ..
426 }) => !impl_generics.const_param(pc, self).pure_wrt_drop,
427 GenericArgKind::Lifetime(_)
428 | GenericArgKind::Type(_)
429 | GenericArgKind::Const(_) => {
532ac7d7 430 // Not a type, const or region param: this should be reported
0531ce1d
XL
431 // as an error.
432 false
433 }
cc61c64b 434 }
0bf4aa26
XL
435 })
436 .map(|(&item_param, _)| item_param)
437 .collect();
cc61c64b
XL
438 debug!("destructor_constraint({:?}) = {:?}", def.did, result);
439 result
b039eaaf 440 }
9e0c209e 441
9fa01778
XL
442 /// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). Note
443 /// that closures have a `DefId`, but the closure *expression* also
8faf50e0
XL
444 /// has a `HirId` that is located within the context where the
445 /// closure appears (and, sadly, a corresponding `NodeId`, since
446 /// those are not yet phased out). The parent of the closure's
9fa01778 447 /// `DefId` will also be the context where it appears.
abe05a73
XL
448 pub fn is_closure(self, def_id: DefId) -> bool {
449 self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr
450 }
451
9fa01778 452 /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`).
8faf50e0 453 pub fn is_trait(self, def_id: DefId) -> bool {
48663c56 454 self.def_kind(def_id) == Some(DefKind::Trait)
8faf50e0
XL
455 }
456
9fa01778
XL
457 /// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`),
458 /// and `false` otherwise.
459 pub fn is_trait_alias(self, def_id: DefId) -> bool {
48663c56 460 self.def_kind(def_id) == Some(DefKind::TraitAlias)
9fa01778
XL
461 }
462
463 /// Returns `true` if this `DefId` refers to the implicit constructor for
464 /// a tuple struct like `struct Foo(u32)`, and `false` otherwise.
532ac7d7
XL
465 pub fn is_constructor(self, def_id: DefId) -> bool {
466 self.def_key(def_id).disambiguated_data.data == DefPathData::Ctor
8faf50e0
XL
467 }
468
dc9dc135 469 /// Given the def-ID of a fn or closure, returns the def-ID of
ff7c6d11 470 /// the innermost fn item that the closure is contained within.
9fa01778 471 /// This is a significant `DefId` because, when we do
ff7c6d11 472 /// type-checking, we type-check this fn item and all of its
9fa01778 473 /// (transitive) closures together. Therefore, when we fetch the
ff7c6d11
XL
474 /// `typeck_tables_of` the closure, for example, we really wind up
475 /// fetching the `typeck_tables_of` the enclosing fn item.
cc61c64b 476 pub fn closure_base_def_id(self, def_id: DefId) -> DefId {
476ff2be 477 let mut def_id = def_id;
abe05a73 478 while self.is_closure(def_id) {
532ac7d7 479 def_id = self.parent(def_id).unwrap_or_else(|| {
476ff2be
SL
480 bug!("closure {:?} has no parent", def_id);
481 });
482 }
483 def_id
9e0c209e 484 }
cc61c64b 485
9fa01778 486 /// Given the `DefId` and substs a closure, creates the type of
ff7c6d11
XL
487 /// `self` argument that the closure expects. For example, for a
488 /// `Fn` closure, this would return a reference type `&T` where
9fa01778 489 /// `T = closure_ty`.
ff7c6d11
XL
490 ///
491 /// Returns `None` if this closure's kind has not yet been inferred.
492 /// This should only be possible during type checking.
493 ///
494 /// Note that the return value is a late-bound region and hence
495 /// wrapped in a binder.
dfeec247
XL
496 pub fn closure_env_ty(
497 self,
498 closure_def_id: DefId,
499 closure_substs: SubstsRef<'tcx>,
500 ) -> Option<ty::Binder<Ty<'tcx>>> {
ff7c6d11 501 let closure_ty = self.mk_closure(closure_def_id, closure_substs);
94b46f34 502 let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
ba9703b0 503 let closure_kind_ty = closure_substs.as_closure().kind_ty();
ff7c6d11
XL
504 let closure_kind = closure_kind_ty.to_opt_closure_kind()?;
505 let env_ty = match closure_kind {
506 ty::ClosureKind::Fn => self.mk_imm_ref(self.mk_region(env_region), closure_ty),
507 ty::ClosureKind::FnMut => self.mk_mut_ref(self.mk_region(env_region), closure_ty),
508 ty::ClosureKind::FnOnce => closure_ty,
509 };
83c7162d 510 Some(ty::Binder::bind(env_ty))
ff7c6d11
XL
511 }
512
532ac7d7 513 /// Given the `DefId` of some item that has no type or const parameters, make
cc61c64b 514 /// a suitable "empty substs" for it.
532ac7d7 515 pub fn empty_substs_for_def_id(self, item_def_id: DefId) -> SubstsRef<'tcx> {
dfeec247
XL
516 InternalSubsts::for_item(self, item_def_id, |param, _| match param.kind {
517 GenericParamDefKind::Lifetime => self.lifetimes.re_erased.into(),
518 GenericParamDefKind::Type { .. } => {
519 bug!("empty_substs_for_def_id: {:?} has type parameters", item_def_id)
520 }
521 GenericParamDefKind::Const { .. } => {
522 bug!("empty_substs_for_def_id: {:?} has const parameters", item_def_id)
94b46f34 523 }
cc61c64b
XL
524 })
525 }
7cac9316 526
48663c56
XL
527 /// Returns `true` if the node pointed to by `def_id` is a `static` item.
528 pub fn is_static(&self, def_id: DefId) -> bool {
529 self.static_mutability(def_id).is_some()
530 }
531
532 /// Returns `true` if the node pointed to by `def_id` is a mutable `static` item.
533 pub fn is_mutable_static(&self, def_id: DefId) -> bool {
dfeec247 534 self.static_mutability(def_id) == Some(hir::Mutability::Mut)
60c5eb7d
XL
535 }
536
537 /// Get the type of the pointer to the static that we use in MIR.
538 pub fn static_ptr_ty(&self, def_id: DefId) -> Ty<'tcx> {
539 // Make sure that any constants in the static's type are evaluated.
dfeec247 540 let static_ty = self.normalize_erasing_regions(ty::ParamEnv::empty(), self.type_of(def_id));
60c5eb7d
XL
541
542 if self.is_mutable_static(def_id) {
543 self.mk_mut_ptr(static_ty)
544 } else {
545 self.mk_imm_ref(self.lifetimes.re_erased, static_ty)
546 }
abe05a73 547 }
0731742a
XL
548
549 /// Expands the given impl trait type, stopping if the type is recursive.
550 pub fn try_expand_impl_trait_type(
551 self,
552 def_id: DefId,
532ac7d7 553 substs: SubstsRef<'tcx>,
0731742a
XL
554 ) -> Result<Ty<'tcx>, Ty<'tcx>> {
555 use crate::ty::fold::TypeFolder;
556
dc9dc135 557 struct OpaqueTypeExpander<'tcx> {
0731742a
XL
558 // Contains the DefIds of the opaque types that are currently being
559 // expanded. When we expand an opaque type we insert the DefId of
560 // that type, and when we finish expanding that type we remove the
561 // its DefId.
562 seen_opaque_tys: FxHashSet<DefId>,
e1599b0c
XL
563 // Cache of all expansions we've seen so far. This is a critical
564 // optimization for some large types produced by async fn trees.
565 expanded_cache: FxHashMap<(DefId, SubstsRef<'tcx>), Ty<'tcx>>,
0731742a
XL
566 primary_def_id: DefId,
567 found_recursion: bool,
dc9dc135 568 tcx: TyCtxt<'tcx>,
0731742a
XL
569 }
570
dc9dc135 571 impl<'tcx> OpaqueTypeExpander<'tcx> {
0731742a
XL
572 fn expand_opaque_ty(
573 &mut self,
574 def_id: DefId,
532ac7d7 575 substs: SubstsRef<'tcx>,
0731742a
XL
576 ) -> Option<Ty<'tcx>> {
577 if self.found_recursion {
e1599b0c
XL
578 return None;
579 }
580 let substs = substs.fold_with(self);
581 if self.seen_opaque_tys.insert(def_id) {
582 let expanded_ty = match self.expanded_cache.get(&(def_id, substs)) {
583 Some(expanded_ty) => expanded_ty,
584 None => {
585 let generic_ty = self.tcx.type_of(def_id);
586 let concrete_ty = generic_ty.subst(self.tcx, substs);
587 let expanded_ty = self.fold_ty(concrete_ty);
588 self.expanded_cache.insert((def_id, substs), expanded_ty);
589 expanded_ty
590 }
591 };
0731742a
XL
592 self.seen_opaque_tys.remove(&def_id);
593 Some(expanded_ty)
594 } else {
595 // If another opaque type that we contain is recursive, then it
596 // will report the error, so we don't have to.
597 self.found_recursion = def_id == self.primary_def_id;
598 None
599 }
600 }
601 }
602
dc9dc135
XL
603 impl<'tcx> TypeFolder<'tcx> for OpaqueTypeExpander<'tcx> {
604 fn tcx(&self) -> TyCtxt<'tcx> {
0731742a
XL
605 self.tcx
606 }
607
608 fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
e74abb32 609 if let ty::Opaque(def_id, substs) = t.kind {
0731742a 610 self.expand_opaque_ty(def_id, substs).unwrap_or(t)
74b04a01 611 } else if t.has_opaque_types() {
0731742a 612 t.super_fold_with(self)
e1599b0c
XL
613 } else {
614 t
0731742a
XL
615 }
616 }
617 }
618
619 let mut visitor = OpaqueTypeExpander {
620 seen_opaque_tys: FxHashSet::default(),
e1599b0c 621 expanded_cache: FxHashMap::default(),
0731742a
XL
622 primary_def_id: def_id,
623 found_recursion: false,
624 tcx: self,
625 };
626 let expanded_type = visitor.expand_opaque_ty(def_id, substs).unwrap();
dfeec247 627 if visitor.found_recursion { Err(expanded_type) } else { Ok(expanded_type) }
0731742a 628 }
9e0c209e
SL
629}
630
dc9dc135 631impl<'tcx> ty::TyS<'tcx> {
dfeec247
XL
632 /// Returns the maximum value for the given numeric type (including `char`s)
633 /// or returns `None` if the type is not numeric.
634 pub fn numeric_max_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
635 let val = match self.kind {
636 ty::Int(_) | ty::Uint(_) => {
637 let (size, signed) = int_size_and_signed(tcx, self);
638 let val = if signed { signed_max(size) as u128 } else { unsigned_max(size) };
639 Some(val)
640 }
641 ty::Char => Some(std::char::MAX as u128),
642 ty::Float(fty) => Some(match fty {
643 ast::FloatTy::F32 => ::rustc_apfloat::ieee::Single::INFINITY.to_bits(),
644 ast::FloatTy::F64 => ::rustc_apfloat::ieee::Double::INFINITY.to_bits(),
645 }),
646 _ => None,
647 };
648 val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
649 }
650
651 /// Returns the minimum value for the given numeric type (including `char`s)
652 /// or returns `None` if the type is not numeric.
653 pub fn numeric_min_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
654 let val = match self.kind {
655 ty::Int(_) | ty::Uint(_) => {
656 let (size, signed) = int_size_and_signed(tcx, self);
657 let val = if signed { truncate(signed_min(size) as u128, size) } else { 0 };
658 Some(val)
659 }
660 ty::Char => Some(0),
661 ty::Float(fty) => Some(match fty {
662 ast::FloatTy::F32 => (-::rustc_apfloat::ieee::Single::INFINITY).to_bits(),
663 ast::FloatTy::F64 => (-::rustc_apfloat::ieee::Double::INFINITY).to_bits(),
664 }),
665 _ => None,
666 };
667 val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
668 }
669
0731742a
XL
670 /// Checks whether values of this type `T` are *moved* or *copied*
671 /// when referenced -- this amounts to a check for whether `T:
672 /// Copy`, but note that we **don't** consider lifetimes when
673 /// doing this check. This means that we may generate MIR which
674 /// does copies even when the type actually doesn't satisfy the
675 /// full requirements for the `Copy` trait (cc #29149) -- this
676 /// winds up being reported as an error during NLL borrow check.
dc9dc135
XL
677 pub fn is_copy_modulo_regions(
678 &'tcx self,
679 tcx: TyCtxt<'tcx>,
680 param_env: ty::ParamEnv<'tcx>,
681 span: Span,
682 ) -> bool {
0731742a 683 tcx.at(span).is_copy_raw(param_env.and(self))
e9174d1e
SL
684 }
685
0731742a
XL
686 /// Checks whether values of this type `T` have a size known at
687 /// compile time (i.e., whether `T: Sized`). Lifetimes are ignored
688 /// for the purposes of this check, so it can be an
689 /// over-approximation in generic contexts, where one can have
690 /// strange rules like `<T as Foo<'static>>::Bar: Sized` that
691 /// actually carry lifetime requirements.
dc9dc135 692 pub fn is_sized(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
74b04a01 693 self.is_trivially_sized(tcx_at.tcx) || tcx_at.is_sized_raw(param_env.and(self))
e9174d1e
SL
694 }
695
0731742a
XL
696 /// Checks whether values of this type `T` implement the `Freeze`
697 /// trait -- frozen types are those that do not contain a
9fa01778 698 /// `UnsafeCell` anywhere. This is a language concept used to
0731742a
XL
699 /// distinguish "true immutability", which is relevant to
700 /// optimization as well as the rules around static values. Note
701 /// that the `Freeze` trait is not exposed to end users and is
702 /// effectively an implementation detail.
dc9dc135
XL
703 pub fn is_freeze(
704 &'tcx self,
705 tcx: TyCtxt<'tcx>,
706 param_env: ty::ParamEnv<'tcx>,
707 span: Span,
708 ) -> bool {
74b04a01
XL
709 self.is_trivially_freeze() || tcx.at(span).is_freeze_raw(param_env.and(self))
710 }
711
712 /// Fast path helper for testing if a type is `Freeze`.
713 ///
714 /// Returning true means the type is known to be `Freeze`. Returning
715 /// `false` means nothing -- could be `Freeze`, might not be.
716 fn is_trivially_freeze(&self) -> bool {
717 match self.kind {
718 ty::Int(_)
719 | ty::Uint(_)
720 | ty::Float(_)
721 | ty::Bool
722 | ty::Char
723 | ty::Str
724 | ty::Never
725 | ty::Ref(..)
726 | ty::RawPtr(_)
727 | ty::FnDef(..)
728 | ty::Error
729 | ty::FnPtr(_) => true,
730 ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_freeze),
731 ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_freeze(),
732 ty::Adt(..)
733 | ty::Bound(..)
734 | ty::Closure(..)
735 | ty::Dynamic(..)
736 | ty::Foreign(_)
737 | ty::Generator(..)
738 | ty::GeneratorWitness(_)
739 | ty::Infer(_)
740 | ty::Opaque(..)
741 | ty::Param(_)
742 | ty::Placeholder(_)
743 | ty::Projection(_)
744 | ty::UnnormalizedProjection(_) => false,
745 }
cc61c64b
XL
746 }
747
748 /// If `ty.needs_drop(...)` returns `true`, then `ty` is definitely
749 /// non-copy and *might* have a destructor attached; if it returns
0731742a 750 /// `false`, then `ty` definitely has no destructor (i.e., no drop glue).
cc61c64b
XL
751 ///
752 /// (Note that this implies that if `ty` has a destructor attached,
753 /// then `needs_drop` will definitely return `true` for `ty`.)
e74abb32
XL
754 ///
755 /// Note that this method is used to check eligible types in unions.
cc61c64b 756 #[inline]
dc9dc135 757 pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
74b04a01
XL
758 // Avoid querying in simple cases.
759 match needs_drop_components(self, &tcx.data_layout) {
760 Err(AlwaysRequiresDrop) => true,
761 Ok(components) => {
762 let query_ty = match *components {
763 [] => return false,
764 // If we've got a single component, call the query with that
765 // to increase the chance that we hit the query cache.
766 [component_ty] => component_ty,
767 _ => self,
768 };
769 // This doesn't depend on regions, so try to minimize distinct
770 // query keys used.
771 let erased = tcx.normalize_erasing_regions(param_env, query_ty);
772 tcx.needs_drop_raw(param_env.and(erased))
773 }
774 }
cc61c64b
XL
775 }
776
0731742a 777 pub fn same_type(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
e74abb32 778 match (&a.kind, &b.kind) {
0731742a
XL
779 (&Adt(did_a, substs_a), &Adt(did_b, substs_b)) => {
780 if did_a != did_b {
781 return false;
782 }
783
784 substs_a.types().zip(substs_b.types()).all(|(a, b)| Self::same_type(a, b))
785 }
786 _ => a == b,
787 }
788 }
789
e9174d1e
SL
    /// Check whether a type is representable. This means it cannot contain unboxed
    /// structural recursion. This check is needed for structs and enums.
    pub fn is_representable(&'tcx self, tcx: TyCtxt<'tcx>, sp: Span) -> Representability {
        // Combine the representability of several constituents: two
        // `SelfRecursive` results merge their span lists; otherwise the
        // "worst" (maximum) representability wins.
        // Iterate until something non-representable is found
        fn fold_repr<It: Iterator<Item = Representability>>(iter: It) -> Representability {
            iter.fold(Representability::Representable, |r1, r2| match (r1, r2) {
                (Representability::SelfRecursive(v1), Representability::SelfRecursive(v2)) => {
                    Representability::SelfRecursive(v1.into_iter().chain(v2).collect())
                }
                (r1, r2) => cmp::max(r1, r2),
            })
        }

        // Recurse into the structural components of `ty` (tuple fields,
        // array element, ADT fields) and fold their results.
        fn are_inner_types_recursive<'tcx>(
            tcx: TyCtxt<'tcx>,
            sp: Span,
            seen: &mut Vec<Ty<'tcx>>,
            representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
            ty: Ty<'tcx>,
        ) -> Representability {
            match ty.kind {
                Tuple(..) => {
                    // Find non representable
                    fold_repr(ty.tuple_fields().map(|ty| {
                        is_type_structurally_recursive(tcx, sp, seen, representable_cache, ty)
                    }))
                }
                // Fixed-length vectors.
                // FIXME(#11924) Behavior undecided for zero-length vectors.
                Array(ty, _) => {
                    is_type_structurally_recursive(tcx, sp, seen, representable_cache, ty)
                }
                Adt(def, substs) => {
                    // Find non representable fields with their spans
                    fold_repr(def.all_fields().map(|field| {
                        let ty = field.ty(tcx, substs);
                        // Point the diagnostic at the offending field when it
                        // is local; otherwise fall back to the caller's span.
                        let span = tcx.hir().span_if_local(field.did).unwrap_or(sp);
                        match is_type_structurally_recursive(
                            tcx,
                            span,
                            seen,
                            representable_cache,
                            ty,
                        ) {
                            // Re-root the recursion report at this field's span.
                            Representability::SelfRecursive(_) => {
                                Representability::SelfRecursive(vec![span])
                            }
                            x => x,
                        }
                    }))
                }
                Closure(..) => {
                    // this check is run on type definitions, so we don't expect
                    // to see closure types
                    bug!("requires check invoked on inapplicable type: {:?}", ty)
                }
                // Everything else (pointers, references, primitives, ...)
                // introduces indirection or has no components, so it is
                // representable.
                _ => Representability::Representable,
            }
        }

        // `true` iff `ty` is an ADT with the same `DefId` as `def`,
        // deliberately ignoring generic arguments (see the comment in
        // `is_type_structurally_recursive_inner` for why).
        fn same_struct_or_enum<'tcx>(ty: Ty<'tcx>, def: &'tcx ty::AdtDef) -> bool {
            match ty.kind {
                Adt(ty_def, _) => ty_def == def,
                _ => false,
            }
        }

        // Does the type `ty` directly (without indirection through a pointer)
        // contain any types on stack `seen`?
        // Memoizing wrapper around `is_type_structurally_recursive_inner`.
        fn is_type_structurally_recursive<'tcx>(
            tcx: TyCtxt<'tcx>,
            sp: Span,
            seen: &mut Vec<Ty<'tcx>>,
            representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
            ty: Ty<'tcx>,
        ) -> Representability {
            debug!("is_type_structurally_recursive: {:?} {:?}", ty, sp);
            if let Some(representability) = representable_cache.get(ty) {
                debug!(
                    "is_type_structurally_recursive: {:?} {:?} - (cached) {:?}",
                    ty, sp, representability
                );
                return representability.clone();
            }

            let representability =
                is_type_structurally_recursive_inner(tcx, sp, seen, representable_cache, ty);

            representable_cache.insert(ty, representability.clone());
            representability
        }

        fn is_type_structurally_recursive_inner<'tcx>(
            tcx: TyCtxt<'tcx>,
            sp: Span,
            seen: &mut Vec<Ty<'tcx>>,
            representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
            ty: Ty<'tcx>,
        ) -> Representability {
            match ty.kind {
                Adt(def, _) => {
                    {
                        // Iterate through stack of previously seen types.
                        let mut iter = seen.iter();

                        // The first item in `seen` is the type we are actually curious about.
                        // We want to return SelfRecursive if this type contains itself.
                        // It is important that we DON'T take generic parameters into account
                        // for this check, so that Bar<T> in this example counts as SelfRecursive:
                        //
                        // struct Foo;
                        // struct Bar<T> { x: Bar<Foo> }

                        if let Some(&seen_type) = iter.next() {
                            if same_struct_or_enum(seen_type, def) {
                                debug!("SelfRecursive: {:?} contains {:?}", seen_type, ty);
                                return Representability::SelfRecursive(vec![sp]);
                            }
                        }

                        // We also need to know whether the first item contains other types
                        // that are structurally recursive. If we don't catch this case, we
                        // will recurse infinitely for some inputs.
                        //
                        // It is important that we DO take generic parameters into account
                        // here, so that code like this is considered SelfRecursive, not
                        // ContainsRecursive:
                        //
                        // struct Foo { Option<Option<Foo>> }

                        for &seen_type in iter {
                            if ty::TyS::same_type(ty, seen_type) {
                                debug!("ContainsRecursive: {:?} contains {:?}", seen_type, ty);
                                return Representability::ContainsRecursive;
                            }
                        }
                    }

                    // For structs and enums, track all previously seen types by pushing them
                    // onto the 'seen' stack.
                    seen.push(ty);
                    let out = are_inner_types_recursive(tcx, sp, seen, representable_cache, ty);
                    seen.pop();
                    out
                }
                _ => {
                    // No need to push in other cases.
                    are_inner_types_recursive(tcx, sp, seen, representable_cache, ty)
                }
            }
        }

        debug!("is_type_representable: {:?}", self);

        // To avoid a stack overflow when checking an enum variant or struct that
        // contains a different, structurally recursive type, maintain a stack
        // of seen types and check recursion for each of them (issues #3008, #3779).
        let mut seen: Vec<Ty<'_>> = Vec::new();
        let mut representable_cache = FxHashMap::default();
        let r = is_type_structurally_recursive(tcx, sp, &mut seen, &mut representable_cache, self);
        debug!("is_type_representable: {:?} is {:?}", self, r);
        r
    }
e1599b0c
XL
953
954 /// Peel off all reference types in this type until there are none left.
955 ///
956 /// This method is idempotent, i.e. `ty.peel_refs().peel_refs() == ty.peel_refs()`.
957 ///
958 /// # Examples
959 ///
960 /// - `u8` -> `u8`
961 /// - `&'a mut u8` -> `u8`
962 /// - `&'a &'b u8` -> `u8`
963 /// - `&'a *const &'b u8 -> *const &'b u8`
964 pub fn peel_refs(&'tcx self) -> Ty<'tcx> {
965 let mut ty = self;
e74abb32 966 while let Ref(_, inner_ty, _) = ty.kind {
e1599b0c
XL
967 ty = inner_ty;
968 }
969 ty
970 }
e9174d1e 971}
7cac9316 972
abe05a73
XL
/// Categorization of an explicit `self` receiver type, as computed by
/// [`ExplicitSelf::determine`].
pub enum ExplicitSelf<'tcx> {
    /// The receiver is the `Self` type itself (plain `self`).
    ByValue,
    /// The receiver is `&Self` or `&mut Self`.
    ByReference(ty::Region<'tcx>, hir::Mutability),
    /// The receiver is `*const Self` or `*mut Self`.
    ByRawPointer(hir::Mutability),
    /// The receiver is `Box<Self>`.
    ByBox,
    /// Any other receiver type (see `determine` for how this is used).
    Other,
}
980
981impl<'tcx> ExplicitSelf<'tcx> {
982 /// Categorizes an explicit self declaration like `self: SomeType`
983 /// into either `self`, `&self`, `&mut self`, `Box<self>`, or
984 /// `Other`.
985 /// This is mainly used to require the arbitrary_self_types feature
986 /// in the case of `Other`, to improve error messages in the common cases,
987 /// and to make `Other` non-object-safe.
988 ///
989 /// Examples:
990 ///
991 /// ```
992 /// impl<'a> Foo for &'a T {
993 /// // Legal declarations:
994 /// fn method1(self: &&'a T); // ExplicitSelf::ByReference
995 /// fn method2(self: &'a T); // ExplicitSelf::ByValue
996 /// fn method3(self: Box<&'a T>); // ExplicitSelf::ByBox
997 /// fn method4(self: Rc<&'a T>); // ExplicitSelf::Other
998 ///
999 /// // Invalid cases will be caught by `check_method_receiver`:
1000 /// fn method_err1(self: &'a mut T); // ExplicitSelf::Other
1001 /// fn method_err2(self: &'static T) // ExplicitSelf::ByValue
1002 /// fn method_err3(self: &&T) // ExplicitSelf::ByReference
1003 /// }
1004 /// ```
1005 ///
dfeec247 1006 pub fn determine<P>(self_arg_ty: Ty<'tcx>, is_self_ty: P) -> ExplicitSelf<'tcx>
abe05a73 1007 where
dfeec247 1008 P: Fn(Ty<'tcx>) -> bool,
abe05a73
XL
1009 {
1010 use self::ExplicitSelf::*;
1011
e74abb32 1012 match self_arg_ty.kind {
abe05a73 1013 _ if is_self_ty(self_arg_ty) => ByValue,
dfeec247
XL
1014 ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl),
1015 ty::RawPtr(ty::TypeAndMut { ty, mutbl }) if is_self_ty(ty) => ByRawPointer(mutbl),
1016 ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => ByBox,
1017 _ => Other,
abe05a73
XL
1018 }
1019 }
1020}
74b04a01
XL
1021
/// Returns a list of types such that the given type needs drop if and only if
/// *any* of the returned types need drop. Returns `Err(AlwaysRequiresDrop)` if
/// this type always needs drop.
pub fn needs_drop_components(
    ty: Ty<'tcx>,
    target_layout: &TargetDataLayout,
) -> Result<SmallVec<[Ty<'tcx>; 2]>, AlwaysRequiresDrop> {
    match ty.kind {
        // Primitive, pointer-like, and other structurally trivial types
        // never have destructors, so they contribute no components.
        ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Bool
        | ty::Int(_)
        | ty::Uint(_)
        | ty::Float(_)
        | ty::Never
        | ty::FnDef(..)
        | ty::FnPtr(_)
        | ty::Char
        | ty::GeneratorWitness(..)
        | ty::RawPtr(_)
        | ty::Ref(..)
        | ty::Str => Ok(SmallVec::new()),

        // Foreign types can never have destructors.
        ty::Foreign(..) => Ok(SmallVec::new()),

        // Trait objects may hide any destructor, and error types are treated
        // conservatively, so both always require drop.
        ty::Dynamic(..) | ty::Error => Err(AlwaysRequiresDrop),

        // A slice needs drop exactly when its element type does.
        ty::Slice(ty) => needs_drop_components(ty, target_layout),
        ty::Array(elem_ty, size) => {
            match needs_drop_components(elem_ty, target_layout) {
                Ok(v) if v.is_empty() => Ok(v),
                // The element may need drop; whether the array does depends
                // on whether its length is known to be zero.
                res => match size.val.try_to_bits(target_layout.pointer_size) {
                    // Arrays of size zero don't need drop, even if their element
                    // type does.
                    Some(0) => Ok(SmallVec::new()),
                    Some(_) => res,
                    // We don't know which of the cases above we are in, so
                    // return the whole type and let the caller decide what to
                    // do.
                    None => Ok(smallvec![ty]),
                },
            }
        }
        // If any field needs drop, then the whole tuple does.
        ty::Tuple(..) => ty.tuple_fields().try_fold(SmallVec::new(), move |mut acc, elem| {
            acc.extend(needs_drop_components(elem, target_layout)?);
            Ok(acc)
        }),

        // These require checking for `Copy` bounds or `Adt` destructors.
        ty::Adt(..)
        | ty::Projection(..)
        | ty::UnnormalizedProjection(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Opaque(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::Generator(..) => Ok(smallvec![ty]),
    }
}
1085
/// Zero-sized marker returned (as the `Err` variant) by
/// `needs_drop_components` when the inspected type unconditionally needs
/// drop, letting callers short-circuit without listing components.
#[derive(Copy, Clone, Debug, HashStable, RustcEncodable, RustcDecodable)]
pub struct AlwaysRequiresDrop;