]> git.proxmox.com Git - rustc.git/blame - compiler/rustc_middle/src/ty/util.rs
New upstream version 1.56.0~beta.4+dfsg1
[rustc.git] / compiler / rustc_middle / src / ty / util.rs
CommitLineData
9fa01778
XL
1//! Miscellaneous type-system utilities that are too small to deserve their own modules.
2
9fa01778 3use crate::ich::NodeIdHashingMode;
f9f354fc 4use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
3dfed10e 5use crate::ty::fold::TypeFolder;
ba9703b0 6use crate::ty::layout::IntegerExt;
9fa01778 7use crate::ty::query::TyCtxtAt;
29967ef6 8use crate::ty::subst::{GenericArgKind, Subst, SubstsRef};
9fa01778 9use crate::ty::TyKind::*;
136023e0 10use crate::ty::{self, DebruijnIndex, DefIdTree, List, Ty, TyCtxt, TypeFoldable};
dfeec247 11use rustc_apfloat::Float as _;
3dfed10e 12use rustc_ast as ast;
74b04a01 13use rustc_attr::{self as attr, SignedInt, UnsignedInt};
0731742a 14use rustc_data_structures::fx::{FxHashMap, FxHashSet};
dfeec247 15use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
ba9703b0 16use rustc_errors::ErrorReported;
dfeec247
XL
17use rustc_hir as hir;
18use rustc_hir::def::DefKind;
19use rustc_hir::def_id::DefId;
532ac7d7 20use rustc_macros::HashStable;
cdc7bbd5 21use rustc_span::DUMMY_SP;
ba9703b0 22use rustc_target::abi::{Integer, Size, TargetDataLayout};
74b04a01 23use smallvec::SmallVec;
cdc7bbd5 24use std::{fmt, iter};
e9174d1e 25
0531ce1d
XL
/// A decoded enum discriminant value paired with the type it is read at.
#[derive(Copy, Clone, Debug)]
pub struct Discr<'tcx> {
    /// Bit representation of the discriminant (e.g., `-128i8` is `0xFF_u128`).
    pub val: u128,
    /// The type used to interpret `val` (drives sign extension when
    /// displaying and the wrap-around width in `checked_add`).
    pub ty: Ty<'tcx>,
}
8bb4bdeb 32
0531ce1d 33impl<'tcx> fmt::Display for Discr<'tcx> {
0bf4aa26 34 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
1b1a35ee 35 match *self.ty.kind() {
b7449926 36 ty::Int(ity) => {
5869c6ff 37 let size = ty::tls::with(|tcx| Integer::from_int_ty(&tcx, ity).size());
532ac7d7 38 let x = self.val;
0531ce1d 39 // sign extend the raw representation to be an i128
29967ef6 40 let x = size.sign_extend(x) as i128;
0531ce1d 41 write!(fmt, "{}", x)
dfeec247 42 }
0531ce1d
XL
43 _ => write!(fmt, "{}", self.val),
44 }
45 }
cc61c64b 46}
8bb4bdeb 47
dfeec247 48fn signed_min(size: Size) -> i128 {
29967ef6 49 size.sign_extend(1_u128 << (size.bits() - 1)) as i128
dfeec247
XL
50}
51
52fn signed_max(size: Size) -> i128 {
74b04a01 53 i128::MAX >> (128 - size.bits())
dfeec247
XL
54}
55
56fn unsigned_max(size: Size) -> u128 {
74b04a01 57 u128::MAX >> (128 - size.bits())
dfeec247
XL
58}
59
60fn int_size_and_signed<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (Size, bool) {
1b1a35ee 61 let (int, signed) = match *ty.kind() {
5869c6ff
XL
62 Int(ity) => (Integer::from_int_ty(&tcx, ity), true),
63 Uint(uty) => (Integer::from_uint_ty(&tcx, uty), false),
dfeec247
XL
64 _ => bug!("non integer discriminant"),
65 };
66 (int.size(), signed)
67}
68
impl<'tcx> Discr<'tcx> {
    /// Adds `1` to the value and wraps around if the maximum for the type is reached.
    pub fn wrap_incr(self, tcx: TyCtxt<'tcx>) -> Self {
        self.checked_add(tcx, 1).0
    }
    /// Adds `n` to the value, wrapping around at the bounds of `self.ty`.
    /// Returns the new discriminant and a flag that is `true` iff the
    /// addition wrapped.
    pub fn checked_add(self, tcx: TyCtxt<'tcx>, n: u128) -> (Self, bool) {
        let (size, signed) = int_size_and_signed(tcx, self.ty);
        let (val, oflo) = if signed {
            let min = signed_min(size);
            let max = signed_max(size);
            // Interpret the raw bits as a signed value of the right width.
            let val = size.sign_extend(self.val) as i128;
            assert!(n < (i128::MAX as u128));
            let n = n as i128;
            let oflo = val > max - n;
            // On overflow, continue counting from `min` with the remainder
            // that didn't fit below `max`.
            let val = if oflo { min + (n - (max - val) - 1) } else { val + n };
            // zero the upper bits
            let val = val as u128;
            let val = size.truncate(val);
            (val, oflo)
        } else {
            let max = unsigned_max(size);
            let val = self.val;
            let oflo = val > max - n;
            // Unsigned wrap-around restarts from zero.
            let val = if oflo { n - (max - val) - 1 } else { val + n };
            (val, oflo)
        };
        (Self { val, ty: self.ty }, oflo)
    }
}
98
/// Helpers for turning an `attr::IntType` (a parsed integer-representation
/// kind) into type-system values and for stepping enum discriminants.
pub trait IntTypeExt {
    /// Returns the interned `Ty` corresponding to this integer kind.
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
    /// Returns the discriminant following `val`, or the initial discriminant
    /// when `val` is `None`; returns `None` if incrementing overflows.
    fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>>;
    /// Returns the discriminant assigned to the first variant (zero).
    fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx>;
}
104
e9174d1e 105impl IntTypeExt for attr::IntType {
dc9dc135 106 fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
e9174d1e 107 match *self {
dfeec247
XL
108 SignedInt(ast::IntTy::I8) => tcx.types.i8,
109 SignedInt(ast::IntTy::I16) => tcx.types.i16,
110 SignedInt(ast::IntTy::I32) => tcx.types.i32,
111 SignedInt(ast::IntTy::I64) => tcx.types.i64,
112 SignedInt(ast::IntTy::I128) => tcx.types.i128,
113 SignedInt(ast::IntTy::Isize) => tcx.types.isize,
114 UnsignedInt(ast::UintTy::U8) => tcx.types.u8,
115 UnsignedInt(ast::UintTy::U16) => tcx.types.u16,
116 UnsignedInt(ast::UintTy::U32) => tcx.types.u32,
117 UnsignedInt(ast::UintTy::U64) => tcx.types.u64,
118 UnsignedInt(ast::UintTy::U128) => tcx.types.u128,
2c00a5a8 119 UnsignedInt(ast::UintTy::Usize) => tcx.types.usize,
e9174d1e
SL
120 }
121 }
122
dc9dc135 123 fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx> {
dfeec247 124 Discr { val: 0, ty: self.to_ty(tcx) }
e9174d1e
SL
125 }
126
dc9dc135 127 fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>> {
a7813a04 128 if let Some(val) = val {
0531ce1d
XL
129 assert_eq!(self.to_ty(tcx), val.ty);
130 let (new, oflo) = val.checked_add(tcx, 1);
dfeec247 131 if oflo { None } else { Some(new) }
a7813a04
XL
132 } else {
133 Some(self.initial_discriminant(tcx))
134 }
e9174d1e
SL
135 }
136}
137
impl<'tcx> TyCtxt<'tcx> {
    /// Creates a hash of the type `Ty` which will be the same no matter what crate
    /// context it's calculated within. This is used by the `type_id` intrinsic.
    pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
        let mut hasher = StableHasher::new();
        let mut hcx = self.create_stable_hashing_context();

        // We want the type_id be independent of the types free regions, so we
        // erase them. The erase_regions() call will also anonymize bound
        // regions, which is desirable too.
        let ty = self.erase_regions(ty);

        hcx.while_hashing_spans(false, |hcx| {
            hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
                ty.hash_stable(hcx, &mut hasher);
            });
        });
        hasher.finish()
    }

    /// Returns `true` if `ty` is an ADT with at least one field whose type is
    /// the error type (i.e., a type error was already reported for it).
    pub fn has_error_field(self, ty: Ty<'tcx>) -> bool {
        if let ty::Adt(def, substs) = *ty.kind() {
            for field in def.all_fields() {
                let field_ty = field.ty(self, substs);
                if let Error(_) = field_ty.kind() {
                    return true;
                }
            }
        }
        false
    }

    /// Attempts to returns the deeply last field of nested structures, but
    /// does not apply any normalization in its search. Returns the same type
    /// if input `ty` is not a structure at all.
    pub fn struct_tail_without_normalization(self, ty: Ty<'tcx>) -> Ty<'tcx> {
        let tcx = self;
        tcx.struct_tail_with_normalize(ty, |ty| ty)
    }

    /// Returns the deeply last field of nested structures, or the same type if
    /// not a structure at all. Corresponds to the only possible unsized field,
    /// and its type can be used to determine unsizing strategy.
    ///
    /// Should only be called if `ty` has no inference variables and does not
    /// need its lifetimes preserved (e.g. as part of codegen); otherwise
    /// normalization attempt may cause compiler bugs.
    pub fn struct_tail_erasing_lifetimes(
        self,
        ty: Ty<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> Ty<'tcx> {
        let tcx = self;
        tcx.struct_tail_with_normalize(ty, |ty| tcx.normalize_erasing_regions(param_env, ty))
    }

    /// Returns the deeply last field of nested structures, or the same type if
    /// not a structure at all. Corresponds to the only possible unsized field,
    /// and its type can be used to determine unsizing strategy.
    ///
    /// This is parameterized over the normalization strategy (i.e. how to
    /// handle `<T as Trait>::Assoc` and `impl Trait`); pass the identity
    /// function to indicate no normalization should take place.
    ///
    /// See also `struct_tail_erasing_lifetimes`, which is suitable for use
    /// during codegen.
    pub fn struct_tail_with_normalize(
        self,
        mut ty: Ty<'tcx>,
        normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
    ) -> Ty<'tcx> {
        let recursion_limit = self.recursion_limit();
        for iteration in 0.. {
            // Guard against infinitely recursive types by respecting the
            // crate's recursion limit.
            if !recursion_limit.value_within_limit(iteration) {
                return self.ty_error_with_message(
                    DUMMY_SP,
                    &format!("reached the recursion limit finding the struct tail for {}", ty),
                );
            }
            match *ty.kind() {
                ty::Adt(def, substs) => {
                    // Only structs have a meaningful tail; descend into the
                    // last field, or stop if the struct has no fields.
                    if !def.is_struct() {
                        break;
                    }
                    match def.non_enum_variant().fields.last() {
                        Some(f) => ty = f.ty(self, substs),
                        None => break,
                    }
                }

                ty::Tuple(tys) if let Some((&last_ty, _)) = tys.split_last() => {
                    ty = last_ty.expect_ty();
                }

                ty::Tuple(_) => break,

                ty::Projection(_) | ty::Opaque(..) => {
                    // Try to make progress by normalizing; if normalization
                    // is a no-op, this is as far as we can see.
                    let normalized = normalize(ty);
                    if ty == normalized {
                        return ty;
                    } else {
                        ty = normalized;
                    }
                }

                _ => {
                    break;
                }
            }
        }
        ty
    }

    /// Same as applying `struct_tail` on `source` and `target`, but only
    /// keeps going as long as the two types are instances of the same
    /// structure definitions.
    /// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
    /// whereas struct_tail produces `T`, and `Trait`, respectively.
    ///
    /// Should only be called if the types have no inference variables and do
    /// not need their lifetimes preserved (e.g., as part of codegen); otherwise,
    /// normalization attempt may cause compiler bugs.
    pub fn struct_lockstep_tails_erasing_lifetimes(
        self,
        source: Ty<'tcx>,
        target: Ty<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> (Ty<'tcx>, Ty<'tcx>) {
        let tcx = self;
        tcx.struct_lockstep_tails_with_normalize(source, target, |ty| {
            tcx.normalize_erasing_regions(param_env, ty)
        })
    }

    /// Same as applying `struct_tail` on `source` and `target`, but only
    /// keeps going as long as the two types are instances of the same
    /// structure definitions.
    /// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
    /// whereas struct_tail produces `T`, and `Trait`, respectively.
    ///
    /// See also `struct_lockstep_tails_erasing_lifetimes`, which is suitable for use
    /// during codegen.
    pub fn struct_lockstep_tails_with_normalize(
        self,
        source: Ty<'tcx>,
        target: Ty<'tcx>,
        normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
    ) -> (Ty<'tcx>, Ty<'tcx>) {
        let (mut a, mut b) = (source, target);
        loop {
            match (&a.kind(), &b.kind()) {
                (&Adt(a_def, a_substs), &Adt(b_def, b_substs))
                    if a_def == b_def && a_def.is_struct() =>
                {
                    // Same struct on both sides: step into the last field of
                    // each in lockstep.
                    if let Some(f) = a_def.non_enum_variant().fields.last() {
                        a = f.ty(self, a_substs);
                        b = f.ty(self, b_substs);
                    } else {
                        break;
                    }
                }
                (&Tuple(a_tys), &Tuple(b_tys)) if a_tys.len() == b_tys.len() => {
                    // Same-arity tuples: step into the last element of each.
                    if let Some(a_last) = a_tys.last() {
                        a = a_last.expect_ty();
                        b = b_tys.last().unwrap().expect_ty();
                    } else {
                        break;
                    }
                }
                (ty::Projection(_) | ty::Opaque(..), _)
                | (_, ty::Projection(_) | ty::Opaque(..)) => {
                    // If either side is a projection, attempt to
                    // progress via normalization. (Should be safe to
                    // apply to both sides as normalization is
                    // idempotent.)
                    let a_norm = normalize(a);
                    let b_norm = normalize(b);
                    if a == a_norm && b == b_norm {
                        break;
                    } else {
                        a = a_norm;
                        b = b_norm;
                    }
                }

                _ => break,
            }
        }
        (a, b)
    }

    /// Calculate the destructor of a given type. Returns `None` if the `Drop`
    /// lang item is absent or no (validated) `Drop` impl is found.
    pub fn calculate_dtor(
        self,
        adt_did: DefId,
        validate: impl Fn(Self, DefId) -> Result<(), ErrorReported>,
    ) -> Option<ty::Destructor> {
        let drop_trait = self.lang_items().drop_trait()?;
        self.ensure().coherent_trait(drop_trait);

        let ty = self.type_of(adt_did);
        let dtor_did = self.find_map_relevant_impl(drop_trait, ty, |impl_did| {
            // The impl's single associated item is the `drop` method;
            // accept it only if `validate` approves the impl.
            if let Some(item) = self.associated_items(impl_did).in_definition_order().next() {
                if validate(self, impl_did).is_ok() {
                    return Some(item.def_id);
                }
            }
            None
        });

        Some(ty::Destructor { did: dtor_did? })
    }

    /// Returns the set of types that are required to be alive in
    /// order to run the destructor of `def` (see RFCs 769 and
    /// 1238).
    ///
    /// Note that this returns only the constraints for the
    /// destructor of `def` itself. For the destructors of the
    /// contents, you need `adt_dtorck_constraint`.
    pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) -> Vec<ty::subst::GenericArg<'tcx>> {
        let dtor = match def.destructor(self) {
            None => {
                debug!("destructor_constraints({:?}) - no dtor", def.did);
                return vec![];
            }
            Some(dtor) => dtor.did,
        };

        let impl_def_id = self.associated_item(dtor).container.id();
        let impl_generics = self.generics_of(impl_def_id);

        // We have a destructor - all the parameters that are not
        // pure_wrt_drop (i.e, don't have a #[may_dangle] attribute)
        // must be live.

        // We need to return the list of parameters from the ADTs
        // generics/substs that correspond to impure parameters on the
        // impl's generics. This is a bit ugly, but conceptually simple:
        //
        // Suppose our ADT looks like the following
        //
        //     struct S<X, Y, Z>(X, Y, Z);
        //
        // and the impl is
        //
        //     impl<#[may_dangle] P0, P1, P2> Drop for S<P1, P2, P0>
        //
        // We want to return the parameters (X, Y). For that, we match
        // up the item-substs <X, Y, Z> with the substs on the impl ADT,
        // <P1, P2, P0>, and then look up which of the impl substs refer to
        // parameters marked as pure.

        let impl_substs = match *self.type_of(impl_def_id).kind() {
            ty::Adt(def_, substs) if def_ == def => substs,
            _ => bug!(),
        };

        let item_substs = match *self.type_of(def.did).kind() {
            ty::Adt(def_, substs) if def_ == def => substs,
            _ => bug!(),
        };

        let result = iter::zip(item_substs, impl_substs)
            .filter(|&(_, k)| {
                // Keep the item param iff the corresponding impl param is
                // NOT pure_wrt_drop (i.e., must stay live for drop).
                match k.unpack() {
                    GenericArgKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => {
                        !impl_generics.region_param(ebr, self).pure_wrt_drop
                    }
                    GenericArgKind::Type(&ty::TyS { kind: ty::Param(ref pt), .. }) => {
                        !impl_generics.type_param(pt, self).pure_wrt_drop
                    }
                    GenericArgKind::Const(&ty::Const {
                        val: ty::ConstKind::Param(ref pc), ..
                    }) => !impl_generics.const_param(pc, self).pure_wrt_drop,
                    GenericArgKind::Lifetime(_)
                    | GenericArgKind::Type(_)
                    | GenericArgKind::Const(_) => {
                        // Not a type, const or region param: this should be reported
                        // as an error.
                        false
                    }
                }
            })
            .map(|(item_param, _)| item_param)
            .collect();
        debug!("destructor_constraint({:?}) = {:?}", def.did, result);
        result
    }

    /// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). Note
    /// that closures have a `DefId`, but the closure *expression* also
    /// has a `HirId` that is located within the context where the
    /// closure appears (and, sadly, a corresponding `NodeId`, since
    /// those are not yet phased out). The parent of the closure's
    /// `DefId` will also be the context where it appears.
    pub fn is_closure(self, def_id: DefId) -> bool {
        matches!(self.def_kind(def_id), DefKind::Closure | DefKind::Generator)
    }

    /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`).
    pub fn is_trait(self, def_id: DefId) -> bool {
        self.def_kind(def_id) == DefKind::Trait
    }

    /// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`),
    /// and `false` otherwise.
    pub fn is_trait_alias(self, def_id: DefId) -> bool {
        self.def_kind(def_id) == DefKind::TraitAlias
    }

    /// Returns `true` if this `DefId` refers to the implicit constructor for
    /// a tuple struct like `struct Foo(u32)`, and `false` otherwise.
    pub fn is_constructor(self, def_id: DefId) -> bool {
        matches!(self.def_kind(def_id), DefKind::Ctor(..))
    }

    /// Given the def-ID of a fn or closure, returns the def-ID of
    /// the innermost fn item that the closure is contained within.
    /// This is a significant `DefId` because, when we do
    /// type-checking, we type-check this fn item and all of its
    /// (transitive) closures together. Therefore, when we fetch the
    /// `typeck` of the closure, for example, we really wind up
    /// fetching the `typeck` of the enclosing fn item.
    pub fn closure_base_def_id(self, def_id: DefId) -> DefId {
        let mut def_id = def_id;
        // Walk up the parent chain until we leave closure/generator land.
        while self.is_closure(def_id) {
            def_id = self.parent(def_id).unwrap_or_else(|| {
                bug!("closure {:?} has no parent", def_id);
            });
        }
        def_id
    }

    /// Given the `DefId` and substs of a closure, creates the type of
    /// `self` argument that the closure expects. For example, for a
    /// `Fn` closure, this would return a reference type `&T` where
    /// `T = closure_ty`.
    ///
    /// Returns `None` if this closure's kind has not yet been inferred.
    /// This should only be possible during type checking.
    ///
    /// Note that the return value is a late-bound region and hence
    /// wrapped in a binder.
    pub fn closure_env_ty(
        self,
        closure_def_id: DefId,
        closure_substs: SubstsRef<'tcx>,
        env_region: ty::RegionKind,
    ) -> Option<Ty<'tcx>> {
        let closure_ty = self.mk_closure(closure_def_id, closure_substs);
        let closure_kind_ty = closure_substs.as_closure().kind_ty();
        let closure_kind = closure_kind_ty.to_opt_closure_kind()?;
        // `Fn` takes `&self`, `FnMut` takes `&mut self`, `FnOnce` takes `self`.
        let env_ty = match closure_kind {
            ty::ClosureKind::Fn => self.mk_imm_ref(self.mk_region(env_region), closure_ty),
            ty::ClosureKind::FnMut => self.mk_mut_ref(self.mk_region(env_region), closure_ty),
            ty::ClosureKind::FnOnce => closure_ty,
        };
        Some(env_ty)
    }

    /// Returns `true` if the node pointed to by `def_id` is a `static` item.
    pub fn is_static(self, def_id: DefId) -> bool {
        self.static_mutability(def_id).is_some()
    }

    /// Returns `true` if this is a `static` item with the `#[thread_local]` attribute.
    pub fn is_thread_local_static(self, def_id: DefId) -> bool {
        self.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL)
    }

    /// Returns `true` if the node pointed to by `def_id` is a mutable `static` item.
    pub fn is_mutable_static(self, def_id: DefId) -> bool {
        self.static_mutability(def_id) == Some(hir::Mutability::Mut)
    }

    /// Get the type of the pointer to the static that we use in MIR.
    pub fn static_ptr_ty(self, def_id: DefId) -> Ty<'tcx> {
        // Make sure that any constants in the static's type are evaluated.
        let static_ty = self.normalize_erasing_regions(ty::ParamEnv::empty(), self.type_of(def_id));

        // Make sure that accesses to unsafe statics end up using raw pointers.
        // For thread-locals, this needs to be kept in sync with `Rvalue::ty`.
        if self.is_mutable_static(def_id) {
            self.mk_mut_ptr(static_ty)
        } else if self.is_foreign_item(def_id) {
            self.mk_imm_ptr(static_ty)
        } else {
            self.mk_imm_ref(self.lifetimes.re_erased, static_ty)
        }
    }

    /// Expands the given impl trait type, stopping if the type is recursive.
    ///
    /// Returns `Ok` with the expanded type; returns `Err` carrying the
    /// (partially expanded) type if the primary opaque type was found to
    /// recursively mention itself.
    pub fn try_expand_impl_trait_type(
        self,
        def_id: DefId,
        substs: SubstsRef<'tcx>,
    ) -> Result<Ty<'tcx>, Ty<'tcx>> {
        let mut visitor = OpaqueTypeExpander {
            seen_opaque_tys: FxHashSet::default(),
            expanded_cache: FxHashMap::default(),
            primary_def_id: Some(def_id),
            found_recursion: false,
            found_any_recursion: false,
            check_recursion: true,
            tcx: self,
        };

        let expanded_type = visitor.expand_opaque_ty(def_id, substs).unwrap();
        if visitor.found_recursion { Err(expanded_type) } else { Ok(expanded_type) }
    }
}
550
3dfed10e
XL
/// A type folder that replaces opaque types (`impl Trait`) with their
/// underlying concrete types, detecting recursive occurrences along the way.
struct OpaqueTypeExpander<'tcx> {
    // Contains the DefIds of the opaque types that are currently being
    // expanded. When we expand an opaque type we insert the DefId of
    // that type, and when we finish expanding that type we remove its
    // DefId.
    seen_opaque_tys: FxHashSet<DefId>,
    // Cache of all expansions we've seen so far. This is a critical
    // optimization for some large types produced by async fn trees.
    expanded_cache: FxHashMap<(DefId, SubstsRef<'tcx>), Ty<'tcx>>,
    // The opaque type whose recursion we are primarily interested in
    // (set by `try_expand_impl_trait_type`).
    primary_def_id: Option<DefId>,
    // `true` iff `primary_def_id` itself was found to be recursive.
    found_recursion: bool,
    // `true` iff *any* opaque type encountered was recursive.
    found_any_recursion: bool,
    /// Whether or not to check for recursive opaque types.
    /// This is `true` when we're explicitly checking for opaque type
    /// recursion, and 'false' otherwise to avoid unnecessary work.
    check_recursion: bool,
    tcx: TyCtxt<'tcx>,
}
569
impl<'tcx> OpaqueTypeExpander<'tcx> {
    /// Expands the opaque type `def_id` applied to `substs` into its
    /// underlying concrete type, recursively folding nested opaque types.
    /// Returns `None` once any recursion has been detected.
    fn expand_opaque_ty(&mut self, def_id: DefId, substs: SubstsRef<'tcx>) -> Option<Ty<'tcx>> {
        if self.found_any_recursion {
            return None;
        }
        let substs = substs.fold_with(self);
        if !self.check_recursion || self.seen_opaque_tys.insert(def_id) {
            // Either recursion checking is off, or this opaque type is not
            // already being expanded further up the stack: safe to expand.
            let expanded_ty = match self.expanded_cache.get(&(def_id, substs)) {
                Some(expanded_ty) => expanded_ty,
                None => {
                    // Substitute into the opaque type's underlying type and
                    // fold it to expand any nested opaque types, then cache.
                    let generic_ty = self.tcx.type_of(def_id);
                    let concrete_ty = generic_ty.subst(self.tcx, substs);
                    let expanded_ty = self.fold_ty(concrete_ty);
                    self.expanded_cache.insert((def_id, substs), expanded_ty);
                    expanded_ty
                }
            };
            if self.check_recursion {
                self.seen_opaque_tys.remove(&def_id);
            }
            Some(expanded_ty)
        } else {
            // If another opaque type that we contain is recursive, then it
            // will report the error, so we don't have to.
            self.found_any_recursion = true;
            self.found_recursion = def_id == *self.primary_def_id.as_ref().unwrap();
            None
        }
    }
}
600
601impl<'tcx> TypeFolder<'tcx> for OpaqueTypeExpander<'tcx> {
602 fn tcx(&self) -> TyCtxt<'tcx> {
603 self.tcx
604 }
605
606 fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
607 if let ty::Opaque(def_id, substs) = t.kind {
608 self.expand_opaque_ty(def_id, substs).unwrap_or(t)
609 } else if t.has_opaque_types() {
610 t.super_fold_with(self)
611 } else {
612 t
613 }
614 }
615}
616
dc9dc135 617impl<'tcx> ty::TyS<'tcx> {
dfeec247
XL
618 /// Returns the maximum value for the given numeric type (including `char`s)
619 /// or returns `None` if the type is not numeric.
620 pub fn numeric_max_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
1b1a35ee 621 let val = match self.kind() {
dfeec247
XL
622 ty::Int(_) | ty::Uint(_) => {
623 let (size, signed) = int_size_and_signed(tcx, self);
624 let val = if signed { signed_max(size) as u128 } else { unsigned_max(size) };
625 Some(val)
626 }
627 ty::Char => Some(std::char::MAX as u128),
628 ty::Float(fty) => Some(match fty {
5869c6ff
XL
629 ty::FloatTy::F32 => rustc_apfloat::ieee::Single::INFINITY.to_bits(),
630 ty::FloatTy::F64 => rustc_apfloat::ieee::Double::INFINITY.to_bits(),
dfeec247
XL
631 }),
632 _ => None,
633 };
634 val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
635 }
636
637 /// Returns the minimum value for the given numeric type (including `char`s)
638 /// or returns `None` if the type is not numeric.
639 pub fn numeric_min_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
1b1a35ee 640 let val = match self.kind() {
dfeec247
XL
641 ty::Int(_) | ty::Uint(_) => {
642 let (size, signed) = int_size_and_signed(tcx, self);
29967ef6 643 let val = if signed { size.truncate(signed_min(size) as u128) } else { 0 };
dfeec247
XL
644 Some(val)
645 }
646 ty::Char => Some(0),
647 ty::Float(fty) => Some(match fty {
5869c6ff
XL
648 ty::FloatTy::F32 => (-::rustc_apfloat::ieee::Single::INFINITY).to_bits(),
649 ty::FloatTy::F64 => (-::rustc_apfloat::ieee::Double::INFINITY).to_bits(),
dfeec247
XL
650 }),
651 _ => None,
652 };
653 val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
654 }
655
0731742a
XL
656 /// Checks whether values of this type `T` are *moved* or *copied*
657 /// when referenced -- this amounts to a check for whether `T:
658 /// Copy`, but note that we **don't** consider lifetimes when
659 /// doing this check. This means that we may generate MIR which
660 /// does copies even when the type actually doesn't satisfy the
661 /// full requirements for the `Copy` trait (cc #29149) -- this
662 /// winds up being reported as an error during NLL borrow check.
dc9dc135
XL
663 pub fn is_copy_modulo_regions(
664 &'tcx self,
f035d41b 665 tcx_at: TyCtxtAt<'tcx>,
dc9dc135 666 param_env: ty::ParamEnv<'tcx>,
dc9dc135 667 ) -> bool {
f035d41b 668 tcx_at.is_copy_raw(param_env.and(self))
e9174d1e
SL
669 }
670
0731742a
XL
671 /// Checks whether values of this type `T` have a size known at
672 /// compile time (i.e., whether `T: Sized`). Lifetimes are ignored
673 /// for the purposes of this check, so it can be an
674 /// over-approximation in generic contexts, where one can have
675 /// strange rules like `<T as Foo<'static>>::Bar: Sized` that
676 /// actually carry lifetime requirements.
dc9dc135 677 pub fn is_sized(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
74b04a01 678 self.is_trivially_sized(tcx_at.tcx) || tcx_at.is_sized_raw(param_env.and(self))
e9174d1e
SL
679 }
680
0731742a 681 /// Checks whether values of this type `T` implement the `Freeze`
94222f64 682 /// trait -- frozen types are those that do not contain an
9fa01778 683 /// `UnsafeCell` anywhere. This is a language concept used to
0731742a
XL
684 /// distinguish "true immutability", which is relevant to
685 /// optimization as well as the rules around static values. Note
686 /// that the `Freeze` trait is not exposed to end users and is
687 /// effectively an implementation detail.
f035d41b
XL
688 pub fn is_freeze(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
689 self.is_trivially_freeze() || tcx_at.is_freeze_raw(param_env.and(self))
74b04a01
XL
690 }
691
692 /// Fast path helper for testing if a type is `Freeze`.
693 ///
694 /// Returning true means the type is known to be `Freeze`. Returning
695 /// `false` means nothing -- could be `Freeze`, might not be.
696 fn is_trivially_freeze(&self) -> bool {
1b1a35ee 697 match self.kind() {
74b04a01
XL
698 ty::Int(_)
699 | ty::Uint(_)
700 | ty::Float(_)
701 | ty::Bool
702 | ty::Char
703 | ty::Str
704 | ty::Never
705 | ty::Ref(..)
706 | ty::RawPtr(_)
707 | ty::FnDef(..)
f035d41b 708 | ty::Error(_)
74b04a01
XL
709 | ty::FnPtr(_) => true,
710 ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_freeze),
711 ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_freeze(),
712 ty::Adt(..)
713 | ty::Bound(..)
714 | ty::Closure(..)
715 | ty::Dynamic(..)
716 | ty::Foreign(_)
717 | ty::Generator(..)
718 | ty::GeneratorWitness(_)
719 | ty::Infer(_)
720 | ty::Opaque(..)
721 | ty::Param(_)
722 | ty::Placeholder(_)
f9f354fc 723 | ty::Projection(_) => false,
74b04a01 724 }
cc61c64b
XL
725 }
726
cdc7bbd5
XL
727 /// Checks whether values of this type `T` implement the `Unpin` trait.
728 pub fn is_unpin(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
729 self.is_trivially_unpin() || tcx_at.is_unpin_raw(param_env.and(self))
730 }
731
732 /// Fast path helper for testing if a type is `Unpin`.
733 ///
734 /// Returning true means the type is known to be `Unpin`. Returning
735 /// `false` means nothing -- could be `Unpin`, might not be.
736 fn is_trivially_unpin(&self) -> bool {
737 match self.kind() {
738 ty::Int(_)
739 | ty::Uint(_)
740 | ty::Float(_)
741 | ty::Bool
742 | ty::Char
743 | ty::Str
744 | ty::Never
745 | ty::Ref(..)
746 | ty::RawPtr(_)
747 | ty::FnDef(..)
748 | ty::Error(_)
749 | ty::FnPtr(_) => true,
750 ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_unpin),
751 ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_unpin(),
752 ty::Adt(..)
753 | ty::Bound(..)
754 | ty::Closure(..)
755 | ty::Dynamic(..)
756 | ty::Foreign(_)
757 | ty::Generator(..)
758 | ty::GeneratorWitness(_)
759 | ty::Infer(_)
760 | ty::Opaque(..)
761 | ty::Param(_)
762 | ty::Placeholder(_)
763 | ty::Projection(_) => false,
764 }
765 }
766
cc61c64b
XL
767 /// If `ty.needs_drop(...)` returns `true`, then `ty` is definitely
768 /// non-copy and *might* have a destructor attached; if it returns
0731742a 769 /// `false`, then `ty` definitely has no destructor (i.e., no drop glue).
cc61c64b
XL
770 ///
771 /// (Note that this implies that if `ty` has a destructor attached,
772 /// then `needs_drop` will definitely return `true` for `ty`.)
e74abb32
XL
773 ///
774 /// Note that this method is used to check eligible types in unions.
cc61c64b 775 #[inline]
dc9dc135 776 pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
74b04a01
XL
777 // Avoid querying in simple cases.
778 match needs_drop_components(self, &tcx.data_layout) {
779 Err(AlwaysRequiresDrop) => true,
780 Ok(components) => {
781 let query_ty = match *components {
782 [] => return false,
783 // If we've got a single component, call the query with that
784 // to increase the chance that we hit the query cache.
785 [component_ty] => component_ty,
786 _ => self,
787 };
788 // This doesn't depend on regions, so try to minimize distinct
789 // query keys used.
790 let erased = tcx.normalize_erasing_regions(param_env, query_ty);
791 tcx.needs_drop_raw(param_env.and(erased))
792 }
793 }
cc61c64b
XL
794 }
795
17df50a5
XL
796 /// Checks if `ty` has has a significant drop.
797 ///
798 /// Note that this method can return false even if `ty` has a destructor
799 /// attached; even if that is the case then the adt has been marked with
800 /// the attribute `rustc_insignificant_dtor`.
801 ///
802 /// Note that this method is used to check for change in drop order for
803 /// 2229 drop reorder migration analysis.
804 #[inline]
805 pub fn has_significant_drop(
806 &'tcx self,
807 tcx: TyCtxt<'tcx>,
808 param_env: ty::ParamEnv<'tcx>,
809 ) -> bool {
810 // Avoid querying in simple cases.
811 match needs_drop_components(self, &tcx.data_layout) {
812 Err(AlwaysRequiresDrop) => true,
813 Ok(components) => {
814 let query_ty = match *components {
815 [] => return false,
816 // If we've got a single component, call the query with that
817 // to increase the chance that we hit the query cache.
818 [component_ty] => component_ty,
819 _ => self,
820 };
136023e0
XL
821
822 // FIXME(#86868): We should be canonicalizing, or else moving this to a method of inference
823 // context, or *something* like that, but for now just avoid passing inference
824 // variables to queries that can't cope with them. Instead, conservatively
825 // return "true" (may change drop order).
826 if query_ty.needs_infer() {
827 return true;
828 }
829
17df50a5
XL
830 // This doesn't depend on regions, so try to minimize distinct
831 // query keys used.
832 let erased = tcx.normalize_erasing_regions(param_env, query_ty);
833 tcx.has_significant_drop_raw(param_env.and(erased))
834 }
835 }
836 }
837
f035d41b
XL
838 /// Returns `true` if equality for this type is both reflexive and structural.
839 ///
840 /// Reflexive equality for a type is indicated by an `Eq` impl for that type.
841 ///
842 /// Primitive types (`u32`, `str`) have structural equality by definition. For composite data
843 /// types, equality for the type as a whole is structural when it is the same as equality
844 /// between all components (fields, array elements, etc.) of that type. For ADTs, structural
845 /// equality is indicated by an implementation of `PartialStructuralEq` and `StructuralEq` for
846 /// that type.
847 ///
848 /// This function is "shallow" because it may return `true` for a composite type whose fields
849 /// are not `StructuralEq`. For example, `[T; 4]` has structural equality regardless of `T`
850 /// because equality for arrays is determined by the equality of each array element. If you
851 /// want to know whether a given call to `PartialEq::eq` will proceed structurally all the way
852 /// down, you will need to use a type visitor.
853 #[inline]
854 pub fn is_structural_eq_shallow(&'tcx self, tcx: TyCtxt<'tcx>) -> bool {
1b1a35ee 855 match self.kind() {
f035d41b
XL
856 // Look for an impl of both `PartialStructuralEq` and `StructuralEq`.
857 Adt(..) => tcx.has_structural_eq_impls(self),
858
859 // Primitive types that satisfy `Eq`.
860 Bool | Char | Int(_) | Uint(_) | Str | Never => true,
861
862 // Composite types that satisfy `Eq` when all of their fields do.
863 //
864 // Because this function is "shallow", we return `true` for these composites regardless
865 // of the type(s) contained within.
866 Ref(..) | Array(..) | Slice(_) | Tuple(..) => true,
867
868 // Raw pointers use bitwise comparison.
869 RawPtr(_) | FnPtr(_) => true,
870
871 // Floating point numbers are not `Eq`.
872 Float(_) => false,
873
874 // Conservatively return `false` for all others...
875
876 // Anonymous function types
877 FnDef(..) | Closure(..) | Dynamic(..) | Generator(..) => false,
878
879 // Generic or inferred types
880 //
881 // FIXME(ecstaticmorse): Maybe we should `bug` here? This should probably only be
882 // called for known, fully-monomorphized types.
883 Projection(_) | Opaque(..) | Param(_) | Bound(..) | Placeholder(_) | Infer(_) => false,
884
885 Foreign(_) | GeneratorWitness(..) | Error(_) => false,
886 }
887 }
888
0731742a 889 pub fn same_type(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
1b1a35ee 890 match (&a.kind(), &b.kind()) {
0731742a
XL
891 (&Adt(did_a, substs_a), &Adt(did_b, substs_b)) => {
892 if did_a != did_b {
893 return false;
894 }
895
896 substs_a.types().zip(substs_b.types()).all(|(a, b)| Self::same_type(a, b))
897 }
898 _ => a == b,
899 }
900 }
901
e1599b0c
XL
902 /// Peel off all reference types in this type until there are none left.
903 ///
904 /// This method is idempotent, i.e. `ty.peel_refs().peel_refs() == ty.peel_refs()`.
905 ///
906 /// # Examples
907 ///
908 /// - `u8` -> `u8`
909 /// - `&'a mut u8` -> `u8`
910 /// - `&'a &'b u8` -> `u8`
911 /// - `&'a *const &'b u8 -> *const &'b u8`
912 pub fn peel_refs(&'tcx self) -> Ty<'tcx> {
913 let mut ty = self;
1b1a35ee 914 while let Ref(_, inner_ty, _) = ty.kind() {
e1599b0c
XL
915 ty = inner_ty;
916 }
917 ty
918 }
136023e0
XL
919
    /// Returns this type's cached `outer_exclusive_binder` De Bruijn index
    /// (stored on the type itself; this is a plain field accessor).
    pub fn outer_exclusive_binder(&'tcx self) -> DebruijnIndex {
        self.outer_exclusive_binder
    }
e9174d1e 923}
7cac9316 924
abe05a73
XL
/// Categorization of an explicit `self` receiver declaration (`self: SomeType`),
/// as produced by [`ExplicitSelf::determine`].
pub enum ExplicitSelf<'tcx> {
    /// `self: Self`, i.e. by value.
    ByValue,
    /// `self: &Self` / `self: &mut Self`, with the reference's region and mutability.
    ByReference(ty::Region<'tcx>, hir::Mutability),
    /// `self: *const Self` / `self: *mut Self`.
    ByRawPointer(hir::Mutability),
    /// `self: Box<Self>`.
    ByBox,
    /// Any other receiver type (requires the `arbitrary_self_types` feature).
    Other,
}
932
933impl<'tcx> ExplicitSelf<'tcx> {
934 /// Categorizes an explicit self declaration like `self: SomeType`
935 /// into either `self`, `&self`, `&mut self`, `Box<self>`, or
936 /// `Other`.
937 /// This is mainly used to require the arbitrary_self_types feature
938 /// in the case of `Other`, to improve error messages in the common cases,
939 /// and to make `Other` non-object-safe.
940 ///
941 /// Examples:
942 ///
943 /// ```
944 /// impl<'a> Foo for &'a T {
945 /// // Legal declarations:
946 /// fn method1(self: &&'a T); // ExplicitSelf::ByReference
947 /// fn method2(self: &'a T); // ExplicitSelf::ByValue
948 /// fn method3(self: Box<&'a T>); // ExplicitSelf::ByBox
949 /// fn method4(self: Rc<&'a T>); // ExplicitSelf::Other
950 ///
951 /// // Invalid cases will be caught by `check_method_receiver`:
952 /// fn method_err1(self: &'a mut T); // ExplicitSelf::Other
953 /// fn method_err2(self: &'static T) // ExplicitSelf::ByValue
954 /// fn method_err3(self: &&T) // ExplicitSelf::ByReference
955 /// }
956 /// ```
957 ///
dfeec247 958 pub fn determine<P>(self_arg_ty: Ty<'tcx>, is_self_ty: P) -> ExplicitSelf<'tcx>
abe05a73 959 where
dfeec247 960 P: Fn(Ty<'tcx>) -> bool,
abe05a73
XL
961 {
962 use self::ExplicitSelf::*;
963
1b1a35ee 964 match *self_arg_ty.kind() {
abe05a73 965 _ if is_self_ty(self_arg_ty) => ByValue,
dfeec247
XL
966 ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl),
967 ty::RawPtr(ty::TypeAndMut { ty, mutbl }) if is_self_ty(ty) => ByRawPointer(mutbl),
968 ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => ByBox,
969 _ => Other,
abe05a73
XL
970 }
971 }
972}
74b04a01
XL
973
974/// Returns a list of types such that the given type needs drop if and only if
975/// *any* of the returned types need drop. Returns `Err(AlwaysRequiresDrop)` if
976/// this type always needs drop.
977pub fn needs_drop_components(
978 ty: Ty<'tcx>,
979 target_layout: &TargetDataLayout,
980) -> Result<SmallVec<[Ty<'tcx>; 2]>, AlwaysRequiresDrop> {
1b1a35ee 981 match ty.kind() {
74b04a01
XL
982 ty::Infer(ty::FreshIntTy(_))
983 | ty::Infer(ty::FreshFloatTy(_))
984 | ty::Bool
985 | ty::Int(_)
986 | ty::Uint(_)
987 | ty::Float(_)
988 | ty::Never
989 | ty::FnDef(..)
990 | ty::FnPtr(_)
991 | ty::Char
992 | ty::GeneratorWitness(..)
993 | ty::RawPtr(_)
994 | ty::Ref(..)
995 | ty::Str => Ok(SmallVec::new()),
996
997 // Foreign types can never have destructors.
998 ty::Foreign(..) => Ok(SmallVec::new()),
999
f035d41b 1000 ty::Dynamic(..) | ty::Error(_) => Err(AlwaysRequiresDrop),
74b04a01
XL
1001
1002 ty::Slice(ty) => needs_drop_components(ty, target_layout),
1003 ty::Array(elem_ty, size) => {
1004 match needs_drop_components(elem_ty, target_layout) {
1005 Ok(v) if v.is_empty() => Ok(v),
1006 res => match size.val.try_to_bits(target_layout.pointer_size) {
1007 // Arrays of size zero don't need drop, even if their element
1008 // type does.
1009 Some(0) => Ok(SmallVec::new()),
1010 Some(_) => res,
1011 // We don't know which of the cases above we are in, so
1012 // return the whole type and let the caller decide what to
1013 // do.
1014 None => Ok(smallvec![ty]),
1015 },
1016 }
1017 }
1018 // If any field needs drop, then the whole tuple does.
1019 ty::Tuple(..) => ty.tuple_fields().try_fold(SmallVec::new(), move |mut acc, elem| {
1020 acc.extend(needs_drop_components(elem, target_layout)?);
1021 Ok(acc)
1022 }),
1023
1024 // These require checking for `Copy` bounds or `Adt` destructors.
1025 ty::Adt(..)
1026 | ty::Projection(..)
74b04a01
XL
1027 | ty::Param(_)
1028 | ty::Bound(..)
1029 | ty::Placeholder(..)
1030 | ty::Opaque(..)
1031 | ty::Infer(_)
ba9703b0
XL
1032 | ty::Closure(..)
1033 | ty::Generator(..) => Ok(smallvec![ty]),
74b04a01
XL
1034 }
1035}
1036
fc512014
XL
1037// Does the equivalent of
1038// ```
1039// let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
1040// folder.tcx().intern_*(&v)
1041// ```
1042pub fn fold_list<'tcx, F, T>(
1043 list: &'tcx ty::List<T>,
1044 folder: &mut F,
1045 intern: impl FnOnce(TyCtxt<'tcx>, &[T]) -> &'tcx ty::List<T>,
1046) -> &'tcx ty::List<T>
1047where
1048 F: TypeFolder<'tcx>,
1049 T: TypeFoldable<'tcx> + PartialEq + Copy,
1050{
1051 let mut iter = list.iter();
1052 // Look for the first element that changed
1053 if let Some((i, new_t)) = iter.by_ref().enumerate().find_map(|(i, t)| {
1054 let new_t = t.fold_with(folder);
1055 if new_t == t { None } else { Some((i, new_t)) }
1056 }) {
1057 // An element changed, prepare to intern the resulting list
1058 let mut new_list = SmallVec::<[_; 8]>::with_capacity(list.len());
1059 new_list.extend_from_slice(&list[..i]);
1060 new_list.push(new_t);
1061 new_list.extend(iter.map(|t| t.fold_with(folder)));
1062 intern(folder.tcx(), &new_list)
1063 } else {
1064 list
1065 }
1066}
1067
/// Marker error returned by `needs_drop_components` when the queried type
/// always needs drop, regardless of its components.
#[derive(Copy, Clone, Debug, HashStable, TyEncodable, TyDecodable)]
pub struct AlwaysRequiresDrop;
3dfed10e
XL
1070
1071/// Normalizes all opaque types in the given value, replacing them
1072/// with their underlying types.
1073pub fn normalize_opaque_types(
1074 tcx: TyCtxt<'tcx>,
1075 val: &'tcx List<ty::Predicate<'tcx>>,
1076) -> &'tcx List<ty::Predicate<'tcx>> {
1077 let mut visitor = OpaqueTypeExpander {
1078 seen_opaque_tys: FxHashSet::default(),
1079 expanded_cache: FxHashMap::default(),
1080 primary_def_id: None,
1081 found_recursion: false,
94222f64 1082 found_any_recursion: false,
3dfed10e
XL
1083 check_recursion: false,
1084 tcx,
1085 };
1086 val.fold_with(&mut visitor)
1087}
1088
/// Registers this module's query providers (currently just
/// `normalize_opaque_types`) into the global provider table.
pub fn provide(providers: &mut ty::query::Providers) {
    *providers = ty::query::Providers { normalize_opaque_types, ..*providers }
}