]> git.proxmox.com Git - rustc.git/blame - compiler/rustc_middle/src/ty/util.rs
New upstream version 1.54.0+dfsg1
[rustc.git] / compiler / rustc_middle / src / ty / util.rs
CommitLineData
9fa01778
XL
1//! Miscellaneous type-system utilities that are too small to deserve their own modules.
2
9fa01778 3use crate::ich::NodeIdHashingMode;
f9f354fc 4use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
3dfed10e 5use crate::ty::fold::TypeFolder;
ba9703b0 6use crate::ty::layout::IntegerExt;
9fa01778 7use crate::ty::query::TyCtxtAt;
29967ef6 8use crate::ty::subst::{GenericArgKind, Subst, SubstsRef};
9fa01778 9use crate::ty::TyKind::*;
29967ef6 10use crate::ty::{self, DefIdTree, List, Ty, TyCtxt, TypeFoldable};
dfeec247 11use rustc_apfloat::Float as _;
3dfed10e 12use rustc_ast as ast;
74b04a01 13use rustc_attr::{self as attr, SignedInt, UnsignedInt};
0731742a 14use rustc_data_structures::fx::{FxHashMap, FxHashSet};
dfeec247 15use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
ba9703b0 16use rustc_errors::ErrorReported;
dfeec247
XL
17use rustc_hir as hir;
18use rustc_hir::def::DefKind;
19use rustc_hir::def_id::DefId;
532ac7d7 20use rustc_macros::HashStable;
cdc7bbd5 21use rustc_span::DUMMY_SP;
ba9703b0 22use rustc_target::abi::{Integer, Size, TargetDataLayout};
74b04a01 23use smallvec::SmallVec;
cdc7bbd5 24use std::{fmt, iter};
e9174d1e 25
0531ce1d
XL
/// The resolved value of an enum variant's discriminant, paired with the
/// integer type it is interpreted at.
#[derive(Copy, Clone, Debug)]
pub struct Discr<'tcx> {
    /// Bit representation of the discriminant (e.g., `-128i8` is `0xFF_u128`).
    pub val: u128,
    /// The integer type of the discriminant (used to decide how to print
    /// and how to sign-extend `val`).
    pub ty: Ty<'tcx>,
}
8bb4bdeb 32
impl<'tcx> fmt::Display for Discr<'tcx> {
    /// Prints the discriminant as a signed value for signed integer types,
    /// and as the raw `u128` bits otherwise.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self.ty.kind() {
            ty::Int(ity) => {
                let size = ty::tls::with(|tcx| Integer::from_int_ty(&tcx, ity).size());
                let x = self.val;
                // sign extend the raw representation to be an i128
                let x = size.sign_extend(x) as i128;
                write!(fmt, "{}", x)
            }
            _ => write!(fmt, "{}", self.val),
        }
    }
}
8bb4bdeb 47
dfeec247 48fn signed_min(size: Size) -> i128 {
29967ef6 49 size.sign_extend(1_u128 << (size.bits() - 1)) as i128
dfeec247
XL
50}
51
52fn signed_max(size: Size) -> i128 {
74b04a01 53 i128::MAX >> (128 - size.bits())
dfeec247
XL
54}
55
56fn unsigned_max(size: Size) -> u128 {
74b04a01 57 u128::MAX >> (128 - size.bits())
dfeec247
XL
58}
59
/// Returns the size in bytes/bits and the signedness of the integer type
/// `ty`. Panics (via `bug!`) if `ty` is not `Int` or `Uint`; callers must
/// only pass integer types.
fn int_size_and_signed<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (Size, bool) {
    let (int, signed) = match *ty.kind() {
        Int(ity) => (Integer::from_int_ty(&tcx, ity), true),
        Uint(uty) => (Integer::from_uint_ty(&tcx, uty), false),
        _ => bug!("non integer discriminant"),
    };
    (int.size(), signed)
}
68
impl<'tcx> Discr<'tcx> {
    /// Adds `1` to the value and wraps around if the maximum for the type is reached.
    pub fn wrap_incr(self, tcx: TyCtxt<'tcx>) -> Self {
        self.checked_add(tcx, 1).0
    }

    /// Adds `n` to the discriminant, wrapping at the type's boundary.
    /// Returns the new discriminant and a flag that is `true` when the
    /// addition wrapped around.
    pub fn checked_add(self, tcx: TyCtxt<'tcx>, n: u128) -> (Self, bool) {
        let (size, signed) = int_size_and_signed(tcx, self.ty);
        let (val, oflo) = if signed {
            let min = signed_min(size);
            let max = signed_max(size);
            // Interpret the raw bits as a signed value of the right width.
            let val = size.sign_extend(self.val) as i128;
            assert!(n < (i128::MAX as u128));
            let n = n as i128;
            let oflo = val > max - n;
            // On overflow, wrap past `max` and continue counting from `min`.
            let val = if oflo { min + (n - (max - val) - 1) } else { val + n };
            // zero the upper bits
            let val = val as u128;
            let val = size.truncate(val);
            (val, oflo)
        } else {
            let max = unsigned_max(size);
            let val = self.val;
            let oflo = val > max - n;
            // On overflow, wrap past `max` and continue counting from zero.
            let val = if oflo { n - (max - val) - 1 } else { val + n };
            (val, oflo)
        };
        (Self { val, ty: self.ty }, oflo)
    }
}
98
/// Helpers for converting an `attr::IntType` (an enum's declared
/// `#[repr]` integer type) into discriminant values and `Ty`s.
pub trait IntTypeExt {
    /// Converts this integer-type description into the corresponding `Ty`.
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
    /// Returns the successor discriminant of `val`, or the initial
    /// discriminant when `val` is `None`; returns `None` on overflow.
    fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>>;
    /// Returns the discriminant assigned to the first variant (zero).
    fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx>;
}
104
impl IntTypeExt for attr::IntType {
    /// Maps each `SignedInt`/`UnsignedInt` variant to the interned
    /// primitive type in `tcx.types`.
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        match *self {
            SignedInt(ast::IntTy::I8) => tcx.types.i8,
            SignedInt(ast::IntTy::I16) => tcx.types.i16,
            SignedInt(ast::IntTy::I32) => tcx.types.i32,
            SignedInt(ast::IntTy::I64) => tcx.types.i64,
            SignedInt(ast::IntTy::I128) => tcx.types.i128,
            SignedInt(ast::IntTy::Isize) => tcx.types.isize,
            UnsignedInt(ast::UintTy::U8) => tcx.types.u8,
            UnsignedInt(ast::UintTy::U16) => tcx.types.u16,
            UnsignedInt(ast::UintTy::U32) => tcx.types.u32,
            UnsignedInt(ast::UintTy::U64) => tcx.types.u64,
            UnsignedInt(ast::UintTy::U128) => tcx.types.u128,
            UnsignedInt(ast::UintTy::Usize) => tcx.types.usize,
        }
    }

    /// The first variant's discriminant is always zero.
    fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx> {
        Discr { val: 0, ty: self.to_ty(tcx) }
    }

    /// Returns the next discriminant after `val`, or the initial one when
    /// `val` is `None`. Returns `None` if incrementing would overflow.
    fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>> {
        if let Some(val) = val {
            // The discriminant must already be of this repr type.
            assert_eq!(self.to_ty(tcx), val.ty);
            let (new, oflo) = val.checked_add(tcx, 1);
            if oflo { None } else { Some(new) }
        } else {
            Some(self.initial_discriminant(tcx))
        }
    }
}
137
impl<'tcx> TyCtxt<'tcx> {
    /// Creates a hash of the type `Ty` which will be the same no matter what crate
    /// context it's calculated within. This is used by the `type_id` intrinsic.
    pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
        let mut hasher = StableHasher::new();
        let mut hcx = self.create_stable_hashing_context();

        // We want the type_id be independent of the types free regions, so we
        // erase them. The erase_regions() call will also anonymize bound
        // regions, which is desirable too.
        let ty = self.erase_regions(ty);

        hcx.while_hashing_spans(false, |hcx| {
            hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
                ty.hash_stable(hcx, &mut hasher);
            });
        });
        hasher.finish()
    }

    /// Returns `true` if `ty` is an ADT with at least one field whose type
    /// is the error type (`ty::Error`).
    pub fn has_error_field(self, ty: Ty<'tcx>) -> bool {
        if let ty::Adt(def, substs) = *ty.kind() {
            for field in def.all_fields() {
                let field_ty = field.ty(self, substs);
                if let Error(_) = field_ty.kind() {
                    return true;
                }
            }
        }
        false
    }

    /// Attempts to return the deeply last field of nested structures, but
    /// does not apply any normalization in its search. Returns the same type
    /// if input `ty` is not a structure at all.
    pub fn struct_tail_without_normalization(self, ty: Ty<'tcx>) -> Ty<'tcx> {
        let tcx = self;
        // The identity closure means projections/opaques are left untouched.
        tcx.struct_tail_with_normalize(ty, |ty| ty)
    }

    /// Returns the deeply last field of nested structures, or the same type if
    /// not a structure at all. Corresponds to the only possible unsized field,
    /// and its type can be used to determine unsizing strategy.
    ///
    /// Should only be called if `ty` has no inference variables and does not
    /// need its lifetimes preserved (e.g. as part of codegen); otherwise
    /// normalization attempt may cause compiler bugs.
    pub fn struct_tail_erasing_lifetimes(
        self,
        ty: Ty<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> Ty<'tcx> {
        let tcx = self;
        tcx.struct_tail_with_normalize(ty, |ty| tcx.normalize_erasing_regions(param_env, ty))
    }

    /// Returns the deeply last field of nested structures, or the same type if
    /// not a structure at all. Corresponds to the only possible unsized field,
    /// and its type can be used to determine unsizing strategy.
    ///
    /// This is parameterized over the normalization strategy (i.e. how to
    /// handle `<T as Trait>::Assoc` and `impl Trait`); pass the identity
    /// function to indicate no normalization should take place.
    ///
    /// See also `struct_tail_erasing_lifetimes`, which is suitable for use
    /// during codegen.
    pub fn struct_tail_with_normalize(
        self,
        mut ty: Ty<'tcx>,
        normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
    ) -> Ty<'tcx> {
        // Guard against infinitely recursive types (e.g. mutually recursive
        // opaque types) with the session's recursion limit.
        for iteration in 0.. {
            if !self.sess.recursion_limit().value_within_limit(iteration) {
                return self.ty_error_with_message(
                    DUMMY_SP,
                    &format!("reached the recursion limit finding the struct tail for {}", ty),
                );
            }
            match *ty.kind() {
                ty::Adt(def, substs) => {
                    if !def.is_struct() {
                        break;
                    }
                    // Descend into the last (possibly unsized) field.
                    match def.non_enum_variant().fields.last() {
                        Some(f) => ty = f.ty(self, substs),
                        None => break,
                    }
                }

                ty::Tuple(tys) => {
                    if let Some((&last_ty, _)) = tys.split_last() {
                        ty = last_ty.expect_ty();
                    } else {
                        break;
                    }
                }

                ty::Projection(_) | ty::Opaque(..) => {
                    let normalized = normalize(ty);
                    if ty == normalized {
                        // Normalization made no progress; stop here.
                        return ty;
                    } else {
                        ty = normalized;
                    }
                }

                _ => {
                    break;
                }
            }
        }
        ty
    }

    /// Same as applying `struct_tail` on `source` and `target`, but only
    /// keeps going as long as the two types are instances of the same
    /// structure definitions.
    /// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
    /// whereas struct_tail produces `T`, and `Trait`, respectively.
    ///
    /// Should only be called if the types have no inference variables and do
    /// not need their lifetimes preserved (e.g., as part of codegen); otherwise,
    /// normalization attempt may cause compiler bugs.
    pub fn struct_lockstep_tails_erasing_lifetimes(
        self,
        source: Ty<'tcx>,
        target: Ty<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> (Ty<'tcx>, Ty<'tcx>) {
        let tcx = self;
        tcx.struct_lockstep_tails_with_normalize(source, target, |ty| {
            tcx.normalize_erasing_regions(param_env, ty)
        })
    }

    /// Same as applying `struct_tail` on `source` and `target`, but only
    /// keeps going as long as the two types are instances of the same
    /// structure definitions.
    /// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
    /// whereas struct_tail produces `T`, and `Trait`, respectively.
    ///
    /// See also `struct_lockstep_tails_erasing_lifetimes`, which is suitable for use
    /// during codegen.
    pub fn struct_lockstep_tails_with_normalize(
        self,
        source: Ty<'tcx>,
        target: Ty<'tcx>,
        normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
    ) -> (Ty<'tcx>, Ty<'tcx>) {
        let (mut a, mut b) = (source, target);
        loop {
            match (&a.kind(), &b.kind()) {
                (&Adt(a_def, a_substs), &Adt(b_def, b_substs))
                    if a_def == b_def && a_def.is_struct() =>
                {
                    // Same struct on both sides: step into the last field
                    // of each, in lockstep.
                    if let Some(f) = a_def.non_enum_variant().fields.last() {
                        a = f.ty(self, a_substs);
                        b = f.ty(self, b_substs);
                    } else {
                        break;
                    }
                }
                (&Tuple(a_tys), &Tuple(b_tys)) if a_tys.len() == b_tys.len() => {
                    if let Some(a_last) = a_tys.last() {
                        a = a_last.expect_ty();
                        b = b_tys.last().unwrap().expect_ty();
                    } else {
                        break;
                    }
                }
                (ty::Projection(_) | ty::Opaque(..), _)
                | (_, ty::Projection(_) | ty::Opaque(..)) => {
                    // If either side is a projection, attempt to
                    // progress via normalization. (Should be safe to
                    // apply to both sides as normalization is
                    // idempotent.)
                    let a_norm = normalize(a);
                    let b_norm = normalize(b);
                    if a == a_norm && b == b_norm {
                        break;
                    } else {
                        a = a_norm;
                        b = b_norm;
                    }
                }

                _ => break,
            }
        }
        (a, b)
    }

    /// Calculate the destructor of a given type.
    pub fn calculate_dtor(
        self,
        adt_did: DefId,
        validate: impl Fn(Self, DefId) -> Result<(), ErrorReported>,
    ) -> Option<ty::Destructor> {
        let drop_trait = self.lang_items().drop_trait()?;
        self.ensure().coherent_trait(drop_trait);

        let ty = self.type_of(adt_did);
        // Pick the first associated item (the `drop` fn) of the first `Drop`
        // impl that passes `validate`.
        let dtor_did = self.find_map_relevant_impl(drop_trait, ty, |impl_did| {
            if let Some(item) = self.associated_items(impl_did).in_definition_order().next() {
                if validate(self, impl_did).is_ok() {
                    return Some(item.def_id);
                }
            }
            None
        });

        Some(ty::Destructor { did: dtor_did? })
    }

    /// Returns the set of types that are required to be alive in
    /// order to run the destructor of `def` (see RFCs 769 and
    /// 1238).
    ///
    /// Note that this returns only the constraints for the
    /// destructor of `def` itself. For the destructors of the
    /// contents, you need `adt_dtorck_constraint`.
    pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) -> Vec<ty::subst::GenericArg<'tcx>> {
        let dtor = match def.destructor(self) {
            None => {
                debug!("destructor_constraints({:?}) - no dtor", def.did);
                return vec![];
            }
            Some(dtor) => dtor.did,
        };

        let impl_def_id = self.associated_item(dtor).container.id();
        let impl_generics = self.generics_of(impl_def_id);

        // We have a destructor - all the parameters that are not
        // pure_wrt_drop (i.e, don't have a #[may_dangle] attribute)
        // must be live.

        // We need to return the list of parameters from the ADTs
        // generics/substs that correspond to impure parameters on the
        // impl's generics. This is a bit ugly, but conceptually simple:
        //
        // Suppose our ADT looks like the following
        //
        //     struct S<X, Y, Z>(X, Y, Z);
        //
        // and the impl is
        //
        //     impl<#[may_dangle] P0, P1, P2> Drop for S<P1, P2, P0>
        //
        // We want to return the parameters (X, Y). For that, we match
        // up the item-substs <X, Y, Z> with the substs on the impl ADT,
        // <P1, P2, P0>, and then look up which of the impl substs refer to
        // parameters marked as pure.

        let impl_substs = match *self.type_of(impl_def_id).kind() {
            ty::Adt(def_, substs) if def_ == def => substs,
            _ => bug!(),
        };

        let item_substs = match *self.type_of(def.did).kind() {
            ty::Adt(def_, substs) if def_ == def => substs,
            _ => bug!(),
        };

        let result = iter::zip(item_substs, impl_substs)
            .filter(|&(_, k)| {
                match k.unpack() {
                    GenericArgKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => {
                        !impl_generics.region_param(ebr, self).pure_wrt_drop
                    }
                    GenericArgKind::Type(&ty::TyS { kind: ty::Param(ref pt), .. }) => {
                        !impl_generics.type_param(pt, self).pure_wrt_drop
                    }
                    GenericArgKind::Const(&ty::Const {
                        val: ty::ConstKind::Param(ref pc), ..
                    }) => !impl_generics.const_param(pc, self).pure_wrt_drop,
                    GenericArgKind::Lifetime(_)
                    | GenericArgKind::Type(_)
                    | GenericArgKind::Const(_) => {
                        // Not a type, const or region param: this should be reported
                        // as an error.
                        false
                    }
                }
            })
            .map(|(item_param, _)| item_param)
            .collect();
        debug!("destructor_constraint({:?}) = {:?}", def.did, result);
        result
    }

    /// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). Note
    /// that closures have a `DefId`, but the closure *expression* also
    /// has a `HirId` that is located within the context where the
    /// closure appears (and, sadly, a corresponding `NodeId`, since
    /// those are not yet phased out). The parent of the closure's
    /// `DefId` will also be the context where it appears.
    pub fn is_closure(self, def_id: DefId) -> bool {
        matches!(self.def_kind(def_id), DefKind::Closure | DefKind::Generator)
    }

    /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`).
    pub fn is_trait(self, def_id: DefId) -> bool {
        self.def_kind(def_id) == DefKind::Trait
    }

    /// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`),
    /// and `false` otherwise.
    pub fn is_trait_alias(self, def_id: DefId) -> bool {
        self.def_kind(def_id) == DefKind::TraitAlias
    }

    /// Returns `true` if this `DefId` refers to the implicit constructor for
    /// a tuple struct like `struct Foo(u32)`, and `false` otherwise.
    pub fn is_constructor(self, def_id: DefId) -> bool {
        matches!(self.def_kind(def_id), DefKind::Ctor(..))
    }

    /// Given the def-ID of a fn or closure, returns the def-ID of
    /// the innermost fn item that the closure is contained within.
    /// This is a significant `DefId` because, when we do
    /// type-checking, we type-check this fn item and all of its
    /// (transitive) closures together. Therefore, when we fetch the
    /// `typeck` the closure, for example, we really wind up
    /// fetching the `typeck` the enclosing fn item.
    pub fn closure_base_def_id(self, def_id: DefId) -> DefId {
        let mut def_id = def_id;
        // Walk up the def tree until we leave closure/generator territory.
        while self.is_closure(def_id) {
            def_id = self.parent(def_id).unwrap_or_else(|| {
                bug!("closure {:?} has no parent", def_id);
            });
        }
        def_id
    }

    /// Given the `DefId` and substs a closure, creates the type of
    /// `self` argument that the closure expects. For example, for a
    /// `Fn` closure, this would return a reference type `&T` where
    /// `T = closure_ty`.
    ///
    /// Returns `None` if this closure's kind has not yet been inferred.
    /// This should only be possible during type checking.
    ///
    /// Note that the return value is a late-bound region and hence
    /// wrapped in a binder.
    pub fn closure_env_ty(
        self,
        closure_def_id: DefId,
        closure_substs: SubstsRef<'tcx>,
        env_region: ty::RegionKind,
    ) -> Option<Ty<'tcx>> {
        let closure_ty = self.mk_closure(closure_def_id, closure_substs);
        let closure_kind_ty = closure_substs.as_closure().kind_ty();
        let closure_kind = closure_kind_ty.to_opt_closure_kind()?;
        let env_ty = match closure_kind {
            ty::ClosureKind::Fn => self.mk_imm_ref(self.mk_region(env_region), closure_ty),
            ty::ClosureKind::FnMut => self.mk_mut_ref(self.mk_region(env_region), closure_ty),
            ty::ClosureKind::FnOnce => closure_ty,
        };
        Some(env_ty)
    }

    /// Returns `true` if the node pointed to by `def_id` is a `static` item.
    pub fn is_static(self, def_id: DefId) -> bool {
        self.static_mutability(def_id).is_some()
    }

    /// Returns `true` if this is a `static` item with the `#[thread_local]` attribute.
    pub fn is_thread_local_static(self, def_id: DefId) -> bool {
        self.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL)
    }

    /// Returns `true` if the node pointed to by `def_id` is a mutable `static` item.
    pub fn is_mutable_static(self, def_id: DefId) -> bool {
        self.static_mutability(def_id) == Some(hir::Mutability::Mut)
    }

    /// Get the type of the pointer to the static that we use in MIR.
    pub fn static_ptr_ty(self, def_id: DefId) -> Ty<'tcx> {
        // Make sure that any constants in the static's type are evaluated.
        let static_ty = self.normalize_erasing_regions(ty::ParamEnv::empty(), self.type_of(def_id));

        // Make sure that accesses to unsafe statics end up using raw pointers.
        // For thread-locals, this needs to be kept in sync with `Rvalue::ty`.
        if self.is_mutable_static(def_id) {
            self.mk_mut_ptr(static_ty)
        } else if self.is_foreign_item(def_id) {
            self.mk_imm_ptr(static_ty)
        } else {
            self.mk_imm_ref(self.lifetimes.re_erased, static_ty)
        }
    }

    /// Expands the given impl trait type, stopping if the type is recursive.
    pub fn try_expand_impl_trait_type(
        self,
        def_id: DefId,
        substs: SubstsRef<'tcx>,
    ) -> Result<Ty<'tcx>, Ty<'tcx>> {
        let mut visitor = OpaqueTypeExpander {
            seen_opaque_tys: FxHashSet::default(),
            expanded_cache: FxHashMap::default(),
            primary_def_id: Some(def_id),
            found_recursion: false,
            check_recursion: true,
            tcx: self,
        };

        let expanded_type = visitor.expand_opaque_ty(def_id, substs).unwrap();
        if visitor.found_recursion { Err(expanded_type) } else { Ok(expanded_type) }
    }
}
550
3dfed10e
XL
/// Type folder that replaces `impl Trait` (opaque) types with their
/// underlying concrete types, with caching and recursion detection.
struct OpaqueTypeExpander<'tcx> {
    // Contains the DefIds of the opaque types that are currently being
    // expanded. When we expand an opaque type we insert the DefId of
    // that type, and when we finish expanding that type we remove
    // its DefId.
    seen_opaque_tys: FxHashSet<DefId>,
    // Cache of all expansions we've seen so far. This is a critical
    // optimization for some large types produced by async fn trees.
    expanded_cache: FxHashMap<(DefId, SubstsRef<'tcx>), Ty<'tcx>>,
    // The opaque type whose expansion was requested; recursion back to
    // this DefId is what `found_recursion` reports.
    primary_def_id: Option<DefId>,
    found_recursion: bool,
    /// Whether or not to check for recursive opaque types.
    /// This is `true` when we're explicitly checking for opaque type
    /// recursion, and 'false' otherwise to avoid unnecessary work.
    check_recursion: bool,
    tcx: TyCtxt<'tcx>,
}
568
impl<'tcx> OpaqueTypeExpander<'tcx> {
    /// Expands the opaque type `def_id` (applied to `substs`) to its
    /// concrete type, folding nested opaque types recursively. Returns
    /// `None` once recursion has been detected.
    fn expand_opaque_ty(&mut self, def_id: DefId, substs: SubstsRef<'tcx>) -> Option<Ty<'tcx>> {
        if self.found_recursion {
            return None;
        }
        let substs = substs.fold_with(self);
        if !self.check_recursion || self.seen_opaque_tys.insert(def_id) {
            // Reuse a previous expansion of the same (def_id, substs) pair
            // if we have one; otherwise substitute and fold, then cache.
            let expanded_ty = match self.expanded_cache.get(&(def_id, substs)) {
                Some(expanded_ty) => expanded_ty,
                None => {
                    let generic_ty = self.tcx.type_of(def_id);
                    let concrete_ty = generic_ty.subst(self.tcx, substs);
                    let expanded_ty = self.fold_ty(concrete_ty);
                    self.expanded_cache.insert((def_id, substs), expanded_ty);
                    expanded_ty
                }
            };
            if self.check_recursion {
                self.seen_opaque_tys.remove(&def_id);
            }
            Some(expanded_ty)
        } else {
            // If another opaque type that we contain is recursive, then it
            // will report the error, so we don't have to.
            self.found_recursion = def_id == *self.primary_def_id.as_ref().unwrap();
            None
        }
    }
}
598
impl<'tcx> TypeFolder<'tcx> for OpaqueTypeExpander<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    /// Expands opaque types in place; leaves other types untouched unless
    /// they contain opaque types, in which case we recurse into them.
    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
        if let ty::Opaque(def_id, substs) = t.kind {
            // On recursion, `expand_opaque_ty` returns `None`; keep `t` as-is.
            self.expand_opaque_ty(def_id, substs).unwrap_or(t)
        } else if t.has_opaque_types() {
            t.super_fold_with(self)
        } else {
            t
        }
    }
}
614
dc9dc135 615impl<'tcx> ty::TyS<'tcx> {
dfeec247
XL
    /// Returns the maximum value for the given numeric type (including `char`s)
    /// or returns `None` if the type is not numeric.
    /// For floats the "maximum" is positive infinity's bit pattern.
    pub fn numeric_max_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
        let val = match self.kind() {
            ty::Int(_) | ty::Uint(_) => {
                let (size, signed) = int_size_and_signed(tcx, self);
                let val = if signed { signed_max(size) as u128 } else { unsigned_max(size) };
                Some(val)
            }
            ty::Char => Some(std::char::MAX as u128),
            ty::Float(fty) => Some(match fty {
                ty::FloatTy::F32 => rustc_apfloat::ieee::Single::INFINITY.to_bits(),
                ty::FloatTy::F64 => rustc_apfloat::ieee::Double::INFINITY.to_bits(),
            }),
            _ => None,
        };
        // Intern the raw bits as a constant of type `self`.
        val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
    }
634
    /// Returns the minimum value for the given numeric type (including `char`s)
    /// or returns `None` if the type is not numeric.
    /// For floats the "minimum" is negative infinity's bit pattern.
    pub fn numeric_min_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
        let val = match self.kind() {
            ty::Int(_) | ty::Uint(_) => {
                let (size, signed) = int_size_and_signed(tcx, self);
                // For signed types, truncate the sign-extended minimum back
                // to the type's width; unsigned minimum is always zero.
                let val = if signed { size.truncate(signed_min(size) as u128) } else { 0 };
                Some(val)
            }
            ty::Char => Some(0),
            ty::Float(fty) => Some(match fty {
                ty::FloatTy::F32 => (-::rustc_apfloat::ieee::Single::INFINITY).to_bits(),
                ty::FloatTy::F64 => (-::rustc_apfloat::ieee::Double::INFINITY).to_bits(),
            }),
            _ => None,
        };
        // Intern the raw bits as a constant of type `self`.
        val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
    }
653
0731742a
XL
    /// Checks whether values of this type `T` are *moved* or *copied*
    /// when referenced -- this amounts to a check for whether `T:
    /// Copy`, but note that we **don't** consider lifetimes when
    /// doing this check. This means that we may generate MIR which
    /// does copies even when the type actually doesn't satisfy the
    /// full requirements for the `Copy` trait (cc #29149) -- this
    /// winds up being reported as an error during NLL borrow check.
    pub fn is_copy_modulo_regions(
        &'tcx self,
        tcx_at: TyCtxtAt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> bool {
        tcx_at.is_copy_raw(param_env.and(self))
    }
668
0731742a
XL
    /// Checks whether values of this type `T` have a size known at
    /// compile time (i.e., whether `T: Sized`). Lifetimes are ignored
    /// for the purposes of this check, so it can be an
    /// over-approximation in generic contexts, where one can have
    /// strange rules like `<T as Foo<'static>>::Bar: Sized` that
    /// actually carry lifetime requirements.
    pub fn is_sized(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Fast path first; fall back to the `is_sized_raw` query.
        self.is_trivially_sized(tcx_at.tcx) || tcx_at.is_sized_raw(param_env.and(self))
    }
678
0731742a
XL
    /// Checks whether values of this type `T` implement the `Freeze`
    /// trait -- frozen types are those that do not contain a
    /// `UnsafeCell` anywhere. This is a language concept used to
    /// distinguish "true immutability", which is relevant to
    /// optimization as well as the rules around static values. Note
    /// that the `Freeze` trait is not exposed to end users and is
    /// effectively an implementation detail.
    pub fn is_freeze(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Fast path first; fall back to the `is_freeze_raw` query.
        self.is_trivially_freeze() || tcx_at.is_freeze_raw(param_env.and(self))
    }
689
    /// Fast path helper for testing if a type is `Freeze`.
    ///
    /// Returning true means the type is known to be `Freeze`. Returning
    /// `false` means nothing -- could be `Freeze`, might not be.
    fn is_trivially_freeze(&self) -> bool {
        match self.kind() {
            // Primitives, pointers, and function types contain no
            // `UnsafeCell`, so they are unconditionally `Freeze`.
            ty::Int(_)
            | ty::Uint(_)
            | ty::Float(_)
            | ty::Bool
            | ty::Char
            | ty::Str
            | ty::Never
            | ty::Ref(..)
            | ty::RawPtr(_)
            | ty::FnDef(..)
            | ty::Error(_)
            | ty::FnPtr(_) => true,
            // Aggregates are trivially `Freeze` only if every element is.
            ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_freeze),
            ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_freeze(),
            // Everything else needs the full query to decide.
            ty::Adt(..)
            | ty::Bound(..)
            | ty::Closure(..)
            | ty::Dynamic(..)
            | ty::Foreign(_)
            | ty::Generator(..)
            | ty::GeneratorWitness(_)
            | ty::Infer(_)
            | ty::Opaque(..)
            | ty::Param(_)
            | ty::Placeholder(_)
            | ty::Projection(_) => false,
        }
    }
724
cdc7bbd5
XL
    /// Checks whether values of this type `T` implement the `Unpin` trait.
    pub fn is_unpin(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Fast path first; fall back to the `is_unpin_raw` query.
        self.is_trivially_unpin() || tcx_at.is_unpin_raw(param_env.and(self))
    }
729
    /// Fast path helper for testing if a type is `Unpin`.
    ///
    /// Returning true means the type is known to be `Unpin`. Returning
    /// `false` means nothing -- could be `Unpin`, might not be.
    fn is_trivially_unpin(&self) -> bool {
        match self.kind() {
            // Primitives, pointers, and function types are always `Unpin`.
            ty::Int(_)
            | ty::Uint(_)
            | ty::Float(_)
            | ty::Bool
            | ty::Char
            | ty::Str
            | ty::Never
            | ty::Ref(..)
            | ty::RawPtr(_)
            | ty::FnDef(..)
            | ty::Error(_)
            | ty::FnPtr(_) => true,
            // Aggregates are trivially `Unpin` only if every element is.
            ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_unpin),
            ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_unpin(),
            // Everything else needs the full query to decide.
            ty::Adt(..)
            | ty::Bound(..)
            | ty::Closure(..)
            | ty::Dynamic(..)
            | ty::Foreign(_)
            | ty::Generator(..)
            | ty::GeneratorWitness(_)
            | ty::Infer(_)
            | ty::Opaque(..)
            | ty::Param(_)
            | ty::Placeholder(_)
            | ty::Projection(_) => false,
        }
    }
764
cc61c64b
XL
    /// If `ty.needs_drop(...)` returns `true`, then `ty` is definitely
    /// non-copy and *might* have a destructor attached; if it returns
    /// `false`, then `ty` definitely has no destructor (i.e., no drop glue).
    ///
    /// (Note that this implies that if `ty` has a destructor attached,
    /// then `needs_drop` will definitely return `true` for `ty`.)
    ///
    /// Note that this method is used to check eligible types in unions.
    #[inline]
    pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Avoid querying in simple cases.
        match needs_drop_components(self, &tcx.data_layout) {
            Err(AlwaysRequiresDrop) => true,
            Ok(components) => {
                let query_ty = match *components {
                    [] => return false,
                    // If we've got a single component, call the query with that
                    // to increase the chance that we hit the query cache.
                    [component_ty] => component_ty,
                    _ => self,
                };
                // This doesn't depend on regions, so try to minimize distinct
                // query keys used.
                let erased = tcx.normalize_erasing_regions(param_env, query_ty);
                tcx.needs_drop_raw(param_env.and(erased))
            }
        }
    }
793
17df50a5
XL
    /// Checks if `ty` has a significant drop.
    ///
    /// Note that this method can return false even if `ty` has a destructor
    /// attached; even if that is the case then the adt has been marked with
    /// the attribute `rustc_insignificant_dtor`.
    ///
    /// Note that this method is used to check for change in drop order for
    /// 2229 drop reorder migration analysis.
    #[inline]
    pub fn has_significant_drop(
        &'tcx self,
        tcx: TyCtxt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> bool {
        // Avoid querying in simple cases.
        match needs_drop_components(self, &tcx.data_layout) {
            Err(AlwaysRequiresDrop) => true,
            Ok(components) => {
                let query_ty = match *components {
                    [] => return false,
                    // If we've got a single component, call the query with that
                    // to increase the chance that we hit the query cache.
                    [component_ty] => component_ty,
                    _ => self,
                };
                // This doesn't depend on regions, so try to minimize distinct
                // query keys used.
                let erased = tcx.normalize_erasing_regions(param_env, query_ty);
                tcx.has_significant_drop_raw(param_env.and(erased))
            }
        }
    }
826
f035d41b
XL
827 /// Returns `true` if equality for this type is both reflexive and structural.
828 ///
829 /// Reflexive equality for a type is indicated by an `Eq` impl for that type.
830 ///
831 /// Primitive types (`u32`, `str`) have structural equality by definition. For composite data
832 /// types, equality for the type as a whole is structural when it is the same as equality
833 /// between all components (fields, array elements, etc.) of that type. For ADTs, structural
834 /// equality is indicated by an implementation of `PartialStructuralEq` and `StructuralEq` for
835 /// that type.
836 ///
837 /// This function is "shallow" because it may return `true` for a composite type whose fields
838 /// are not `StructuralEq`. For example, `[T; 4]` has structural equality regardless of `T`
839 /// because equality for arrays is determined by the equality of each array element. If you
840 /// want to know whether a given call to `PartialEq::eq` will proceed structurally all the way
841 /// down, you will need to use a type visitor.
842 #[inline]
843 pub fn is_structural_eq_shallow(&'tcx self, tcx: TyCtxt<'tcx>) -> bool {
1b1a35ee 844 match self.kind() {
f035d41b
XL
845 // Look for an impl of both `PartialStructuralEq` and `StructuralEq`.
846 Adt(..) => tcx.has_structural_eq_impls(self),
847
848 // Primitive types that satisfy `Eq`.
849 Bool | Char | Int(_) | Uint(_) | Str | Never => true,
850
851 // Composite types that satisfy `Eq` when all of their fields do.
852 //
853 // Because this function is "shallow", we return `true` for these composites regardless
854 // of the type(s) contained within.
855 Ref(..) | Array(..) | Slice(_) | Tuple(..) => true,
856
857 // Raw pointers use bitwise comparison.
858 RawPtr(_) | FnPtr(_) => true,
859
860 // Floating point numbers are not `Eq`.
861 Float(_) => false,
862
863 // Conservatively return `false` for all others...
864
865 // Anonymous function types
866 FnDef(..) | Closure(..) | Dynamic(..) | Generator(..) => false,
867
868 // Generic or inferred types
869 //
870 // FIXME(ecstaticmorse): Maybe we should `bug` here? This should probably only be
871 // called for known, fully-monomorphized types.
872 Projection(_) | Opaque(..) | Param(_) | Bound(..) | Placeholder(_) | Infer(_) => false,
873
874 Foreign(_) | GeneratorWitness(..) | Error(_) => false,
875 }
876 }
877
0731742a 878 pub fn same_type(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
1b1a35ee 879 match (&a.kind(), &b.kind()) {
0731742a
XL
880 (&Adt(did_a, substs_a), &Adt(did_b, substs_b)) => {
881 if did_a != did_b {
882 return false;
883 }
884
885 substs_a.types().zip(substs_b.types()).all(|(a, b)| Self::same_type(a, b))
886 }
887 _ => a == b,
888 }
889 }
890
e1599b0c
XL
891 /// Peel off all reference types in this type until there are none left.
892 ///
893 /// This method is idempotent, i.e. `ty.peel_refs().peel_refs() == ty.peel_refs()`.
894 ///
895 /// # Examples
896 ///
897 /// - `u8` -> `u8`
898 /// - `&'a mut u8` -> `u8`
899 /// - `&'a &'b u8` -> `u8`
900 /// - `&'a *const &'b u8 -> *const &'b u8`
901 pub fn peel_refs(&'tcx self) -> Ty<'tcx> {
902 let mut ty = self;
1b1a35ee 903 while let Ref(_, inner_ty, _) = ty.kind() {
e1599b0c
XL
904 ty = inner_ty;
905 }
906 ty
907 }
e9174d1e 908}
7cac9316 909
abe05a73
XL
/// Categorization of an explicit `self` receiver type, as produced by
/// [`ExplicitSelf::determine`].
pub enum ExplicitSelf<'tcx> {
    /// The receiver is the `Self` type itself (`self: Self`).
    ByValue,
    /// The receiver is a reference to `Self` (`&self` / `&mut self`).
    ByReference(ty::Region<'tcx>, hir::Mutability),
    /// The receiver is a raw pointer to `Self` (`self: *const/*mut Self`).
    ByRawPointer(hir::Mutability),
    /// The receiver is `Box<Self>`.
    ByBox,
    /// Any other receiver type; used to require the `arbitrary_self_types`
    /// feature and to make such receivers non-object-safe.
    Other,
}
917
918impl<'tcx> ExplicitSelf<'tcx> {
919 /// Categorizes an explicit self declaration like `self: SomeType`
920 /// into either `self`, `&self`, `&mut self`, `Box<self>`, or
921 /// `Other`.
922 /// This is mainly used to require the arbitrary_self_types feature
923 /// in the case of `Other`, to improve error messages in the common cases,
924 /// and to make `Other` non-object-safe.
925 ///
926 /// Examples:
927 ///
928 /// ```
929 /// impl<'a> Foo for &'a T {
930 /// // Legal declarations:
931 /// fn method1(self: &&'a T); // ExplicitSelf::ByReference
932 /// fn method2(self: &'a T); // ExplicitSelf::ByValue
933 /// fn method3(self: Box<&'a T>); // ExplicitSelf::ByBox
934 /// fn method4(self: Rc<&'a T>); // ExplicitSelf::Other
935 ///
936 /// // Invalid cases will be caught by `check_method_receiver`:
937 /// fn method_err1(self: &'a mut T); // ExplicitSelf::Other
938 /// fn method_err2(self: &'static T) // ExplicitSelf::ByValue
939 /// fn method_err3(self: &&T) // ExplicitSelf::ByReference
940 /// }
941 /// ```
942 ///
dfeec247 943 pub fn determine<P>(self_arg_ty: Ty<'tcx>, is_self_ty: P) -> ExplicitSelf<'tcx>
abe05a73 944 where
dfeec247 945 P: Fn(Ty<'tcx>) -> bool,
abe05a73
XL
946 {
947 use self::ExplicitSelf::*;
948
1b1a35ee 949 match *self_arg_ty.kind() {
abe05a73 950 _ if is_self_ty(self_arg_ty) => ByValue,
dfeec247
XL
951 ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl),
952 ty::RawPtr(ty::TypeAndMut { ty, mutbl }) if is_self_ty(ty) => ByRawPointer(mutbl),
953 ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => ByBox,
954 _ => Other,
abe05a73
XL
955 }
956 }
957}
74b04a01
XL
958
/// Returns a list of types such that the given type needs drop if and only if
/// *any* of the returned types need drop. Returns `Err(AlwaysRequiresDrop)` if
/// this type always needs drop.
pub fn needs_drop_components(
    ty: Ty<'tcx>,
    target_layout: &TargetDataLayout,
) -> Result<SmallVec<[Ty<'tcx>; 2]>, AlwaysRequiresDrop> {
    match ty.kind() {
        // Scalar, pointer-like, and other primitive types never have drop
        // glue: no components to report.
        ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Bool
        | ty::Int(_)
        | ty::Uint(_)
        | ty::Float(_)
        | ty::Never
        | ty::FnDef(..)
        | ty::FnPtr(_)
        | ty::Char
        | ty::GeneratorWitness(..)
        | ty::RawPtr(_)
        | ty::Ref(..)
        | ty::Str => Ok(SmallVec::new()),

        // Foreign types can never have destructors.
        ty::Foreign(..) => Ok(SmallVec::new()),

        // Trait objects may hide arbitrary types behind them, and error types
        // are answered conservatively: both always require drop.
        ty::Dynamic(..) | ty::Error(_) => Err(AlwaysRequiresDrop),

        // A slice needs drop exactly when its element type does.
        ty::Slice(ty) => needs_drop_components(ty, target_layout),
        ty::Array(elem_ty, size) => {
            match needs_drop_components(elem_ty, target_layout) {
                // Element never needs drop, so neither does the array.
                Ok(v) if v.is_empty() => Ok(v),
                // Element might need drop: the answer depends on the length.
                res => match size.val.try_to_bits(target_layout.pointer_size) {
                    // Arrays of size zero don't need drop, even if their element
                    // type does.
                    Some(0) => Ok(SmallVec::new()),
                    Some(_) => res,
                    // We don't know which of the cases above we are in, so
                    // return the whole type and let the caller decide what to
                    // do.
                    None => Ok(smallvec![ty]),
                },
            }
        }
        // If any field needs drop, then the whole tuple does.
        ty::Tuple(..) => ty.tuple_fields().try_fold(SmallVec::new(), move |mut acc, elem| {
            acc.extend(needs_drop_components(elem, target_layout)?);
            Ok(acc)
        }),

        // These require checking for `Copy` bounds or `Adt` destructors.
        ty::Adt(..)
        | ty::Projection(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Opaque(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::Generator(..) => Ok(smallvec![ty]),
    }
}
1021
fc512014
XL
1022// Does the equivalent of
1023// ```
1024// let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
1025// folder.tcx().intern_*(&v)
1026// ```
1027pub fn fold_list<'tcx, F, T>(
1028 list: &'tcx ty::List<T>,
1029 folder: &mut F,
1030 intern: impl FnOnce(TyCtxt<'tcx>, &[T]) -> &'tcx ty::List<T>,
1031) -> &'tcx ty::List<T>
1032where
1033 F: TypeFolder<'tcx>,
1034 T: TypeFoldable<'tcx> + PartialEq + Copy,
1035{
1036 let mut iter = list.iter();
1037 // Look for the first element that changed
1038 if let Some((i, new_t)) = iter.by_ref().enumerate().find_map(|(i, t)| {
1039 let new_t = t.fold_with(folder);
1040 if new_t == t { None } else { Some((i, new_t)) }
1041 }) {
1042 // An element changed, prepare to intern the resulting list
1043 let mut new_list = SmallVec::<[_; 8]>::with_capacity(list.len());
1044 new_list.extend_from_slice(&list[..i]);
1045 new_list.push(new_t);
1046 new_list.extend(iter.map(|t| t.fold_with(folder)));
1047 intern(folder.tcx(), &new_list)
1048 } else {
1049 list
1050 }
1051}
1052
/// Marker error returned by [`needs_drop_components`] when a type is known to
/// always need drop (e.g. trait objects), without enumerating its components.
#[derive(Copy, Clone, Debug, HashStable, TyEncodable, TyDecodable)]
pub struct AlwaysRequiresDrop;
3dfed10e
XL
1055
1056/// Normalizes all opaque types in the given value, replacing them
1057/// with their underlying types.
1058pub fn normalize_opaque_types(
1059 tcx: TyCtxt<'tcx>,
1060 val: &'tcx List<ty::Predicate<'tcx>>,
1061) -> &'tcx List<ty::Predicate<'tcx>> {
1062 let mut visitor = OpaqueTypeExpander {
1063 seen_opaque_tys: FxHashSet::default(),
1064 expanded_cache: FxHashMap::default(),
1065 primary_def_id: None,
1066 found_recursion: false,
1067 check_recursion: false,
1068 tcx,
1069 };
1070 val.fold_with(&mut visitor)
1071}
1072
1073pub fn provide(providers: &mut ty::query::Providers) {
1074 *providers = ty::query::Providers { normalize_opaque_types, ..*providers }
1075}