]> git.proxmox.com Git - rustc.git/blob - src/librustc/ty/context.rs
3d154e43a9ae12972cd81f0111e328887102effe
[rustc.git] / src / librustc / ty / context.rs
1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! type context book-keeping
12
13 use dep_graph::DepGraph;
14 use dep_graph::{DepNode, DepConstructor};
15 use errors::DiagnosticBuilder;
16 use session::Session;
17 use session::config::{BorrowckMode, OutputFilenames, OptLevel};
18 use session::config::CrateType::*;
19 use middle;
20 use hir::{TraitCandidate, HirId, ItemLocalId};
21 use hir::def::{Def, Export};
22 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
23 use hir::map as hir_map;
24 use hir::map::DefPathHash;
25 use lint::{self, Lint};
26 use ich::{StableHashingContext, NodeIdHashingMode};
27 use infer::canonical::{CanonicalVarInfo, CanonicalVarInfos};
28 use infer::outlives::free_region_map::FreeRegionMap;
29 use middle::const_val::ConstVal;
30 use middle::cstore::{CrateStore, LinkMeta};
31 use middle::cstore::EncodedMetadata;
32 use middle::lang_items;
33 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
34 use middle::stability;
35 use mir::{self, Mir, interpret};
36 use mir::interpret::{Value, PrimVal};
37 use ty::subst::{Kind, Substs, Subst};
38 use ty::ReprOptions;
39 use ty::Instance;
40 use traits;
41 use traits::{Clause, Clauses, Goal, Goals};
42 use ty::{self, Ty, TypeAndMut};
43 use ty::{TyS, TypeVariants, Slice};
44 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorInterior, Region, Const};
45 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
46 use ty::RegionKind;
47 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
48 use ty::TypeVariants::*;
49 use ty::layout::{LayoutDetails, TargetDataLayout};
50 use ty::maps;
51 use ty::steal::Steal;
52 use ty::BindingMode;
53 use ty::CanonicalTy;
54 use util::nodemap::{DefIdSet, ItemLocalMap};
55 use util::nodemap::{FxHashMap, FxHashSet};
56 use rustc_data_structures::accumulate_vec::AccumulateVec;
57 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
58 StableHasher, StableHasherResult,
59 StableVec};
60 use arena::{TypedArena, SyncDroplessArena};
61 use rustc_data_structures::indexed_vec::IndexVec;
62 use rustc_data_structures::sync::{Lrc, Lock};
63 use std::any::Any;
64 use std::borrow::Borrow;
65 use std::cmp::Ordering;
66 use std::collections::hash_map::{self, Entry};
67 use std::hash::{Hash, Hasher};
68 use std::mem;
69 use std::ops::Deref;
70 use std::iter;
71 use std::sync::mpsc;
72 use std::sync::Arc;
73 use rustc_target::spec::abi;
74 use syntax::ast::{self, NodeId};
75 use syntax::attr;
76 use syntax::codemap::MultiSpan;
77 use syntax::feature_gate;
78 use syntax::symbol::{Symbol, keywords, InternedString};
79 use syntax_pos::Span;
80
81 use hir;
82
/// Bundles every arena the type context allocates from, so they can be
/// created together and outlive the `TyCtxt` that borrows them.
pub struct AllArenas<'tcx> {
    // Long-lived global data (generics, MIR, typeck tables, ...).
    pub global: GlobalArenas<'tcx>,
    // Arena that interned values (types, regions, ...) are allocated from;
    // handed to `CtxtInterners::new`.
    pub interner: SyncDroplessArena,
}
87
impl<'tcx> AllArenas<'tcx> {
    /// Creates a fresh, empty set of arenas.
    pub fn new() -> Self {
        AllArenas {
            global: GlobalArenas::new(),
            interner: SyncDroplessArena::new(),
        }
    }
}
96
/// Internal storage: one `TypedArena` per kind of long-lived value the
/// global context hands out references to.
pub struct GlobalArenas<'tcx> {
    // internings
    layout: TypedArena<LayoutDetails>,

    // references
    generics: TypedArena<ty::Generics>,
    trait_def: TypedArena<ty::TraitDef>,
    adt_def: TypedArena<ty::AdtDef>,
    // `Steal` wraps MIR that a later pass will take ownership of.
    steal_mir: TypedArena<Steal<Mir<'tcx>>>,
    mir: TypedArena<Mir<'tcx>>,
    tables: TypedArena<ty::TypeckTables<'tcx>>,
    /// miri allocations
    const_allocs: TypedArena<interpret::Allocation>,
}
112
impl<'tcx> GlobalArenas<'tcx> {
    /// Creates a fresh, empty set of typed arenas.
    pub fn new() -> GlobalArenas<'tcx> {
        GlobalArenas {
            layout: TypedArena::new(),
            generics: TypedArena::new(),
            trait_def: TypedArena::new(),
            adt_def: TypedArena::new(),
            steal_mir: TypedArena::new(),
            mir: TypedArena::new(),
            tables: TypedArena::new(),
            const_allocs: TypedArena::new(),
        }
    }
}
127
/// A lock-protected hash set of interned values, shared by the interner tables below.
type InternedSet<'tcx, T> = Lock<FxHashSet<Interned<'tcx, T>>>;
129
/// The set of interner tables for a type context. There is one global
/// instance, and `intern_ty` also supports a local instance layered on
/// top of it (see its doc comment).
pub struct CtxtInterners<'tcx> {
    /// The arena that types, regions, etc are allocated from
    arena: &'tcx SyncDroplessArena,

    /// Specifically use a speedy hash algorithm for these hash sets,
    /// they're accessed quite often.
    type_: InternedSet<'tcx, TyS<'tcx>>,
    type_list: InternedSet<'tcx, Slice<Ty<'tcx>>>,
    substs: InternedSet<'tcx, Substs<'tcx>>,
    canonical_var_infos: InternedSet<'tcx, Slice<CanonicalVarInfo>>,
    region: InternedSet<'tcx, RegionKind>,
    existential_predicates: InternedSet<'tcx, Slice<ExistentialPredicate<'tcx>>>,
    predicates: InternedSet<'tcx, Slice<Predicate<'tcx>>>,
    const_: InternedSet<'tcx, Const<'tcx>>,
    clauses: InternedSet<'tcx, Slice<Clause<'tcx>>>,
    goals: InternedSet<'tcx, Slice<Goal<'tcx>>>,
}
147
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
    /// Creates an interner whose allocations live in `arena`, with all
    /// tables initially empty.
    fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
        CtxtInterners {
            arena,
            type_: Default::default(),
            type_list: Default::default(),
            substs: Default::default(),
            region: Default::default(),
            existential_predicates: Default::default(),
            canonical_var_infos: Default::default(),
            predicates: Default::default(),
            const_: Default::default(),
            clauses: Default::default(),
            goals: Default::default(),
        }
    }

    /// Intern a type. global_interners is Some only if this is
    /// a local interner and global_interners is its counterpart.
    ///
    /// Lookup order: local table first, then the global table. A type
    /// whose flags say it contains nothing local (no inference
    /// types/regions) is allocated in the *global* arena so it can be
    /// shared; everything else goes into the local arena.
    fn intern_ty(&self, st: TypeVariants<'tcx>,
                 global_interners: Option<&CtxtInterners<'gcx>>)
                 -> Ty<'tcx> {
        let ty = {
            let mut interner = self.type_.borrow_mut();
            if let Some(&Interned(ty)) = interner.get(&st) {
                return ty;
            }
            // Also lock the global table (if distinct) for the duration,
            // so a global hit or insert happens under the same guard.
            let global_interner = global_interners.map(|interners| {
                (interners.type_.borrow_mut(), &interners.arena)
            });
            if let Some((ref type_, _)) = global_interner {
                if let Some(&Interned(ty)) = type_.get(&st) {
                    return ty;
                }
            }

            let flags = super::flags::FlagComputation::for_sty(&st);
            let ty_struct = TyS {
                sty: st,
                flags: flags.flags,
                region_depth: flags.depth,
            };

            // HACK(eddyb) Depend on flags being accurate to
            // determine that all contents are in the global tcx.
            // See comments on Lift for why we can't use that.
            if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
                if let Some((mut type_, arena)) = global_interner {
                    // SAFETY relies on the flag check above: nothing in
                    // `ty_struct` refers to the local arena, so widening
                    // the lifetime from 'tcx to 'gcx is claimed sound.
                    let ty_struct: TyS<'gcx> = unsafe {
                        mem::transmute(ty_struct)
                    };
                    let ty: Ty<'gcx> = arena.alloc(ty_struct);
                    type_.insert(Interned(ty));
                    return ty;
                }
            } else {
                // Make sure we don't end up with inference
                // types/regions in the global tcx.
                if global_interner.is_none() {
                    // Release the lock before panicking so `bug!` can't
                    // deadlock anything that formats types.
                    drop(interner);
                    bug!("Attempted to intern `{:?}` which contains \
                          inference types/regions in the global type context",
                         &ty_struct);
                }
            }

            // Don't be &mut TyS.
            let ty: Ty<'tcx> = self.arena.alloc(ty_struct);
            interner.insert(Interned(ty));
            ty
        };

        debug!("Interned type: {:?} Pointer: {:?}",
               ty, ty as *const TyS);
        ty
    }

}
226
/// Pre-interned instances of the primitive types, plus a few common
/// regions, so frequent lookups are plain field reads. Built once by
/// `CommonTypes::new`.
pub struct CommonTypes<'tcx> {
    pub bool: Ty<'tcx>,
    pub char: Ty<'tcx>,
    pub isize: Ty<'tcx>,
    pub i8: Ty<'tcx>,
    pub i16: Ty<'tcx>,
    pub i32: Ty<'tcx>,
    pub i64: Ty<'tcx>,
    pub i128: Ty<'tcx>,
    pub usize: Ty<'tcx>,
    pub u8: Ty<'tcx>,
    pub u16: Ty<'tcx>,
    pub u32: Ty<'tcx>,
    pub u64: Ty<'tcx>,
    pub u128: Ty<'tcx>,
    pub f32: Ty<'tcx>,
    pub f64: Ty<'tcx>,
    pub never: Ty<'tcx>,
    pub err: Ty<'tcx>,

    pub re_empty: Region<'tcx>,
    pub re_static: Region<'tcx>,
    pub re_erased: Region<'tcx>,
}
251
/// Read-only view of one `ItemLocalMap` inside a `TypeckTables`, carrying
/// the table's `local_id_root` so every lookup can be validated against it.
pub struct LocalTableInContext<'a, V: 'a> {
    local_id_root: Option<DefId>,
    data: &'a ItemLocalMap<V>
}
256
257 /// Validate that the given HirId (respectively its `local_id` part) can be
258 /// safely used as a key in the tables of a TypeckTable. For that to be
259 /// the case, the HirId must have the same `owner` as all the other IDs in
260 /// this table (signified by `local_id_root`). Otherwise the HirId
261 /// would be in a different frame of reference and using its `local_id`
262 /// would result in lookup errors, or worse, in silently wrong data being
263 /// stored/returned.
264 fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
265 hir_id: hir::HirId,
266 mut_access: bool) {
267 if cfg!(debug_assertions) {
268 if let Some(local_id_root) = local_id_root {
269 if hir_id.owner != local_id_root.index {
270 ty::tls::with(|tcx| {
271 let node_id = tcx.hir
272 .definitions()
273 .find_node_for_hir_id(hir_id);
274
275 bug!("node {} with HirId::owner {:?} cannot be placed in \
276 TypeckTables with local_id_root {:?}",
277 tcx.hir.node_to_string(node_id),
278 DefId::local(hir_id.owner),
279 local_id_root)
280 });
281 }
282 } else {
283 // We use "Null Object" TypeckTables in some of the analysis passes.
284 // These are just expected to be empty and their `local_id_root` is
285 // `None`. Therefore we cannot verify whether a given `HirId` would
286 // be a valid key for the given table. Instead we make sure that
287 // nobody tries to write to such a Null Object table.
288 if mut_access {
289 bug!("access to invalid TypeckTables")
290 }
291 }
292 }
293 }
294
295 impl<'a, V> LocalTableInContext<'a, V> {
296 pub fn contains_key(&self, id: hir::HirId) -> bool {
297 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
298 self.data.contains_key(&id.local_id)
299 }
300
301 pub fn get(&self, id: hir::HirId) -> Option<&V> {
302 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
303 self.data.get(&id.local_id)
304 }
305
306 pub fn iter(&self) -> hash_map::Iter<hir::ItemLocalId, V> {
307 self.data.iter()
308 }
309 }
310
impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
    type Output = V;

    /// Panicking lookup; use `get` for the fallible variant.
    fn index(&self, key: hir::HirId) -> &V {
        self.get(key).expect("LocalTableInContext: key not found")
    }
}
318
/// Mutable counterpart of `LocalTableInContext`; every access is validated
/// against `local_id_root` with `mut_access = true`.
pub struct LocalTableInContextMut<'a, V: 'a> {
    local_id_root: Option<DefId>,
    data: &'a mut ItemLocalMap<V>
}
323
324 impl<'a, V> LocalTableInContextMut<'a, V> {
325 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
326 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
327 self.data.get_mut(&id.local_id)
328 }
329
330 pub fn entry(&mut self, id: hir::HirId) -> Entry<hir::ItemLocalId, V> {
331 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
332 self.data.entry(id.local_id)
333 }
334
335 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
336 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
337 self.data.insert(id.local_id, val)
338 }
339
340 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
341 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
342 self.data.remove(&id.local_id)
343 }
344 }
345
/// The result tables of type-checking one body, with most maps keyed by
/// `ItemLocalId` relative to `local_id_root`.
#[derive(RustcEncodable, RustcDecodable, Debug)]
pub struct TypeckTables<'tcx> {
    /// The HirId::owner all ItemLocalIds in this table are relative to.
    pub local_id_root: Option<DefId>,

    /// Resolved definitions for `<T>::X` associated paths and
    /// method calls, including those of overloaded operators.
    type_dependent_defs: ItemLocalMap<Def>,

    /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
    /// or patterns (`S { field }`). The index is often useful by itself, but to learn more
    /// about the field you also need definition of the variant to which the field
    /// belongs, but it may not exist if it's a tuple field (`tuple.0`).
    field_indices: ItemLocalMap<usize>,

    /// Stores the canonicalized types provided by the user. See also `UserAssertTy` statement in
    /// MIR.
    user_provided_tys: ItemLocalMap<CanonicalTy<'tcx>>,

    /// Stores the types for various nodes in the AST. Note that this table
    /// is not guaranteed to be populated until after typeck. See
    /// typeck::check::fn_ctxt for details.
    node_types: ItemLocalMap<Ty<'tcx>>,

    /// Stores the type parameters which were substituted to obtain the type
    /// of this node. This only applies to nodes that refer to entities
    /// parameterized by type parameters, such as generic fns, types, or
    /// other items.
    node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,

    /// Adjustments (such as auto-deref or auto-ref) applied to an
    /// expression, in application order; see `expr_adjustments` and
    /// `expr_ty_adjusted`.
    adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,

    /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
    pat_binding_modes: ItemLocalMap<BindingMode>,

    /// Stores the types which were implicitly dereferenced in pattern binding modes
    /// for later usage in HAIR lowering. For example,
    ///
    /// ```
    /// match &&Some(5i32) {
    ///     Some(n) => {},
    ///     _ => {},
    /// }
    /// ```
    /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
    ///
    /// See:
    /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
    pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,

    /// Borrows: per-upvar capture information for closures, indexed by
    /// `UpvarId` (see the `upvar_capture` accessor).
    pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,

    /// Records the reasons that we picked the kind of each closure;
    /// not all closures are present in the map.
    closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,

    /// For each fn, records the "liberated" types of its arguments
    /// and return type. Liberated means that all bound regions
    /// (including late-bound regions) are replaced with free
    /// equivalents. This table is not used in trans (since regions
    /// are erased there) and hence is not serialized to metadata.
    liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,

    /// For each FRU expression, record the normalized types of the fields
    /// of the struct - this is needed because it is non-trivial to
    /// normalize while preserving regions. This table is used only in
    /// MIR construction and hence is not serialized to metadata.
    fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,

    /// Maps a cast expression to its kind. This is keyed on the
    /// *from* expression of the cast, not the cast itself.
    cast_kinds: ItemLocalMap<ty::cast::CastKind>,

    /// Set of trait imports actually used in the method resolution.
    /// This is used for warning unused imports. During type
    /// checking, this `Lrc` should not be cloned: it must have a ref-count
    /// of 1 so that we can insert things into the set mutably.
    pub used_trait_imports: Lrc<DefIdSet>,

    /// If any errors occurred while type-checking this body,
    /// this field will be set to `true`.
    pub tainted_by_errors: bool,

    /// Stores the free-region relationships that were deduced from
    /// its where clauses and parameter types. These are then
    /// read-again by borrowck.
    pub free_region_map: FreeRegionMap<'tcx>,
}
435
impl<'tcx> TypeckTables<'tcx> {
    /// Creates an empty table set. A `local_id_root` of `None` yields a
    /// "Null Object" table: readable but never writable (see
    /// `validate_hir_id_for_typeck_tables`).
    pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
        TypeckTables {
            local_id_root,
            type_dependent_defs: ItemLocalMap(),
            field_indices: ItemLocalMap(),
            user_provided_tys: ItemLocalMap(),
            node_types: ItemLocalMap(),
            node_substs: ItemLocalMap(),
            adjustments: ItemLocalMap(),
            pat_binding_modes: ItemLocalMap(),
            pat_adjustments: ItemLocalMap(),
            upvar_capture_map: FxHashMap(),
            closure_kind_origins: ItemLocalMap(),
            liberated_fn_sigs: ItemLocalMap(),
            fru_field_types: ItemLocalMap(),
            cast_kinds: ItemLocalMap(),
            used_trait_imports: Lrc::new(DefIdSet()),
            tainted_by_errors: false,
            free_region_map: FreeRegionMap::new(),
        }
    }

    /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
    pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
        match *qpath {
            hir::QPath::Resolved(_, ref path) => path.def,
            hir::QPath::TypeRelative(..) => {
                // Missing entries fall back to `Def::Err` rather than panicking.
                validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
                self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
            }
        }
    }

    /// Read-only view of `type_dependent_defs`.
    pub fn type_dependent_defs(&self) -> LocalTableInContext<Def> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.type_dependent_defs
        }
    }

    /// Mutable view of `type_dependent_defs`.
    pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<Def> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.type_dependent_defs
        }
    }

    /// Read-only view of `field_indices`.
    pub fn field_indices(&self) -> LocalTableInContext<usize> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.field_indices
        }
    }

    /// Mutable view of `field_indices`.
    pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<usize> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.field_indices
        }
    }

    /// Read-only view of `user_provided_tys`.
    pub fn user_provided_tys(&self) -> LocalTableInContext<CanonicalTy<'tcx>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.user_provided_tys
        }
    }

    /// Mutable view of `user_provided_tys`.
    pub fn user_provided_tys_mut(&mut self) -> LocalTableInContextMut<CanonicalTy<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.user_provided_tys
        }
    }

    /// Read-only view of `node_types`.
    pub fn node_types(&self) -> LocalTableInContext<Ty<'tcx>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.node_types
        }
    }

    /// Mutable view of `node_types`.
    pub fn node_types_mut(&mut self) -> LocalTableInContextMut<Ty<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.node_types
        }
    }

    /// Returns the recorded type of the node; ICEs (via `bug!`) if none
    /// was recorded. Use `node_id_to_type_opt` for a fallible lookup.
    pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
        match self.node_id_to_type_opt(id) {
            Some(ty) => ty,
            None => {
                bug!("node_id_to_type: no type for node `{}`",
                     tls::with(|tcx| {
                         let id = tcx.hir.definitions().find_node_for_hir_id(id);
                         tcx.hir.node_to_string(id)
                     }))
            }
        }
    }

    /// Returns the recorded type of the node, if any.
    pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.node_types.get(&id.local_id).cloned()
    }

    /// Mutable view of `node_substs`.
    pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<&'tcx Substs<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.node_substs
        }
    }

    /// Returns the substitutions recorded for the node, or the empty
    /// substs when none were recorded.
    pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.node_substs.get(&id.local_id).cloned().unwrap_or(Substs::empty())
    }

    /// Returns the substitutions recorded for the node, if any.
    pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.node_substs.get(&id.local_id).cloned()
    }

    // Returns the type of a pattern as a monotype. Like @expr_ty, this function
    // doesn't provide type parameter substitutions.
    pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
        self.node_id_to_type(pat.hir_id)
    }

    /// Fallible variant of `pat_ty`.
    pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
        self.node_id_to_type_opt(pat.hir_id)
    }

    // Returns the type of an expression as a monotype.
    //
    // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
    // some cases, we insert `Adjustment` annotations such as auto-deref or
    // auto-ref. The type returned by this function does not consider such
    // adjustments. See `expr_ty_adjusted()` instead.
    //
    // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
    // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
    // instead of "fn(ty) -> T with T = isize".
    pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
        self.node_id_to_type(expr.hir_id)
    }

    /// Fallible variant of `expr_ty`.
    pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
        self.node_id_to_type_opt(expr.hir_id)
    }

    /// Read-only view of `adjustments`.
    pub fn adjustments(&self) -> LocalTableInContext<Vec<ty::adjustment::Adjustment<'tcx>>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.adjustments
        }
    }

    /// Mutable view of `adjustments`.
    pub fn adjustments_mut(&mut self)
                           -> LocalTableInContextMut<Vec<ty::adjustment::Adjustment<'tcx>>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.adjustments
        }
    }

    /// Returns the adjustments recorded for `expr`, or `&[]` if none.
    pub fn expr_adjustments(&self, expr: &hir::Expr)
                            -> &[ty::adjustment::Adjustment<'tcx>] {
        validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
        self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
    }

    /// Returns the type of `expr`, considering any `Adjustment`
    /// entry recorded for that expression.
    pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
        self.expr_adjustments(expr)
            .last()
            .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
    }

    /// Fallible variant of `expr_ty_adjusted`.
    pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
        self.expr_adjustments(expr)
            .last()
            .map(|adj| adj.target)
            .or_else(|| self.expr_ty_opt(expr))
    }

    /// Returns true if `expr` resolved to a method (a method call or an
    /// overloaded operator), as recorded in `type_dependent_defs`.
    pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
        // Only paths and method calls/overloaded operators have
        // entries in type_dependent_defs, ignore the former here.
        if let hir::ExprPath(_) = expr.node {
            return false;
        }

        match self.type_dependent_defs().get(expr.hir_id) {
            Some(&Def::Method(_)) => true,
            _ => false
        }
    }

    /// Read-only view of `pat_binding_modes`.
    pub fn pat_binding_modes(&self) -> LocalTableInContext<BindingMode> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.pat_binding_modes
        }
    }

    /// Mutable view of `pat_binding_modes`.
    pub fn pat_binding_modes_mut(&mut self)
                           -> LocalTableInContextMut<BindingMode> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.pat_binding_modes
        }
    }

    /// Read-only view of `pat_adjustments`.
    pub fn pat_adjustments(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.pat_adjustments,
        }
    }

    /// Mutable view of `pat_adjustments`.
    pub fn pat_adjustments_mut(&mut self)
                           -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.pat_adjustments,
        }
    }

    /// Returns the capture information for `upvar_id`; panics if absent.
    pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
        self.upvar_capture_map[&upvar_id]
    }

    /// Read-only view of `closure_kind_origins`.
    pub fn closure_kind_origins(&self) -> LocalTableInContext<(Span, ast::Name)> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.closure_kind_origins
        }
    }

    /// Mutable view of `closure_kind_origins`.
    pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<(Span, ast::Name)> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.closure_kind_origins
        }
    }

    /// Read-only view of `liberated_fn_sigs`.
    pub fn liberated_fn_sigs(&self) -> LocalTableInContext<ty::FnSig<'tcx>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.liberated_fn_sigs
        }
    }

    /// Mutable view of `liberated_fn_sigs`.
    pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<ty::FnSig<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.liberated_fn_sigs
        }
    }

    /// Read-only view of `fru_field_types`.
    pub fn fru_field_types(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.fru_field_types
        }
    }

    /// Mutable view of `fru_field_types`.
    pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.fru_field_types
        }
    }

    /// Read-only view of `cast_kinds`.
    pub fn cast_kinds(&self) -> LocalTableInContext<ty::cast::CastKind> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.cast_kinds
        }
    }

    /// Mutable view of `cast_kinds`.
    pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<ty::cast::CastKind> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.cast_kinds
        }
    }
}
728
impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a>,
                                          hasher: &mut StableHasher<W>) {
        // Exhaustive destructuring (no `..`): adding a field without
        // deciding how to hash it becomes a compile error.
        let ty::TypeckTables {
            local_id_root,
            ref type_dependent_defs,
            ref field_indices,
            ref user_provided_tys,
            ref node_types,
            ref node_substs,
            ref adjustments,
            ref pat_binding_modes,
            ref pat_adjustments,
            ref upvar_capture_map,
            ref closure_kind_origins,
            ref liberated_fn_sigs,
            ref fru_field_types,

            ref cast_kinds,

            ref used_trait_imports,
            tainted_by_errors,
            ref free_region_map,
        } = *self;

        hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
            type_dependent_defs.hash_stable(hcx, hasher);
            field_indices.hash_stable(hcx, hasher);
            user_provided_tys.hash_stable(hcx, hasher);
            node_types.hash_stable(hcx, hasher);
            node_substs.hash_stable(hcx, hasher);
            adjustments.hash_stable(hcx, hasher);
            pat_binding_modes.hash_stable(hcx, hasher);
            pat_adjustments.hash_stable(hcx, hasher);
            // UpvarIds contain raw DefIndexes, which are not stable across
            // compilations; map each side through its DefPathHash instead.
            // Note that `local_id_root` itself is only used to reconstruct
            // those DefIds and is not hashed directly.
            hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
                let ty::UpvarId {
                    var_id,
                    closure_expr_id
                } = *up_var_id;

                let local_id_root =
                    local_id_root.expect("trying to hash invalid TypeckTables");

                let var_owner_def_id = DefId {
                    krate: local_id_root.krate,
                    index: var_id.owner,
                };
                let closure_def_id = DefId {
                    krate: local_id_root.krate,
                    index: closure_expr_id.to_def_id().index,
                };
                (hcx.def_path_hash(var_owner_def_id),
                 var_id.local_id,
                 hcx.def_path_hash(closure_def_id))
            });

            closure_kind_origins.hash_stable(hcx, hasher);
            liberated_fn_sigs.hash_stable(hcx, hasher);
            fru_field_types.hash_stable(hcx, hasher);
            cast_kinds.hash_stable(hcx, hasher);
            used_trait_imports.hash_stable(hcx, hasher);
            tainted_by_errors.hash_stable(hcx, hasher);
            free_region_map.hash_stable(hcx, hasher);
        })
    }
}
796
impl<'tcx> CommonTypes<'tcx> {
    /// Interns every primitive type (and the common regions) once, up
    /// front, so subsequent uses are cheap pointer copies.
    fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
        // Intern a type variant; `None` means "no separate global interner"
        // (these are interned directly in `interners`).
        let mk = |sty| interners.intern_ty(sty, None);
        // Intern a region, reusing an existing allocation when possible.
        let mk_region = |r| {
            if let Some(r) = interners.region.borrow().get(&r) {
                return r.0;
            }
            let r = interners.arena.alloc(r);
            interners.region.borrow_mut().insert(Interned(r));
            &*r
        };
        CommonTypes {
            bool: mk(TyBool),
            char: mk(TyChar),
            never: mk(TyNever),
            err: mk(TyError),
            isize: mk(TyInt(ast::IntTy::Isize)),
            i8: mk(TyInt(ast::IntTy::I8)),
            i16: mk(TyInt(ast::IntTy::I16)),
            i32: mk(TyInt(ast::IntTy::I32)),
            i64: mk(TyInt(ast::IntTy::I64)),
            i128: mk(TyInt(ast::IntTy::I128)),
            usize: mk(TyUint(ast::UintTy::Usize)),
            u8: mk(TyUint(ast::UintTy::U8)),
            u16: mk(TyUint(ast::UintTy::U16)),
            u32: mk(TyUint(ast::UintTy::U32)),
            u64: mk(TyUint(ast::UintTy::U64)),
            u128: mk(TyUint(ast::UintTy::U128)),
            f32: mk(TyFloat(ast::FloatTy::F32)),
            f64: mk(TyFloat(ast::FloatTy::F64)),

            re_empty: mk_region(RegionKind::ReEmpty),
            re_static: mk_region(RegionKind::ReStatic),
            re_erased: mk_region(RegionKind::ReErased),
        }
    }
}
834
/// The central data structure of the compiler. It stores references
/// to the various **arenas** and also houses the results of the
/// various **compiler queries** that have been performed. See the
/// [rustc guide] for more details.
///
/// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/ty.html
#[derive(Copy, Clone)]
pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    /// The global context this handle points into (also reachable
    /// through `Deref`).
    gcx: &'a GlobalCtxt<'gcx>,
    /// Interners for `'tcx` values — presumably the global interners or a
    /// shorter-lived local set layered on them (see `CtxtInterners::intern_ty`).
    interners: &'a CtxtInterners<'tcx>
}
846
impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
    type Target = &'a GlobalCtxt<'gcx>;
    /// Lets `tcx.field` reach `GlobalCtxt` fields directly.
    fn deref(&self) -> &Self::Target {
        &self.gcx
    }
}
853
/// The global, longest-lived context, accessed through `TyCtxt` handles.
pub struct GlobalCtxt<'tcx> {
    // Arenas for long-lived values (see `GlobalArenas`).
    global_arenas: &'tcx GlobalArenas<'tcx>,
    // The global interner tables (see `CtxtInterners`).
    global_interners: CtxtInterners<'tcx>,

    // Access to crate metadata (trait object; concrete store lives elsewhere).
    cstore: &'tcx dyn CrateStore,

    pub sess: &'tcx Session,

    pub dep_graph: DepGraph,

    /// This provides access to the incr. comp. on-disk cache for query results.
    /// Do not access this directly. It is only meant to be used by
    /// `DepGraph::try_mark_green()` and the query infrastructure in `ty::maps`.
    pub(crate) on_disk_query_result_cache: maps::OnDiskCache<'tcx>,

    /// Common types, pre-interned for your convenience.
    pub types: CommonTypes<'tcx>,

    /// Map indicating what traits are in scope for places where this
    /// is relevant; generated by resolve.
    trait_map: FxHashMap<DefIndex,
                         Lrc<FxHashMap<ItemLocalId,
                                       Lrc<StableVec<TraitCandidate>>>>>,

    /// Export map produced by name resolution.
    export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,

    pub hir: hir_map::Map<'tcx>,

    /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
    /// as well as all upstream crates. Only populated in incremental mode.
    pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,

    pub maps: maps::Maps<'tcx>,

    // Records the free variables referenced by every closure
    // expression. Do not track deps for this, just recompute it from
    // scratch every time.
    freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,

    maybe_unused_trait_imports: FxHashSet<DefId>,

    maybe_unused_extern_crates: Vec<(DefId, Span)>,

    // Internal cache for metadata decoding. No need to track deps on this.
    pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,

    /// Caches the results of trait selection. This cache is used
    /// for things that do not have to do with the parameters in scope.
    pub selection_cache: traits::SelectionCache<'tcx>,

    /// Caches the results of trait evaluation. This cache is used
    /// for things that do not have to do with the parameters in scope.
    /// Merge this with `selection_cache`?
    pub evaluation_cache: traits::EvaluationCache<'tcx>,

    /// The definite name of the current crate after taking into account
    /// attributes, commandline parameters, etc.
    pub crate_name: Symbol,

    /// Data layout specification for the current target.
    pub data_layout: TargetDataLayout,

    // Deduplicates interned `attr::Stability` references.
    stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,

    pub interpret_interner: InterpretInterner<'tcx>,

    // Deduplicates interned `LayoutDetails` references.
    layout_interner: Lock<FxHashSet<&'tcx LayoutDetails>>,

    /// A general purpose channel to throw data out the back towards LLVM worker
    /// threads.
    ///
    /// This is intended to only get used during the trans phase of the compiler
    /// when satisfying the query for a particular codegen unit. Internally in
    /// the query it'll send data along this channel to get processed later.
    pub tx_to_llvm_workers: Lock<mpsc::Sender<Box<dyn Any + Send>>>,

    output_filenames: Arc<OutputFilenames>,
}
933
/// Everything needed to efficiently work with interned allocations
/// (all state lives behind a single lock in the inner struct).
#[derive(Debug, Default)]
pub struct InterpretInterner<'tcx> {
    inner: Lock<InterpretInternerInner<'tcx>>,
}
939
/// Lock-protected state of `InterpretInterner`.
#[derive(Debug, Default)]
struct InterpretInternerInner<'tcx> {
    /// Stores the value of constants (and deduplicates the actual memory)
    allocs: FxHashSet<&'tcx interpret::Allocation>,

    /// Allows obtaining function instance handles via a unique identifier
    functions: FxHashMap<interpret::AllocId, Instance<'tcx>>,

    /// Inverse map of `interpret_functions`.
    /// Used so we don't allocate a new pointer every time we need one
    function_cache: FxHashMap<Instance<'tcx>, interpret::AllocId>,

    /// Allows obtaining const allocs via a unique identifier
    alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,

    /// Allows obtaining static def ids via a unique id
    statics: FxHashMap<interpret::AllocId, DefId>,

    /// The AllocId to assign to the next new regular allocation.
    /// Always incremented, never gets smaller.
    next_id: interpret::AllocId,

    /// Inverse map of `statics`
    /// Used so we don't allocate a new pointer every time we need one
    static_cache: FxHashMap<DefId, interpret::AllocId>,

    /// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
    /// allocations for string and bytestring literals.
    literal_alloc_cache: FxHashMap<Vec<u8>, interpret::AllocId>,
}
970
impl<'tcx> InterpretInterner<'tcx> {
    /// Returns the `AllocId` associated with `instance`, creating and caching
    /// a fresh one (in both direction maps) on first use.
    pub fn create_fn_alloc(&self, instance: Instance<'tcx>) -> interpret::AllocId {
        if let Some(&alloc_id) = self.inner.borrow().function_cache.get(&instance) {
            return alloc_id;
        }
        // NOTE: `reserve()` takes its own `borrow_mut` of `inner`, so it must
        // run *before* we re-borrow below — nesting the borrows would panic.
        let id = self.reserve();
        debug!("creating fn ptr: {}", id);
        let mut inner = self.inner.borrow_mut();
        inner.functions.insert(id, instance);
        inner.function_cache.insert(instance, id);
        id
    }

    /// Looks up the function instance registered under `id`, if any.
    pub fn get_fn(
        &self,
        id: interpret::AllocId,
    ) -> Option<Instance<'tcx>> {
        self.inner.borrow().functions.get(&id).cloned()
    }

    /// Looks up the interned allocation registered under `id`, if any.
    pub fn get_alloc(
        &self,
        id: interpret::AllocId,
    ) -> Option<&'tcx interpret::Allocation> {
        self.inner.borrow().alloc_by_id.get(&id).cloned()
    }

    /// Returns the `AllocId` for the static item `static_id`, allocating and
    /// caching a fresh one (in both direction maps) on first use.
    pub fn cache_static(
        &self,
        static_id: DefId,
    ) -> interpret::AllocId {
        if let Some(alloc_id) = self.inner.borrow().static_cache.get(&static_id).cloned() {
            return alloc_id;
        }
        // Same ordering constraint as in `create_fn_alloc`: reserve first,
        // then re-borrow.
        let alloc_id = self.reserve();
        let mut inner = self.inner.borrow_mut();
        inner.static_cache.insert(static_id, alloc_id);
        inner.statics.insert(alloc_id, static_id);
        alloc_id
    }

    /// Looks up the static `DefId` registered under `ptr`, if any.
    pub fn get_static(
        &self,
        ptr: interpret::AllocId,
    ) -> Option<DefId> {
        self.inner.borrow().statics.get(&ptr).cloned()
    }

    /// Associates a previously `reserve()`d id with its backing allocation.
    ///
    /// ICEs if `id` already had a backing allocation.
    pub fn intern_at_reserved(
        &self,
        id: interpret::AllocId,
        alloc: &'tcx interpret::Allocation,
    ) {
        if let Some(old) = self.inner.borrow_mut().alloc_by_id.insert(id, alloc) {
            bug!("tried to intern allocation at {}, but was already existing as {:#?}", id, old);
        }
    }

    /// obtains a new allocation ID that can be referenced but does not
    /// yet have an allocation backing it.
    pub fn reserve(
        &self,
    ) -> interpret::AllocId {
        let mut inner = self.inner.borrow_mut();
        let next = inner.next_id;
        inner.next_id.0 = inner.next_id.0
            .checked_add(1)
            .expect("You overflowed a u64 by incrementing by 1... \
                     You've just earned yourself a free drink if we ever meet. \
                     Seriously, how did you do that?!");
        next
    }
}
1044
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
    /// Get the global TyCtxt.
    #[inline]
    pub fn global_tcx(self) -> TyCtxt<'a, 'gcx, 'gcx> {
        TyCtxt {
            gcx: self.gcx,
            // Same `gcx`, but pointing at the global interner set instead of
            // a (possibly) local one.
            interners: &self.gcx.global_interners,
        }
    }
1054
    /// Allocates `generics` in the global arena.
    pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
        self.global_arenas.generics.alloc(generics)
    }

    /// Allocates `mir` in the global arena, wrapped in `Steal` so a later
    /// pass can take ownership of it.
    pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
        self.global_arenas.steal_mir.alloc(Steal::new(mir))
    }

    /// Allocates `mir` in the global arena.
    pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
        self.global_arenas.mir.alloc(mir)
    }

    /// Allocates typeck `tables` in the global arena.
    pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
        self.global_arenas.tables.alloc(tables)
    }

    /// Allocates `def` in the global arena.
    pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
        self.global_arenas.trait_def.alloc(def)
    }

    /// Constructs an `AdtDef` from its parts and allocates it in the
    /// global arena.
    pub fn alloc_adt_def(self,
                         did: DefId,
                         kind: AdtKind,
                         variants: Vec<ty::VariantDef>,
                         repr: ReprOptions)
                         -> &'gcx ty::AdtDef {
        let def = ty::AdtDef::new(self, did, kind, variants, repr);
        self.global_arenas.adt_def.alloc(def)
    }

    /// Copies `bytes` into the global interner arena. Empty input returns
    /// the static empty slice without allocating.
    pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
        if bytes.is_empty() {
            &[]
        } else {
            self.global_interners.arena.alloc_slice(bytes)
        }
    }

    /// Copies `values` into the current (possibly local) interner arena.
    /// Empty input returns the static empty slice without allocating.
    pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
                             -> &'tcx [&'tcx ty::Const<'tcx>] {
        if values.is_empty() {
            &[]
        } else {
            self.interners.arena.alloc_slice(values)
        }
    }

    /// Like `alloc_const_slice`, but for `(name, const)` pairs.
    pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
                                  -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
        if values.is_empty() {
            &[]
        } else {
            self.interners.arena.alloc_slice(values)
        }
    }
1110
    /// Interns a `mir::interpret` allocation in the global arena,
    /// deduplicating by content.
    pub fn intern_const_alloc(
        self,
        alloc: interpret::Allocation,
    ) -> &'gcx interpret::Allocation {
        // The `borrow_mut` guard is kept alive for the rest of the function,
        // so the lookup and the insert happen under one lock acquisition.
        let allocs = &mut self.interpret_interner.inner.borrow_mut().allocs;
        if let Some(alloc) = allocs.get(&alloc) {
            return alloc;
        }

        let interned = self.global_arenas.const_allocs.alloc(alloc);
        // `replace` must return None: `get` above ran under the same borrow.
        if let Some(prev) = allocs.replace(interned) {
            bug!("Tried to overwrite interned Allocation: {:#?}", prev)
        }
        interned
    }
1126
1127 /// Allocates a byte or string literal for `mir::interpret`
1128 pub fn allocate_cached(self, bytes: &[u8]) -> interpret::AllocId {
1129 // check whether we already allocated this literal or a constant with the same memory
1130 if let Some(&alloc_id) = self.interpret_interner.inner.borrow()
1131 .literal_alloc_cache.get(bytes) {
1132 return alloc_id;
1133 }
1134 // create an allocation that just contains these bytes
1135 let alloc = interpret::Allocation::from_bytes(bytes);
1136 let alloc = self.intern_const_alloc(alloc);
1137
1138 // the next unique id
1139 let id = self.interpret_interner.reserve();
1140 // make the allocation identifiable
1141 self.interpret_interner.inner.borrow_mut().alloc_by_id.insert(id, alloc);
1142 // cache it for the future
1143 self.interpret_interner.inner.borrow_mut().literal_alloc_cache.insert(bytes.to_owned(), id);
1144 id
1145 }
1146
    /// Interns `stab` in the global arena, deduplicating by value.
    pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
        let mut stability_interner = self.stability_interner.borrow_mut();
        if let Some(st) = stability_interner.get(&stab) {
            return st;
        }

        let interned = self.global_interners.arena.alloc(stab);
        // `replace` must return None: `get` above ran under the same borrow.
        if let Some(prev) = stability_interner.replace(interned) {
            bug!("Tried to overwrite interned Stability: {:?}", prev)
        }
        interned
    }

    /// Interns `layout` in the global arena, deduplicating by value.
    pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
        let mut layout_interner = self.layout_interner.borrow_mut();
        if let Some(layout) = layout_interner.get(&layout) {
            return layout;
        }

        let interned = self.global_arenas.layout.alloc(layout);
        // Same invariant as `intern_stability` above.
        if let Some(prev) = layout_interner.replace(interned) {
            bug!("Tried to overwrite interned Layout: {:?}", prev)
        }
        interned
    }
1172
    /// Tries to re-interpret `value` as interned in `self` (see the `Lift`
    /// trait below for the contract).
    pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
        value.lift_to_tcx(self)
    }

    /// Like lift, but only tries in the global tcx.
    pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
        value.lift_to_tcx(self.global_tcx())
    }

    /// Returns true if self is the same as self.global_tcx().
    fn is_global(self) -> bool {
        // Local and global contexts share `gcx` but point at different
        // interner sets, so comparing interner addresses is sufficient.
        let local = self.interners as *const _;
        let global = &self.global_interners as *const _;
        local as usize == global as usize
    }
1188
    /// Create a type context and call the closure with a `TyCtxt` reference
    /// to the context. The closure enforces that the type context and any interned
    /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
    /// reference to the context, to allow formatting values that need it.
    pub fn create_and_enter<F, R>(s: &'tcx Session,
                                  cstore: &'tcx dyn CrateStore,
                                  local_providers: ty::maps::Providers<'tcx>,
                                  extern_providers: ty::maps::Providers<'tcx>,
                                  arenas: &'tcx AllArenas<'tcx>,
                                  resolutions: ty::Resolutions,
                                  hir: hir_map::Map<'tcx>,
                                  on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
                                  crate_name: &str,
                                  tx: mpsc::Sender<Box<dyn Any + Send>>,
                                  output_filenames: &OutputFilenames,
                                  f: F) -> R
        where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
    {
        // An unparsable target data layout is unrecoverable; abort early.
        let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
            s.fatal(&err);
        });
        let interners = CtxtInterners::new(&arenas.interner);
        let common_types = CommonTypes::new(&interners);
        let dep_graph = hir.dep_graph.clone();
        // One provider table per crate; every extern crate starts with a copy
        // of `extern_providers`, the local crate gets its own set.
        let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
        let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
        providers[LOCAL_CRATE] = local_providers;

        // The DefPathHash -> DefId reverse map is only needed when we are
        // actually building a dep-graph.
        let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
            let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
                .crates_untracked()
                .iter()
                .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
                .collect();

            // Closure producing an iterator over all def-path tables: the
            // upstream ones plus the local crate's.
            let def_path_tables = || {
                upstream_def_path_tables
                    .iter()
                    .map(|&(cnum, ref rc)| (cnum, &**rc))
                    .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
            };

            // Precompute the capacity of the hashmap so we don't have to
            // re-allocate when populating it.
            let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();

            let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
                capacity,
                ::std::default::Default::default()
            );

            for (cnum, def_path_table) in def_path_tables() {
                def_path_table.add_def_path_hashes_to(cnum, &mut map);
            }

            Some(map)
        } else {
            None
        };

        // Re-key the resolver's NodeId-based trait map into the
        // (owner, item-local id) form the queries expect.
        let mut trait_map = FxHashMap();
        for (k, v) in resolutions.trait_map {
            let hir_id = hir.node_to_hir_id(k);
            let map = trait_map.entry(hir_id.owner)
                .or_insert_with(|| Lrc::new(FxHashMap()));
            // The Lrc was just created (or is still uniquely owned within
            // this loop), so `get_mut` cannot fail.
            Lrc::get_mut(map).unwrap()
                .insert(hir_id.local_id,
                        Lrc::new(StableVec::new(v)));
        }

        let gcx = &GlobalCtxt {
            sess: s,
            cstore,
            global_arenas: &arenas.global,
            global_interners: interners,
            dep_graph: dep_graph.clone(),
            on_disk_query_result_cache,
            types: common_types,
            trait_map,
            export_map: resolutions.export_map.into_iter().map(|(k, v)| {
                (k, Lrc::new(v))
            }).collect(),
            freevars: resolutions.freevars.into_iter().map(|(k, v)| {
                (hir.local_def_id(k), Lrc::new(v))
            }).collect(),
            maybe_unused_trait_imports:
                resolutions.maybe_unused_trait_imports
                    .into_iter()
                    .map(|id| hir.local_def_id(id))
                    .collect(),
            maybe_unused_extern_crates:
                resolutions.maybe_unused_extern_crates
                    .into_iter()
                    .map(|(id, sp)| (hir.local_def_id(id), sp))
                    .collect(),
            hir,
            def_path_hash_to_def_id,
            maps: maps::Maps::new(providers),
            rcache: Lock::new(FxHashMap()),
            selection_cache: traits::SelectionCache::new(),
            evaluation_cache: traits::EvaluationCache::new(),
            crate_name: Symbol::intern(crate_name),
            data_layout,
            layout_interner: Lock::new(FxHashSet()),
            stability_interner: Lock::new(FxHashSet()),
            interpret_interner: Default::default(),
            tx_to_llvm_workers: Lock::new(tx),
            output_filenames: Arc::new(output_filenames.clone()),
        };

        tls::enter_global(gcx, f)
    }
1301
    /// Delegates to `Session::consider_optimizing` with the local crate's
    /// name; `msg` describes the optimization being attempted.
    pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
        let cname = self.crate_name(LOCAL_CRATE).as_str();
        self.sess.consider_optimizing(&cname, msg)
    }

    /// Convenience wrapper for the `get_lang_items` query on the local crate.
    pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
        self.get_lang_items(LOCAL_CRATE)
    }
1310
1311 /// Due to missing llvm support for lowering 128 bit math to software emulation
1312 /// (on some targets), the lowering can be done in MIR.
1313 ///
1314 /// This function only exists until said support is implemented.
1315 pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
1316 let items = self.lang_items();
1317 let def_id = Some(def_id);
1318 if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1319 else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1320 else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1321 else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1322 else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1323 else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1324 else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1325 else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1326 else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1327 else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1328 else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1329 else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1330 else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1331 else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1332 else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1333 else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1334 else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1335 else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1336 else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1337 else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1338 else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1339 else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1340 else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1341 else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1342 else { None }
1343 }
1344
    /// Convenience wrapper for the `stability_index` query on the local crate.
    pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
        self.stability_index(LOCAL_CRATE)
    }

    /// Convenience wrapper for the `all_crate_nums` query on the local crate.
    pub fn crates(self) -> Lrc<Vec<CrateNum>> {
        self.all_crate_nums(LOCAL_CRATE)
    }

    /// Convenience wrapper for the `features_query` query on the local crate.
    pub fn features(self) -> Lrc<feature_gate::Features> {
        self.features_query(LOCAL_CRATE)
    }
1356
    /// Returns the `DefKey` for `id`: from the HIR map for local ids, from
    /// the crate store for cross-crate ids.
    pub fn def_key(self, id: DefId) -> hir_map::DefKey {
        if id.is_local() {
            self.hir.def_key(id)
        } else {
            self.cstore.def_key(id)
        }
    }

    /// Convert a `DefId` into its fully expanded `DefPath` (every
    /// `DefId` is really just an interned def-path).
    ///
    /// Note that if `id` is not local to this crate, the result will
    /// be a non-local `DefPath`.
    pub fn def_path(self, id: DefId) -> hir_map::DefPath {
        if id.is_local() {
            self.hir.def_path(id)
        } else {
            self.cstore.def_path(id)
        }
    }

    /// Returns the stable hash of `def_id`'s def-path, local or cross-crate.
    #[inline]
    pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
        if def_id.is_local() {
            self.hir.definitions().def_path_hash(def_id.index)
        } else {
            self.cstore.def_path_hash(def_id)
        }
    }
1386
1387 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1388 // We are explicitly not going through queries here in order to get
1389 // crate name and disambiguator since this code is called from debug!()
1390 // statements within the query system and we'd run into endless
1391 // recursion otherwise.
1392 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1393 (self.crate_name.clone(),
1394 self.sess.local_crate_disambiguator())
1395 } else {
1396 (self.cstore.crate_name_untracked(def_id.krate),
1397 self.cstore.crate_disambiguator_untracked(def_id.krate))
1398 };
1399
1400 format!("{}[{}]{}",
1401 crate_name,
1402 // Don't print the whole crate disambiguator. That's just
1403 // annoying in debug output.
1404 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1405 self.def_path(def_id).to_string_no_crate())
1406 }
1407
    /// Returns the metadata format version bytes reported by the crate store.
    pub fn metadata_encoding_version(self) -> Vec<u8> {
        self.cstore.metadata_encoding_version().to_vec()
    }

    // Note that this is *untracked* and should only be used within the query
    // system if the result is otherwise tracked through queries
    pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
        self.cstore.crate_data_as_rc_any(cnum)
    }

    /// Constructs a fresh `StableHashingContext` for incremental-compilation
    /// hashing.
    pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
        // Accessing the krate here must not record a dep-graph read.
        let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());

        StableHashingContext::new(self.sess,
                                  krate,
                                  self.hir.definitions(),
                                  self.cstore)
    }
1426
    // This method makes sure that we have a DepNode and a Fingerprint for
    // every upstream crate. It needs to be called once right after the tcx is
    // created.
    // With full-fledged red/green, the method will probably become unnecessary
    // as this will be done on-demand.
    pub fn allocate_metadata_dep_nodes(self) {
        // We cannot use the query versions of crates() and crate_hash(), since
        // those would need the DepNodes that we are allocating here.
        for cnum in self.cstore.crates_untracked() {
            let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
            let crate_hash = self.cstore.crate_hash_untracked(cnum);
            self.dep_graph.with_task(dep_node,
                                     self,
                                     crate_hash,
                                     |_, x| x // No transformation needed
            );
        }
    }

    // This method exercises the `in_scope_traits_map` query for all possible
    // values so that we have their fingerprints available in the DepGraph.
    // This is only required as long as we still use the old dependency tracking
    // which needs to have the fingerprints of all input nodes beforehand.
    pub fn precompute_in_scope_traits_hashes(self) {
        for &def_index in self.trait_map.keys() {
            self.in_scope_traits_map(def_index);
        }
    }
1455
    /// Writes the on-disk query result cache out through `encoder`.
    pub fn serialize_query_result_cache<E>(self,
                                           encoder: &mut E)
                                           -> Result<(), E::Error>
        where E: ty::codec::TyEncoder
    {
        self.on_disk_query_result_cache.serialize(self.global_tcx(), encoder)
    }

    /// If true, we should use the MIR-based borrowck (we may *also* use
    /// the AST-based borrowck).
    pub fn use_mir_borrowck(self) -> bool {
        self.borrowck_mode().use_mir()
    }

    /// If true, pattern variables for use in guards on match arms
    /// will be bound as references to the data, and occurrences of
    /// those variables in the guard expression will implicitly
    /// dereference those bindings. (See rust-lang/rust#27282.)
    pub fn all_pat_vars_are_implicit_refs_within_guards(self) -> bool {
        self.borrowck_mode().use_mir()
    }

    /// If true, we should enable two-phase borrows checks. This is
    /// done with either `-Ztwo-phase-borrows` or with
    /// `#![feature(nll)]`.
    pub fn two_phase_borrows(self) -> bool {
        self.features().nll || self.sess.opts.debugging_opts.two_phase_borrows
    }
1484
1485 /// What mode(s) of borrowck should we run? AST? MIR? both?
1486 /// (Also considers the `#![feature(nll)]` setting.)
1487 pub fn borrowck_mode(&self) -> BorrowckMode {
1488 match self.sess.opts.borrowck_mode {
1489 mode @ BorrowckMode::Mir |
1490 mode @ BorrowckMode::Compare => mode,
1491
1492 mode @ BorrowckMode::Ast => {
1493 if self.features().nll {
1494 BorrowckMode::Mir
1495 } else {
1496 mode
1497 }
1498 }
1499
1500 }
1501 }
1502
    /// Should we emit EndRegion MIR statements? These are consumed by
    /// MIR borrowck, but not when NLL is used. They are also consumed
    /// by the validation stuff.
    pub fn emit_end_regions(self) -> bool {
        self.sess.opts.debugging_opts.emit_end_regions ||
            self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
            self.use_mir_borrowck()
    }
1511
1512 #[inline]
1513 pub fn share_generics(self) -> bool {
1514 match self.sess.opts.debugging_opts.share_generics {
1515 Some(setting) => setting,
1516 None => {
1517 self.sess.opts.incremental.is_some() ||
1518 match self.sess.opts.optimize {
1519 OptLevel::No |
1520 OptLevel::Less |
1521 OptLevel::Size |
1522 OptLevel::SizeMin => true,
1523 OptLevel::Default |
1524 OptLevel::Aggressive => false,
1525 }
1526 }
1527 }
1528 }
1529
    /// Whether any of the crate types being built (rlib/dylib) can make this
    /// crate's monomorphizations available to downstream crates. Only
    /// meaningful when `share_generics()` is enabled, hence the assert.
    #[inline]
    pub fn local_crate_exports_generics(self) -> bool {
        debug_assert!(self.share_generics());

        self.sess.crate_types.borrow().iter().any(|crate_type| {
            match crate_type {
                CrateTypeExecutable |
                CrateTypeStaticlib |
                CrateTypeProcMacro |
                CrateTypeCdylib => false,
                CrateTypeRlib |
                CrateTypeDylib => true,
            }
        })
    }
1545 }
1546
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
    /// Encodes this crate's metadata by delegating to the `CrateStore`
    /// implementation.
    pub fn encode_metadata(self, link_meta: &LinkMeta)
        -> EncodedMetadata
    {
        self.cstore.encode_metadata(self, link_meta)
    }
}
1554
impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
    /// Call the closure with a local `TyCtxt` using the given arena.
    pub fn enter_local<F, R>(
        &self,
        arena: &'tcx SyncDroplessArena,
        f: F
    ) -> R
    where
        F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
    {
        // Fresh interner set backed by the caller's arena; values interned
        // through it live only as long as this call.
        let interners = CtxtInterners::new(arena);
        let tcx = TyCtxt {
            gcx: self,
            interners: &interners,
        };
        ty::tls::with_related_context(tcx.global_tcx(), |icx| {
            // Install the local tcx as the current implicit context, keeping
            // the parent context's query, layout depth, and dep-graph task.
            let new_icx = ty::tls::ImplicitCtxt {
                tcx,
                query: icx.query.clone(),
                layout_depth: icx.layout_depth,
                task: icx.task,
            };
            ty::tls::enter_context(&new_icx, |new_icx| {
                f(new_icx.tcx)
            })
        })
    }
}
1583
/// A trait implemented for all X<'a> types which can be safely and
/// efficiently converted to X<'tcx> as long as they are part of the
/// provided TyCtxt<'tcx>.
/// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
/// by looking them up in their respective interners.
///
/// However, this is still not the best implementation as it does
/// need to compare the components, even for interned values.
/// It would be more efficient if TypedArena provided a way to
/// determine whether the address is in the allocated range.
///
/// None is returned if the value or one of the components is not part
/// of the provided context.
/// For Ty, None can be returned if either the type interner doesn't
/// contain the TypeVariants key or if the address of the interned
/// pointer differs. The latter case is possible if a primitive type,
/// e.g. `()` or `u8`, was interned in a different context.
pub trait Lift<'tcx> {
    type Lifted: 'tcx;
    fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
}
1605
impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
    type Lifted = Ty<'tcx>;
    fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
        if tcx.interners.arena.in_arena(*self as *const _) {
            // SAFETY: the value lives in `tcx`'s arena, so re-tagging its
            // lifetime as `'tcx` via transmute is sound.
            return Some(unsafe { mem::transmute(*self) });
        }
        // Also try in the global tcx if we're not that.
        if !tcx.is_global() {
            self.lift_to_tcx(tcx.global_tcx())
        } else {
            None
        }
    }
}

impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
    type Lifted = Region<'tcx>;
    fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
        if tcx.interners.arena.in_arena(*self as *const _) {
            // SAFETY: same argument as the `Ty` impl above.
            return Some(unsafe { mem::transmute(*self) });
        }
        // Also try in the global tcx if we're not that.
        if !tcx.is_global() {
            self.lift_to_tcx(tcx.global_tcx())
        } else {
            None
        }
    }
}

impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
    type Lifted = &'tcx Const<'tcx>;
    fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
        if tcx.interners.arena.in_arena(*self as *const _) {
            // SAFETY: same argument as the `Ty` impl above.
            return Some(unsafe { mem::transmute(*self) });
        }
        // Also try in the global tcx if we're not that.
        if !tcx.is_global() {
            self.lift_to_tcx(tcx.global_tcx())
        } else {
            None
        }
    }
}
1650
1651 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1652 type Lifted = &'tcx Substs<'tcx>;
1653 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1654 if self.len() == 0 {
1655 return Some(Slice::empty());
1656 }
1657 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1658 return Some(unsafe { mem::transmute(*self) });
1659 }
1660 // Also try in the global tcx if we're not that.
1661 if !tcx.is_global() {
1662 self.lift_to_tcx(tcx.global_tcx())
1663 } else {
1664 None
1665 }
1666 }
1667 }
1668
1669 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
1670 type Lifted = &'tcx Slice<Ty<'tcx>>;
1671 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1672 -> Option<&'tcx Slice<Ty<'tcx>>> {
1673 if self.len() == 0 {
1674 return Some(Slice::empty());
1675 }
1676 if tcx.interners.arena.in_arena(*self as *const _) {
1677 return Some(unsafe { mem::transmute(*self) });
1678 }
1679 // Also try in the global tcx if we're not that.
1680 if !tcx.is_global() {
1681 self.lift_to_tcx(tcx.global_tcx())
1682 } else {
1683 None
1684 }
1685 }
1686 }
1687
1688 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<ExistentialPredicate<'a>> {
1689 type Lifted = &'tcx Slice<ExistentialPredicate<'tcx>>;
1690 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1691 -> Option<&'tcx Slice<ExistentialPredicate<'tcx>>> {
1692 if self.is_empty() {
1693 return Some(Slice::empty());
1694 }
1695 if tcx.interners.arena.in_arena(*self as *const _) {
1696 return Some(unsafe { mem::transmute(*self) });
1697 }
1698 // Also try in the global tcx if we're not that.
1699 if !tcx.is_global() {
1700 self.lift_to_tcx(tcx.global_tcx())
1701 } else {
1702 None
1703 }
1704 }
1705 }
1706
1707 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Predicate<'a>> {
1708 type Lifted = &'tcx Slice<Predicate<'tcx>>;
1709 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1710 -> Option<&'tcx Slice<Predicate<'tcx>>> {
1711 if self.is_empty() {
1712 return Some(Slice::empty());
1713 }
1714 if tcx.interners.arena.in_arena(*self as *const _) {
1715 return Some(unsafe { mem::transmute(*self) });
1716 }
1717 // Also try in the global tcx if we're not that.
1718 if !tcx.is_global() {
1719 self.lift_to_tcx(tcx.global_tcx())
1720 } else {
1721 None
1722 }
1723 }
1724 }
1725
1726 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<CanonicalVarInfo> {
1727 type Lifted = &'tcx Slice<CanonicalVarInfo>;
1728 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1729 if self.len() == 0 {
1730 return Some(Slice::empty());
1731 }
1732 if tcx.interners.arena.in_arena(*self as *const _) {
1733 return Some(unsafe { mem::transmute(*self) });
1734 }
1735 // Also try in the global tcx if we're not that.
1736 if !tcx.is_global() {
1737 self.lift_to_tcx(tcx.global_tcx())
1738 } else {
1739 None
1740 }
1741 }
1742 }
1743
1744 pub mod tls {
1745 use super::{GlobalCtxt, TyCtxt};
1746
1747 use std::cell::Cell;
1748 use std::fmt;
1749 use std::mem;
1750 use syntax_pos;
1751 use ty::maps;
1752 use errors::{Diagnostic, TRACK_DIAGNOSTICS};
1753 use rustc_data_structures::OnDrop;
1754 use rustc_data_structures::sync::Lrc;
1755 use dep_graph::OpenTask;
1756
    /// This is the implicit state of rustc. It contains the current
    /// TyCtxt and query. It is updated when creating a local interner or
    /// executing a new query. Whenever there's a TyCtxt value available
    /// you should also have access to an ImplicitCtxt through the functions
    /// in this module.
    #[derive(Clone)]
    pub struct ImplicitCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
        /// The current TyCtxt. Initially created by `enter_global` and updated
        /// by `enter_local` with a new local interner
        pub tcx: TyCtxt<'a, 'gcx, 'tcx>,

        /// The current query job, if any. This is updated by start_job in
        /// ty::maps::plumbing when executing a query
        pub query: Option<Lrc<maps::QueryJob<'gcx>>>,

        /// Used to prevent layout from recursing too deeply.
        pub layout_depth: usize,

        /// The current dep graph task. This is used to add dependencies to queries
        /// when executing them
        pub task: &'a OpenTask,
    }
1779
    // A thread local value which stores a pointer to the current ImplicitCtxt
    // (stored as a usize; 0 means "none installed").
    thread_local!(static TLV: Cell<usize> = Cell::new(0));

    /// Installs `value` as the current TLV pointer for the duration of `f`,
    /// restoring the previous value afterwards — even on unwind, via `OnDrop`.
    fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
        let old = get_tlv();
        let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
        TLV.with(|tlv| tlv.set(value));
        f()
    }

    /// Reads the raw TLV pointer (0 when no ImplicitCtxt is installed).
    fn get_tlv() -> usize {
        TLV.with(|tlv| tlv.get())
    }
1793
    /// This is a callback from libsyntax as it cannot access the implicit state
    /// in librustc otherwise
    fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
        with(|tcx| {
            write!(f, "{}", tcx.sess.codemap().span_to_string(span))
        })
    }

    /// This is a callback from libsyntax as it cannot access the implicit state
    /// in librustc otherwise. It is used to when diagnostic messages are
    /// emitted and stores them in the current query, if there is one.
    fn track_diagnostic(diagnostic: &Diagnostic) {
        with_context(|context| {
            // Diagnostics emitted outside of any query are not recorded here.
            if let Some(ref query) = context.query {
                query.diagnostics.lock().push(diagnostic.clone());
            }
        })
    }
1812
    /// Sets up the callbacks from libsyntax on the current thread
    /// (span pretty-printing and diagnostic tracking), restoring the
    /// previous callbacks when `f` returns or unwinds.
    pub fn with_thread_locals<F, R>(f: F) -> R
        where F: FnOnce() -> R
    {
        syntax_pos::SPAN_DEBUG.with(|span_dbg| {
            let original_span_debug = span_dbg.get();
            span_dbg.set(span_debug);

            // Restore the original span-debug hook on exit (even on panic).
            let _on_drop = OnDrop(move || {
                span_dbg.set(original_span_debug);
            });

            TRACK_DIAGNOSTICS.with(|current| {
                let original = current.get();
                current.set(track_diagnostic);

                // Restore the original diagnostics hook on exit.
                let _on_drop = OnDrop(move || {
                    current.set(original);
                });

                f()
            })
        })
    }
1837
    /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
    pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
                                                     f: F) -> R
        where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
    {
        // Store the context as a raw address in TLS; `with_context_opt`
        // reconstitutes the reference while it is still borrowed here.
        set_tlv(context as *const _ as usize, || {
            f(&context)
        })
    }
1847
    /// Enters GlobalCtxt by setting up libsyntax callbacks and
    /// creating a initial TyCtxt and ImplicitCtxt.
    /// This happens once per rustc session and TyCtxts only exists
    /// inside the `f` function.
    pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R
        where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
    {
        with_thread_locals(|| {
            let tcx = TyCtxt {
                gcx,
                interners: &gcx.global_interners,
            };
            // The root implicit context: no query running yet, ignored
            // dep-graph task, zero layout recursion depth.
            let icx = ImplicitCtxt {
                tcx,
                query: None,
                layout_depth: 0,
                task: &OpenTask::Ignore,
            };
            enter_context(&icx, |_| {
                f(tcx)
            })
        })
    }
1871
    /// Allows access to the current ImplicitCtxt in a closure if one is available
    pub fn with_context_opt<F, R>(f: F) -> R
        where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
    {
        let context = get_tlv();
        if context == 0 {
            f(None)
        } else {
            // SAFETY-ish: a non-zero TLV value was installed by
            // `enter_context` from a live `&ImplicitCtxt` that outlives the
            // `set_tlv` call, so the pointer is valid for this scope.
            unsafe { f(Some(&*(context as *const ImplicitCtxt))) }
        }
    }

    /// Allows access to the current ImplicitCtxt.
    /// Panics if there is no ImplicitCtxt available
    pub fn with_context<F, R>(f: F) -> R
        where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
    {
        with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
    }
1891
    /// Allows access to the current ImplicitCtxt whose tcx field has the same global
    /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
    /// with the same 'gcx lifetime as the TyCtxt passed in.
    /// This will panic if you pass it a TyCtxt which has a different global interner from
    /// the current ImplicitCtxt's tcx field.
    pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
        where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
    {
        with_context(|context| {
            unsafe {
                // Compare the GlobalCtxt by address: identical addresses
                // mean the caller's tcx and the TLS context share a global
                // interner, which justifies reusing 'gcx below.
                let gcx = tcx.gcx as *const _ as usize;
                assert!(context.tcx.gcx as *const _ as usize == gcx);
                // Re-brand the context's lifetimes; sound only because of
                // the address equality just asserted.
                let context: &ImplicitCtxt = mem::transmute(context);
                f(context)
            }
        })
    }
1909
    /// Allows access to the current ImplicitCtxt whose tcx field has the same global
    /// interner and local interner as the tcx argument passed in. This means the closure
    /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
    /// This will panic if you pass it a TyCtxt which has a different global interner or
    /// a different local interner from the current ImplicitCtxt's tcx field.
    pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
        where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
    {
        with_context(|context| {
            unsafe {
                // Stricter than `with_related_context`: both the global ctxt
                // AND the local interners must be the very same objects, so
                // both 'gcx and 'tcx can be reused for the TLS context.
                let gcx = tcx.gcx as *const _ as usize;
                let interners = tcx.interners as *const _ as usize;
                assert!(context.tcx.gcx as *const _ as usize == gcx);
                assert!(context.tcx.interners as *const _ as usize == interners);
                // Sound only because of the two address equalities above.
                let context: &ImplicitCtxt = mem::transmute(context);
                f(context)
            }
        })
    }
1929
1930 /// Allows access to the TyCtxt in the current ImplicitCtxt.
1931 /// Panics if there is no ImplicitCtxt available
1932 pub fn with<F, R>(f: F) -> R
1933 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1934 {
1935 with_context(|context| f(context.tcx))
1936 }
1937
    /// Allows access to the TyCtxt in the current ImplicitCtxt.
    /// The closure is passed None if there is no ImplicitCtxt available
    pub fn with_opt<F, R>(f: F) -> R
        where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
    {
        // Non-panicking counterpart of `with`: just project the tcx out of
        // the context, if any.
        with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
    }
1945 }
1946
/// Prints a -Ztime-style statistics table for the type interner: one row per
/// listed `TypeVariants` variant, counting how many interned types of that
/// variant exist and what fraction mention region/type inference variables.
macro_rules! sty_debug_print {
    ($ctxt: expr, $($variant: ident),*) => {{
        // curious inner module to allow variant names to be used as
        // variable names.
        #[allow(non_snake_case)]
        mod inner {
            use ty::{self, TyCtxt};
            use ty::context::Interned;

            // Per-variant tally: total count plus how many entries contain
            // region inference vars, type inference vars, or both.
            #[derive(Copy, Clone)]
            struct DebugStat {
                total: usize,
                region_infer: usize,
                ty_infer: usize,
                both_infer: usize,
            }

            pub fn go(tcx: TyCtxt) {
                let mut total = DebugStat {
                    total: 0,
                    region_infer: 0, ty_infer: 0, both_infer: 0,
                };
                // One zeroed counter per requested variant, named after it.
                $(let mut $variant = total;)*


                for &Interned(t) in tcx.interners.type_.borrow().iter() {
                    let variant = match t.sty {
                        // Nullary/primitive variants are not interesting.
                        ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
                        ty::TyFloat(..) | ty::TyStr | ty::TyNever => continue,
                        ty::TyError => /* unimportant */ continue,
                        $(ty::$variant(..) => &mut $variant,)*
                    };
                    // Flags are precomputed on TyS, so this is cheap.
                    let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
                    let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);

                    variant.total += 1;
                    total.total += 1;
                    if region { total.region_infer += 1; variant.region_infer += 1 }
                    if ty { total.ty_infer += 1; variant.ty_infer += 1 }
                    if region && ty { total.both_infer += 1; variant.both_infer += 1 }
                }
                println!("Ty interner total ty region both");
                $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
{ty:4.1}% {region:5.1}% {both:4.1}%",
                           stringify!($variant),
                           uses = $variant.total,
                           usespc = $variant.total as f64 * 100.0 / total.total as f64,
                           ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
                           region = $variant.region_infer as f64 * 100.0 / total.total as f64,
                           both = $variant.both_infer as f64 * 100.0 / total.total as f64);
                )*
                println!(" total {uses:6} \
{ty:4.1}% {region:5.1}% {both:4.1}%",
                         uses = total.total,
                         ty = total.ty_infer as f64 * 100.0 / total.total as f64,
                         region = total.region_infer as f64 * 100.0 / total.total as f64,
                         both = total.both_infer as f64 * 100.0 / total.total as f64)
            }
        }

        inner::go($ctxt)
    }}
}
2010
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
    /// Dumps interner statistics to stdout: a per-variant table for the
    /// type interner (via `sty_debug_print!`) followed by entry counts for
    /// the other interners. Debugging aid only; output format is ad hoc.
    pub fn print_debug_stats(self) {
        sty_debug_print!(
            self,
            TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr,
            TyGenerator, TyGeneratorWitness, TyDynamic, TyClosure, TyTuple,
            TyParam, TyInfer, TyProjection, TyAnon, TyForeign);

        println!("Substs interner: #{}", self.interners.substs.borrow().len());
        println!("Region interner: #{}", self.interners.region.borrow().len());
        println!("Stability interner: #{}", self.stability_interner.borrow().len());
        println!("Interpret interner: #{}", self.interpret_interner.inner.borrow().allocs.len());
        println!("Layout interner: #{}", self.layout_interner.borrow().len());
    }
}
2026
2027
/// An entry in an interner.
///
/// Wrapping the arena reference lets us override `Eq`/`Hash`/`Borrow` so
/// interner hash-sets compare entries by their *contents* (and can be probed
/// with a not-yet-interned key) instead of by reference identity.
struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);

// NB: An Interned<Ty> compares and hashes as a sty.
impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
    fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
        self.0.sty == other.0.sty
    }
}

impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}

impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
    // Must stay consistent with the PartialEq impl above (hash exactly what
    // is compared), as required for use as a HashSet key.
    fn hash<H: Hasher>(&self, s: &mut H) {
        self.0.sty.hash(s)
    }
}
2045
// Allows the type interner's HashSet to be probed with a `TypeVariants` key
// (possibly of a shorter lifetime 'lcx) before anything is allocated in the
// arena. Consistent with the Eq/Hash impls above, which also look only at
// the sty.
impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>> {
    fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> {
        &self.0.sty
    }
}
2051
// NB: An Interned<Slice<T>> compares and hashes as its elements.
impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
    fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
        self.0[..] == other.0[..]
    }
}

impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}

impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
    // Hashes the element slice, matching the PartialEq impl above so the
    // pair is a valid HashSet key.
    fn hash<H: Hasher>(&self, s: &mut H) {
        self.0[..].hash(s)
    }
}

// Lets the type-list interner be probed with a plain `&[Ty]` key.
impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
    fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
        &self.0[..]
    }
}
2072
// The remaining `Borrow` impls below all follow the same pattern: they let
// each interner's HashSet be looked up with a borrowed, not-yet-interned key
// (a plain slice or value, possibly at a shorter lifetime 'lcx), avoiding an
// allocation on interner hits. Each must hash/compare exactly like the
// corresponding Eq/Hash impls on `Interned`.
impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, Slice<CanonicalVarInfo>> {
    fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
        &self.0[..]
    }
}

impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
    fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
        &self.0[..]
    }
}

impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
    fn borrow<'a>(&'a self) -> &'a RegionKind {
        &self.0
    }
}

impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
    for Interned<'tcx, Slice<ExistentialPredicate<'tcx>>> {
    fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
        &self.0[..]
    }
}

impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
    for Interned<'tcx, Slice<Predicate<'tcx>>> {
    fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
        &self.0[..]
    }
}

impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
    fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
        &self.0
    }
}

impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]>
    for Interned<'tcx, Slice<Clause<'tcx>>> {
    fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] {
        &self.0[..]
    }
}

impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]>
    for Interned<'tcx, Slice<Goal<'tcx>>> {
    fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] {
        &self.0[..]
    }
}
2124
/// Generates an interning method `$method` on `TyCtxt` backed by the interner
/// field `$name`. Lookup order: local interner first, then (for a non-global
/// tcx) the global one. On a miss, `$keep_in_local_tcx` decides which arena
/// receives the new allocation: values free of inference artifacts go into
/// the global arena so they outlive the local tcx.
macro_rules! intern_method {
    ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
                                            $alloc_method:ident,
                                            $alloc_to_key:expr,
                                            $alloc_to_ret:expr,
                                            $keep_in_local_tcx:expr) -> $ty:ty) => {
        impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
            pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
                {
                    // Fast path: already interned (locally or globally).
                    // Inner block scopes the borrows before we mutate below.
                    let key = ($alloc_to_key)(&v);
                    if let Some(i) = self.interners.$name.borrow().get(key) {
                        return i.0;
                    }
                    if !self.is_global() {
                        if let Some(i) = self.global_interners.$name.borrow().get(key) {
                            return i.0;
                        }
                    }
                }

                // HACK(eddyb) Depend on flags being accurate to
                // determine that all contents are in the global tcx.
                // See comments on Lift for why we can't use that.
                if !($keep_in_local_tcx)(&v) {
                    if !self.is_global() {
                        // Promote to the global arena: erase the local
                        // lifetime, justified by the flag check above.
                        let v = unsafe {
                            mem::transmute(v)
                        };
                        let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
                        self.global_interners.$name.borrow_mut().insert(Interned(i));
                        return i;
                    }
                } else {
                    // Make sure we don't end up with inference
                    // types/regions in the global tcx.
                    if self.is_global() {
                        bug!("Attempted to intern `{:?}` which contains \
                              inference types/regions in the global type context",
                             v);
                    }
                }

                // Allocate and record in the local interner.
                let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
                self.interners.$name.borrow_mut().insert(Interned(i));
                i
            }
        }
    }
}
2174
/// For types interned by value (not as slices): derives the content-based
/// `Eq`/`Hash` impls on `Interned<$ty>` and then delegates to
/// `intern_method!` with identity key/return conversions.
macro_rules! direct_interners {
    ($lt_tcx:tt, $($name:ident: $method:ident($keep_in_local_tcx:expr) -> $ty:ty),+) => {
        $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
            fn eq(&self, other: &Self) -> bool {
                self.0 == other.0
            }
        }

        impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}

        impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
            fn hash<H: Hasher>(&self, s: &mut H) {
                self.0.hash(s)
            }
        }

        intern_method!(
            $lt_tcx,
            $name: $method($ty, alloc, |x| x, |x| x, $keep_in_local_tcx) -> $ty
        );)+
    }
}
2197
/// Returns true if `x` mentions anything (inference variables etc., per the
/// KEEP_IN_LOCAL_TCX flag) that must stay in the local arena rather than be
/// promoted to the global one. Used as the `$keep_in_local_tcx` predicate
/// for the interners below.
pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
    x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
}
2201
// Value-interned types: regions and constants. Each predicate decides
// whether a given value must stay in the local (inference) arena.
direct_interners!('tcx,
    region: mk_region(|r: &RegionKind| r.keep_in_local_tcx()) -> RegionKind,
    const_: mk_const(|c: &Const| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>
);
2206
/// For types interned as slices: delegates to `intern_method!`, keying by the
/// element slice and transmuting `&[T]` into the DST `&Slice<T>` on return.
/// A slice stays local if any element does.
macro_rules! slice_interners {
    ($($field:ident: $method:ident($ty:ident)),+) => (
        $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
                                               |xs: &[$ty]| -> &Slice<$ty> {
            unsafe { mem::transmute(xs) }
        }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
    )
}
2215
// Slice-interned types. The generated `_intern_*` methods are private-ish
// workhorses; the public `intern_*` wrappers below add empty-slice handling.
slice_interners!(
    existential_predicates: _intern_existential_predicates(ExistentialPredicate),
    predicates: _intern_predicates(Predicate),
    type_list: _intern_type_list(Ty),
    substs: _intern_substs(Kind),
    clauses: _intern_clauses(Clause),
    goals: _intern_goals(Goal)
);
2224
// This isn't a perfect fit: CanonicalVarInfo slices are always
// allocated in the global arena, so this `intern_method!` macro is
// overly general. But we just return false for the code that checks
// whether they belong in the thread-local arena, so no harm done, and
// seems better than open-coding the rest.
// (Callers go through `intern_canonical_var_infos`, which routes to the
// global tcx before calling this.)
intern_method! {
    'tcx,
    canonical_var_infos: _intern_canonical_var_infos(
        &[CanonicalVarInfo],
        alloc_slice,
        Deref::deref,
        |xs: &[CanonicalVarInfo]| -> &Slice<CanonicalVarInfo> { unsafe { mem::transmute(xs) } },
        |_xs: &[CanonicalVarInfo]| -> bool { false }
    ) -> Slice<CanonicalVarInfo>
}
2240
2241 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
2242 /// Given a `fn` type, returns an equivalent `unsafe fn` type;
2243 /// that is, a `fn` type that is equivalent in every way for being
2244 /// unsafe.
2245 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2246 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
2247 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
2248 unsafety: hir::Unsafety::Unsafe,
2249 ..sig
2250 }))
2251 }
2252
2253 /// Given a closure signature `sig`, returns an equivalent `fn`
2254 /// type with the same signature. Detuples and so forth -- so
2255 /// e.g. if we have a sig with `Fn<(u32, i32)>` then you would get
2256 /// a `fn(u32, i32)`.
2257 pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2258 let converted_sig = sig.map_bound(|s| {
2259 let params_iter = match s.inputs()[0].sty {
2260 ty::TyTuple(params) => {
2261 params.into_iter().cloned()
2262 }
2263 _ => bug!(),
2264 };
2265 self.mk_fn_sig(
2266 params_iter,
2267 s.output(),
2268 s.variadic,
2269 hir::Unsafety::Normal,
2270 abi::Abi::Rust,
2271 )
2272 });
2273
2274 self.mk_fn_ptr(converted_sig)
2275 }
2276
2277 // Interns a type/name combination, stores the resulting box in cx.interners,
2278 // and returns the box as cast to an unsafe ptr (see comments for Ty above).
2279 pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
2280 let global_interners = if !self.is_global() {
2281 Some(&self.global_interners)
2282 } else {
2283 None
2284 };
2285 self.interners.intern_ty(st, global_interners)
2286 }
2287
2288 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
2289 match tm {
2290 ast::IntTy::Isize => self.types.isize,
2291 ast::IntTy::I8 => self.types.i8,
2292 ast::IntTy::I16 => self.types.i16,
2293 ast::IntTy::I32 => self.types.i32,
2294 ast::IntTy::I64 => self.types.i64,
2295 ast::IntTy::I128 => self.types.i128,
2296 }
2297 }
2298
2299 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
2300 match tm {
2301 ast::UintTy::Usize => self.types.usize,
2302 ast::UintTy::U8 => self.types.u8,
2303 ast::UintTy::U16 => self.types.u16,
2304 ast::UintTy::U32 => self.types.u32,
2305 ast::UintTy::U64 => self.types.u64,
2306 ast::UintTy::U128 => self.types.u128,
2307 }
2308 }
2309
2310 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
2311 match tm {
2312 ast::FloatTy::F32 => self.types.f32,
2313 ast::FloatTy::F64 => self.types.f64,
2314 }
2315 }
2316
2317 pub fn mk_str(self) -> Ty<'tcx> {
2318 self.mk_ty(TyStr)
2319 }
2320
2321 pub fn mk_static_str(self) -> Ty<'tcx> {
2322 self.mk_imm_ref(self.types.re_static, self.mk_str())
2323 }
2324
2325 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2326 // take a copy of substs so that we own the vectors inside
2327 self.mk_ty(TyAdt(def, substs))
2328 }
2329
2330 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
2331 self.mk_ty(TyForeign(def_id))
2332 }
2333
2334 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2335 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
2336 let adt_def = self.adt_def(def_id);
2337 let generics = self.generics_of(def_id);
2338 let mut substs = vec![Kind::from(ty)];
2339 // Add defaults for other generic params if there are some.
2340 for def in generics.types.iter().skip(1) {
2341 assert!(def.has_default);
2342 let ty = self.type_of(def.def_id).subst(self, &substs);
2343 substs.push(ty.into());
2344 }
2345 let substs = self.mk_substs(substs.into_iter());
2346 self.mk_ty(TyAdt(adt_def, substs))
2347 }
2348
2349 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2350 self.mk_ty(TyRawPtr(tm))
2351 }
2352
2353 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2354 self.mk_ty(TyRef(r, tm))
2355 }
2356
2357 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2358 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2359 }
2360
2361 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2362 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2363 }
2364
2365 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2366 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2367 }
2368
2369 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2370 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2371 }
2372
2373 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
2374 self.mk_imm_ptr(self.mk_nil())
2375 }
2376
2377 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
2378 self.mk_ty(TyArray(ty, self.mk_const(ty::Const {
2379 val: ConstVal::Value(Value::ByVal(PrimVal::Bytes(n.into()))),
2380 ty: self.types.usize
2381 })))
2382 }
2383
2384 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2385 self.mk_ty(TySlice(ty))
2386 }
2387
2388 pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
2389 self.mk_ty(TyTuple(self.intern_type_list(ts)))
2390 }
2391
2392 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
2393 iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts))))
2394 }
2395
2396 pub fn mk_nil(self) -> Ty<'tcx> {
2397 self.intern_tup(&[])
2398 }
2399
2400 pub fn mk_diverging_default(self) -> Ty<'tcx> {
2401 if self.features().never_type {
2402 self.types.never
2403 } else {
2404 self.intern_tup(&[])
2405 }
2406 }
2407
2408 pub fn mk_bool(self) -> Ty<'tcx> {
2409 self.mk_ty(TyBool)
2410 }
2411
2412 pub fn mk_fn_def(self, def_id: DefId,
2413 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2414 self.mk_ty(TyFnDef(def_id, substs))
2415 }
2416
2417 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
2418 self.mk_ty(TyFnPtr(fty))
2419 }
2420
2421 pub fn mk_dynamic(
2422 self,
2423 obj: ty::Binder<&'tcx Slice<ExistentialPredicate<'tcx>>>,
2424 reg: ty::Region<'tcx>
2425 ) -> Ty<'tcx> {
2426 self.mk_ty(TyDynamic(obj, reg))
2427 }
2428
2429 pub fn mk_projection(self,
2430 item_def_id: DefId,
2431 substs: &'tcx Substs<'tcx>)
2432 -> Ty<'tcx> {
2433 self.mk_ty(TyProjection(ProjectionTy {
2434 item_def_id,
2435 substs,
2436 }))
2437 }
2438
2439 pub fn mk_closure(self,
2440 closure_id: DefId,
2441 substs: ClosureSubsts<'tcx>)
2442 -> Ty<'tcx> {
2443 self.mk_closure_from_closure_substs(closure_id, substs)
2444 }
2445
2446 pub fn mk_closure_from_closure_substs(self,
2447 closure_id: DefId,
2448 closure_substs: ClosureSubsts<'tcx>)
2449 -> Ty<'tcx> {
2450 self.mk_ty(TyClosure(closure_id, closure_substs))
2451 }
2452
2453 pub fn mk_generator(self,
2454 id: DefId,
2455 closure_substs: ClosureSubsts<'tcx>,
2456 interior: GeneratorInterior<'tcx>)
2457 -> Ty<'tcx> {
2458 self.mk_ty(TyGenerator(id, closure_substs, interior))
2459 }
2460
2461 pub fn mk_generator_witness(self, types: ty::Binder<&'tcx Slice<Ty<'tcx>>>) -> Ty<'tcx> {
2462 self.mk_ty(TyGeneratorWitness(types))
2463 }
2464
2465 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
2466 self.mk_infer(TyVar(v))
2467 }
2468
2469 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
2470 self.mk_infer(IntVar(v))
2471 }
2472
2473 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
2474 self.mk_infer(FloatVar(v))
2475 }
2476
2477 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
2478 self.mk_ty(TyInfer(it))
2479 }
2480
2481 pub fn mk_param(self,
2482 index: u32,
2483 name: InternedString) -> Ty<'tcx> {
2484 self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
2485 }
2486
2487 pub fn mk_self_type(self) -> Ty<'tcx> {
2488 self.mk_param(0, keywords::SelfType.name().as_interned_str())
2489 }
2490
2491 pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
2492 self.mk_param(def.index, def.name)
2493 }
2494
2495 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2496 self.mk_ty(TyAnon(def_id, substs))
2497 }
2498
2499 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
2500 -> &'tcx Slice<ExistentialPredicate<'tcx>> {
2501 assert!(!eps.is_empty());
2502 assert!(eps.windows(2).all(|w| w[0].cmp(self, &w[1]) != Ordering::Greater));
2503 self._intern_existential_predicates(eps)
2504 }
2505
2506 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
2507 -> &'tcx Slice<Predicate<'tcx>> {
2508 // FIXME consider asking the input slice to be sorted to avoid
2509 // re-interning permutations, in which case that would be asserted
2510 // here.
2511 if preds.len() == 0 {
2512 // The macro-generated method below asserts we don't intern an empty slice.
2513 Slice::empty()
2514 } else {
2515 self._intern_predicates(preds)
2516 }
2517 }
2518
2519 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice<Ty<'tcx>> {
2520 if ts.len() == 0 {
2521 Slice::empty()
2522 } else {
2523 self._intern_type_list(ts)
2524 }
2525 }
2526
2527 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice<Kind<'tcx>> {
2528 if ts.len() == 0 {
2529 Slice::empty()
2530 } else {
2531 self._intern_substs(ts)
2532 }
2533 }
2534
2535 pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
2536 if ts.len() == 0 {
2537 Slice::empty()
2538 } else {
2539 self.global_tcx()._intern_canonical_var_infos(ts)
2540 }
2541 }
2542
2543 pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> {
2544 if ts.len() == 0 {
2545 Slice::empty()
2546 } else {
2547 self._intern_clauses(ts)
2548 }
2549 }
2550
2551 pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> {
2552 if ts.len() == 0 {
2553 Slice::empty()
2554 } else {
2555 self._intern_goals(ts)
2556 }
2557 }
2558
2559 pub fn mk_fn_sig<I>(self,
2560 inputs: I,
2561 output: I::Item,
2562 variadic: bool,
2563 unsafety: hir::Unsafety,
2564 abi: abi::Abi)
2565 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2566 where I: Iterator,
2567 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2568 {
2569 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2570 inputs_and_output: self.intern_type_list(xs),
2571 variadic, unsafety, abi
2572 })
2573 }
2574
2575 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2576 &'tcx Slice<ExistentialPredicate<'tcx>>>>(self, iter: I)
2577 -> I::Output {
2578 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2579 }
2580
2581 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2582 &'tcx Slice<Predicate<'tcx>>>>(self, iter: I)
2583 -> I::Output {
2584 iter.intern_with(|xs| self.intern_predicates(xs))
2585 }
2586
2587 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2588 &'tcx Slice<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2589 iter.intern_with(|xs| self.intern_type_list(xs))
2590 }
2591
2592 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2593 &'tcx Slice<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2594 iter.intern_with(|xs| self.intern_substs(xs))
2595 }
2596
2597 pub fn mk_substs_trait(self,
2598 s: Ty<'tcx>,
2599 t: &[Ty<'tcx>])
2600 -> &'tcx Substs<'tcx>
2601 {
2602 self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from))
2603 }
2604
2605 pub fn mk_clauses<I: InternAs<[Clause<'tcx>], Clauses<'tcx>>>(self, iter: I) -> I::Output {
2606 iter.intern_with(|xs| self.intern_clauses(xs))
2607 }
2608
2609 pub fn mk_goals<I: InternAs<[Goal<'tcx>], Goals<'tcx>>>(self, iter: I) -> I::Output {
2610 iter.intern_with(|xs| self.intern_goals(xs))
2611 }
2612
2613 pub fn mk_goal(self, goal: Goal<'tcx>) -> &'tcx Goal {
2614 &self.mk_goals(iter::once(goal))[0]
2615 }
2616
2617 pub fn lint_node<S: Into<MultiSpan>>(self,
2618 lint: &'static Lint,
2619 id: NodeId,
2620 span: S,
2621 msg: &str) {
2622 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
2623 }
2624
2625 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2626 lint: &'static Lint,
2627 id: NodeId,
2628 span: S,
2629 msg: &str,
2630 note: &str) {
2631 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
2632 err.note(note);
2633 err.emit()
2634 }
2635
2636 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2637 -> (lint::Level, lint::LintSource)
2638 {
2639 // Right now we insert a `with_ignore` node in the dep graph here to
2640 // ignore the fact that `lint_levels` below depends on the entire crate.
2641 // For now this'll prevent false positives of recompiling too much when
2642 // anything changes.
2643 //
2644 // Once red/green incremental compilation lands we should be able to
2645 // remove this because while the crate changes often the lint level map
2646 // will change rarely.
2647 self.dep_graph.with_ignore(|| {
2648 let sets = self.lint_levels(LOCAL_CRATE);
2649 loop {
2650 let hir_id = self.hir.definitions().node_to_hir_id(id);
2651 if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
2652 return pair
2653 }
2654 let next = self.hir.get_parent_node(id);
2655 if next == id {
2656 bug!("lint traversal reached the root of the crate");
2657 }
2658 id = next;
2659 }
2660 })
2661 }
2662
2663 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
2664 lint: &'static Lint,
2665 id: NodeId,
2666 span: S,
2667 msg: &str)
2668 -> DiagnosticBuilder<'tcx>
2669 {
2670 let (level, src) = self.lint_level_at_node(lint, id);
2671 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
2672 }
2673
2674 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
2675 -> DiagnosticBuilder<'tcx>
2676 {
2677 let (level, src) = self.lint_level_at_node(lint, id);
2678 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
2679 }
2680
2681 pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
2682 self.in_scope_traits_map(id.owner)
2683 .and_then(|map| map.get(&id.local_id).cloned())
2684 }
2685
2686 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
2687 self.named_region_map(id.owner)
2688 .and_then(|map| map.get(&id.local_id).cloned())
2689 }
2690
2691 pub fn is_late_bound(self, id: HirId) -> bool {
2692 self.is_late_bound_map(id.owner)
2693 .map(|set| set.contains(&id.local_id))
2694 .unwrap_or(false)
2695 }
2696
2697 pub fn object_lifetime_defaults(self, id: HirId)
2698 -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
2699 {
2700 self.object_lifetime_defaults_map(id.owner)
2701 .and_then(|map| map.get(&id.local_id).cloned())
2702 }
2703 }
2704
/// Interning helper: collects an iterator into a temporary buffer and hands
/// the resulting slice to an interning callback `f`, returning whatever `f`
/// produces (possibly wrapped, e.g. in `Result` — see the element impls).
pub trait InternAs<T: ?Sized, R> {
    type Output;
    fn intern_with<F>(self, f: F) -> Self::Output
        where F: FnOnce(&T) -> R;
}
2710
// Blanket impl: any iterator can be interned as a slice; how the elements
// are gathered (by value, by clone, or fallibly) is decided by the
// element type's `InternIteratorElement` impl.
impl<I, T, R, E> InternAs<[T], R> for I
    where E: InternIteratorElement<T, R>,
          I: Iterator<Item=E> {
    type Output = E::Output;
    fn intern_with<F>(self, f: F) -> Self::Output
        where F: FnOnce(&[T]) -> R {
        E::intern_with(self, f)
    }
}
2720
/// Strategy trait for `InternAs`: describes how to accumulate iterator
/// elements of `Self` into a `&[T]` for the interning callback.
pub trait InternIteratorElement<T, R>: Sized {
    type Output;
    fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
}
2725
// Owned elements: collect directly. AccumulateVec keeps up to 8 elements
// on the stack, avoiding a heap allocation for short lists.
impl<T, R> InternIteratorElement<T, R> for T {
    type Output = R;
    fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
        f(&iter.collect::<AccumulateVec<[_; 8]>>())
    }
}
2732
// Borrowed elements: clone each into the accumulator before interning.
impl<'a, T, R> InternIteratorElement<T, R> for &'a T
    where T: Clone + 'a
{
    type Output = R;
    fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
        f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
    }
}
2741
// Fallible elements: collecting into Result short-circuits on the first
// Err, so nothing is interned unless every element succeeded.
impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
    type Output = Result<R, E>;
    fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
        Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))
    }
}
2748
/// Installs the local-crate query providers defined in this module. Most of
/// these just read tables computed during resolution/session setup; the
/// `assert_eq!(…, LOCAL_CRATE)` guards document that each query is only
/// answerable for the crate being compiled.
pub fn provide(providers: &mut ty::maps::Providers) {
    // FIXME(#44234) - almost all of these queries have no sub-queries and
    // therefore no actual inputs, they're just reading tables calculated in
    // resolve! Does this work? Unsure! That's what the issue is about
    providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
    providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
    providers.crate_name = |tcx, id| {
        assert_eq!(id, LOCAL_CRATE);
        tcx.crate_name
    };
    providers.get_lang_items = |tcx, id| {
        assert_eq!(id, LOCAL_CRATE);
        // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
        // graph here to ignore the fact that `get_lang_items` below depends on
        // the entire crate.  For now this'll prevent false positives of
        // recompiling too much when anything changes.
        //
        // Once red/green incremental compilation lands we should be able to
        // remove this because while the crate changes often the lint level map
        // will change rarely.
        tcx.dep_graph.with_ignore(|| Lrc::new(middle::lang_items::collect(tcx)))
    };
    providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
    providers.maybe_unused_trait_import = |tcx, id| {
        tcx.maybe_unused_trait_imports.contains(&id)
    };
    providers.maybe_unused_extern_crates = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(tcx.maybe_unused_extern_crates.clone())
    };

    providers.stability_index = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(stability::Index::new(tcx))
    };
    providers.lookup_stability = |tcx, id| {
        assert_eq!(id.krate, LOCAL_CRATE);
        let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
        tcx.stability().local_stability(id)
    };
    providers.lookup_deprecation_entry = |tcx, id| {
        assert_eq!(id.krate, LOCAL_CRATE);
        let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
        tcx.stability().local_deprecation_entry(id)
    };
    providers.extern_mod_stmt_cnum = |tcx, id| {
        let id = tcx.hir.as_local_node_id(id).unwrap();
        tcx.cstore.extern_mod_stmt_cnum_untracked(id)
    };
    providers.all_crate_nums = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(tcx.cstore.crates_untracked())
    };
    providers.postorder_cnums = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(tcx.cstore.postorder_cnums_untracked())
    };
    providers.output_filenames = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        tcx.output_filenames.clone()
    };
    providers.features_query = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        // Cloned so the query result is independent of the Session.
        Lrc::new(tcx.sess.features_untracked().clone())
    };
    providers.is_panic_runtime = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        attr::contains_name(tcx.hir.krate_attrs(), "panic_runtime")
    };
    providers.is_compiler_builtins = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        attr::contains_name(tcx.hir.krate_attrs(), "compiler_builtins")
    };
}