//! type context book-keeping
use dep_graph::DepGraph;
+use dep_graph::{DepNode, DepConstructor};
use errors::DiagnosticBuilder;
use session::Session;
+use session::config::{BorrowckMode, OutputFilenames, OptLevel};
+use session::config::CrateType::*;
use middle;
-use hir::{TraitMap};
-use hir::def::{Def, ExportMap};
-use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use hir::{TraitCandidate, HirId, ItemLocalId};
+use hir::def::{Def, Export};
+use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
use hir::map as hir_map;
use hir::map::DefPathHash;
use lint::{self, Lint};
-use ich::{self, StableHashingContext, NodeIdHashingMode};
-use middle::free_region::FreeRegionMap;
+use ich::{StableHashingContext, NodeIdHashingMode};
+use infer::canonical::{CanonicalVarInfo, CanonicalVarInfos};
+use infer::outlives::free_region_map::FreeRegionMap;
+use middle::const_val::ConstVal;
+use middle::cstore::{CrateStore, LinkMeta};
+use middle::cstore::EncodedMetadata;
use middle::lang_items;
-use middle::resolve_lifetime;
+use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
use middle::stability;
-use mir::Mir;
-use mir::transform::Passes;
-use ty::subst::{Kind, Substs};
+use mir::{self, Mir, interpret};
+use mir::interpret::{Value, PrimVal};
+use ty::subst::{Kind, Substs, Subst};
use ty::ReprOptions;
+use ty::Instance;
use traits;
+use traits::{Clause, Clauses, Goal, Goals};
use ty::{self, Ty, TypeAndMut};
use ty::{TyS, TypeVariants, Slice};
-use ty::{AdtKind, AdtDef, ClosureSubsts, Region};
-use hir::FreevarMap;
+use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorInterior, Region, Const};
use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
use ty::RegionKind;
use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
use ty::TypeVariants::*;
-use ty::layout::{Layout, TargetDataLayout};
-use ty::inhabitedness::DefIdForest;
+use ty::layout::{LayoutDetails, TargetDataLayout};
use ty::maps;
use ty::steal::Steal;
use ty::BindingMode;
-use util::nodemap::{NodeMap, NodeSet, DefIdSet, ItemLocalMap};
+use ty::CanonicalTy;
+use util::nodemap::{DefIdSet, ItemLocalMap};
use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
- StableHasherResult};
-
-use arena::{TypedArena, DroplessArena};
+use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
+ StableHasher, StableHasherResult,
+ StableVec};
+use arena::{TypedArena, SyncDroplessArena};
use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::{Lrc, Lock};
+use std::any::Any;
use std::borrow::Borrow;
-use std::cell::{Cell, RefCell};
use std::cmp::Ordering;
use std::collections::hash_map::{self, Entry};
use std::hash::{Hash, Hasher};
use std::mem;
use std::ops::Deref;
use std::iter;
-use std::rc::Rc;
-use syntax::abi;
-use syntax::ast::{self, Name, NodeId};
+use std::sync::mpsc;
+use std::sync::Arc;
+use rustc_target::spec::abi;
+use syntax::ast::{self, NodeId};
use syntax::attr;
use syntax::codemap::MultiSpan;
-use syntax::symbol::{Symbol, keywords};
+use syntax::feature_gate;
+use syntax::symbol::{Symbol, keywords, InternedString};
use syntax_pos::Span;
use hir;
+/// Bundles every arena the compiler allocates from: the typed global
+/// arenas plus the dropless arena backing the type interners.
+pub struct AllArenas<'tcx> {
+    /// Typed arenas (layouts, MIR, typeck tables, ...); see `GlobalArenas`.
+    pub global: GlobalArenas<'tcx>,
+    /// Dropless arena handed to `CtxtInterners::new` for interned values.
+    pub interner: SyncDroplessArena,
+}
+
+impl<'tcx> AllArenas<'tcx> {
+    /// Creates a fresh, empty set of arenas.
+    pub fn new() -> Self {
+        AllArenas {
+            global: GlobalArenas::new(),
+            interner: SyncDroplessArena::new(),
+        }
+    }
+}
+
/// Internal storage
pub struct GlobalArenas<'tcx> {
    // internings
-    layout: TypedArena<Layout>,
+    layout: TypedArena<LayoutDetails>,
    // references
    generics: TypedArena<ty::Generics>,
+    // NOTE(review): `Steal` presumably lets the MIR pass pipeline take
+    // ownership of the body later — confirm against `ty::steal`.
    steal_mir: TypedArena<Steal<Mir<'tcx>>>,
    mir: TypedArena<Mir<'tcx>>,
    tables: TypedArena<ty::TypeckTables<'tcx>>,
+    /// Interned miri (const-eval) allocations; filled by `intern_const_alloc`.
+    const_allocs: TypedArena<interpret::Allocation>,
}
impl<'tcx> GlobalArenas<'tcx> {
steal_mir: TypedArena::new(),
mir: TypedArena::new(),
tables: TypedArena::new(),
+ const_allocs: TypedArena::new(),
}
}
}
+/// Shorthand for one interning table: a `Lock`-guarded hash set of
+/// references into the interner arena. Used for every field below.
+type InternedSet<'tcx, T> = Lock<FxHashSet<Interned<'tcx, T>>>;
+
pub struct CtxtInterners<'tcx> {
    /// The arena that types, regions, etc are allocated from
-    arena: &'tcx DroplessArena,
+    arena: &'tcx SyncDroplessArena,
    /// Specifically use a speedy hash algorithm for these hash sets,
    /// they're accessed quite often.
-    type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
-    type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
-    substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
-    region: RefCell<FxHashSet<Interned<'tcx, RegionKind>>>,
-    existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
-    predicates: RefCell<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
+    type_: InternedSet<'tcx, TyS<'tcx>>,
+    type_list: InternedSet<'tcx, Slice<Ty<'tcx>>>,
+    substs: InternedSet<'tcx, Substs<'tcx>>,
+    canonical_var_infos: InternedSet<'tcx, Slice<CanonicalVarInfo>>,
+    region: InternedSet<'tcx, RegionKind>,
+    existential_predicates: InternedSet<'tcx, Slice<ExistentialPredicate<'tcx>>>,
+    predicates: InternedSet<'tcx, Slice<Predicate<'tcx>>>,
+    const_: InternedSet<'tcx, Const<'tcx>>,
+    clauses: InternedSet<'tcx, Slice<Clause<'tcx>>>,
+    goals: InternedSet<'tcx, Slice<Goal<'tcx>>>,
}
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
- fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
+ fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
CtxtInterners {
arena,
- type_: RefCell::new(FxHashSet()),
- type_list: RefCell::new(FxHashSet()),
- substs: RefCell::new(FxHashSet()),
- region: RefCell::new(FxHashSet()),
- existential_predicates: RefCell::new(FxHashSet()),
- predicates: RefCell::new(FxHashSet()),
+ type_: Default::default(),
+ type_list: Default::default(),
+ substs: Default::default(),
+ region: Default::default(),
+ existential_predicates: Default::default(),
+ canonical_var_infos: Default::default(),
+ predicates: Default::default(),
+ const_: Default::default(),
+ clauses: Default::default(),
+ goals: Default::default(),
}
}
-> Ty<'tcx> {
let ty = {
let mut interner = self.type_.borrow_mut();
- let global_interner = global_interners.map(|interners| {
- interners.type_.borrow_mut()
- });
if let Some(&Interned(ty)) = interner.get(&st) {
return ty;
}
- if let Some(ref interner) = global_interner {
- if let Some(&Interned(ty)) = interner.get(&st) {
+ let global_interner = global_interners.map(|interners| {
+ (interners.type_.borrow_mut(), &interners.arena)
+ });
+ if let Some((ref type_, _)) = global_interner {
+ if let Some(&Interned(ty)) = type_.get(&st) {
return ty;
}
}
// determine that all contents are in the global tcx.
// See comments on Lift for why we can't use that.
if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
- if let Some(interner) = global_interners {
+ if let Some((mut type_, arena)) = global_interner {
let ty_struct: TyS<'gcx> = unsafe {
mem::transmute(ty_struct)
};
- let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
- global_interner.unwrap().insert(Interned(ty));
+ let ty: Ty<'gcx> = arena.alloc(ty_struct);
+ type_.insert(Interned(ty));
return ty;
}
} else {
// Make sure we don't end up with inference
// types/regions in the global tcx.
- if global_interners.is_none() {
+ if global_interner.is_none() {
drop(interner);
bug!("Attempted to intern `{:?}` which contains \
inference types/regions in the global type context",
}
}
-#[derive(RustcEncodable, RustcDecodable)]
+#[derive(RustcEncodable, RustcDecodable, Debug)]
pub struct TypeckTables<'tcx> {
/// The HirId::owner all ItemLocalIds in this table are relative to.
pub local_id_root: Option<DefId>,
/// method calls, including those of overloaded operators.
type_dependent_defs: ItemLocalMap<Def>,
+ /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
+ /// or patterns (`S { field }`). The index is often useful by itself, but to learn more
+ /// about the field you also need definition of the variant to which the field
+ /// belongs, but it may not exist if it's a tuple field (`tuple.0`).
+ field_indices: ItemLocalMap<usize>,
+
+ /// Stores the canonicalized types provided by the user. See also `UserAssertTy` statement in
+ /// MIR.
+ user_provided_tys: ItemLocalMap<CanonicalTy<'tcx>>,
+
/// Stores the types for various nodes in the AST. Note that this table
/// is not guaranteed to be populated until after typeck. See
/// typeck::check::fn_ctxt for details.
adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
- // Stores the actual binding mode for all instances of hir::BindingAnnotation.
+ /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
pat_binding_modes: ItemLocalMap<BindingMode>,
+ /// Stores the types which were implicitly dereferenced in pattern binding modes
+ /// for later usage in HAIR lowering. For example,
+ ///
+ /// ```
+ /// match &&Some(5i32) {
+ /// Some(n) => {},
+ /// _ => {},
+ /// }
+ /// ```
+ /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
+ ///
+ /// See:
+ /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
+ pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
+
/// Borrows
pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
- /// Records the type of each closure.
- closure_tys: ItemLocalMap<ty::PolyFnSig<'tcx>>,
-
- /// Records the kind of each closure and the span and name of the variable
- /// that caused the closure to be this kind.
- closure_kinds: ItemLocalMap<(ty::ClosureKind, Option<(Span, ast::Name)>)>,
+ /// Records the reasons that we picked the kind of each closure;
+ /// not all closures are present in the map.
+ closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,
/// For each fn, records the "liberated" types of its arguments
/// and return type. Liberated means that all bound regions
cast_kinds: ItemLocalMap<ty::cast::CastKind>,
/// Set of trait imports actually used in the method resolution.
- /// This is used for warning unused imports.
- pub used_trait_imports: DefIdSet,
+ /// This is used for warning unused imports. During type
+ /// checking, this `Lrc` should not be cloned: it must have a ref-count
+ /// of 1 so that we can insert things into the set mutably.
+ pub used_trait_imports: Lrc<DefIdSet>,
/// If any errors occurred while type-checking this body,
/// this field will be set to `true`.
TypeckTables {
local_id_root,
type_dependent_defs: ItemLocalMap(),
+ field_indices: ItemLocalMap(),
+ user_provided_tys: ItemLocalMap(),
node_types: ItemLocalMap(),
node_substs: ItemLocalMap(),
adjustments: ItemLocalMap(),
pat_binding_modes: ItemLocalMap(),
+ pat_adjustments: ItemLocalMap(),
upvar_capture_map: FxHashMap(),
- closure_tys: ItemLocalMap(),
- closure_kinds: ItemLocalMap(),
+ closure_kind_origins: ItemLocalMap(),
liberated_fn_sigs: ItemLocalMap(),
fru_field_types: ItemLocalMap(),
cast_kinds: ItemLocalMap(),
- used_trait_imports: DefIdSet(),
+ used_trait_imports: Lrc::new(DefIdSet()),
tainted_by_errors: false,
free_region_map: FreeRegionMap::new(),
}
}
}
+    /// Read-only view of the `field_indices` table, paired with
+    /// `local_id_root` for item-local lookups.
+    pub fn field_indices(&self) -> LocalTableInContext<usize> {
+        LocalTableInContext {
+            local_id_root: self.local_id_root,
+            data: &self.field_indices
+        }
+    }
+
+    /// Mutable counterpart of `field_indices`.
+    pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<usize> {
+        LocalTableInContextMut {
+            local_id_root: self.local_id_root,
+            data: &mut self.field_indices
+        }
+    }
+
+    /// Read-only view of the `user_provided_tys` table (canonicalized
+    /// user-written types), paired with `local_id_root`.
+    pub fn user_provided_tys(&self) -> LocalTableInContext<CanonicalTy<'tcx>> {
+        LocalTableInContext {
+            local_id_root: self.local_id_root,
+            data: &self.user_provided_tys
+        }
+    }
+
+    /// Mutable counterpart of `user_provided_tys`.
+    pub fn user_provided_tys_mut(&mut self) -> LocalTableInContextMut<CanonicalTy<'tcx>> {
+        LocalTableInContextMut {
+            local_id_root: self.local_id_root,
+            data: &mut self.user_provided_tys
+        }
+    }
+
pub fn node_types(&self) -> LocalTableInContext<Ty<'tcx>> {
LocalTableInContext {
local_id_root: self.local_id_root,
}
}
- pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
- self.upvar_capture_map[&upvar_id]
- }
-
- pub fn closure_tys(&self) -> LocalTableInContext<ty::PolyFnSig<'tcx>> {
+ pub fn pat_adjustments(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
LocalTableInContext {
local_id_root: self.local_id_root,
- data: &self.closure_tys
+ data: &self.pat_adjustments,
}
}
- pub fn closure_tys_mut(&mut self)
- -> LocalTableInContextMut<ty::PolyFnSig<'tcx>> {
+ pub fn pat_adjustments_mut(&mut self)
+ -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
LocalTableInContextMut {
local_id_root: self.local_id_root,
- data: &mut self.closure_tys
+ data: &mut self.pat_adjustments,
}
}
- pub fn closure_kinds(&self) -> LocalTableInContext<(ty::ClosureKind,
- Option<(Span, ast::Name)>)> {
+ pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
+ self.upvar_capture_map[&upvar_id]
+ }
+
+ pub fn closure_kind_origins(&self) -> LocalTableInContext<(Span, ast::Name)> {
LocalTableInContext {
local_id_root: self.local_id_root,
- data: &self.closure_kinds
+ data: &self.closure_kind_origins
}
}
- pub fn closure_kinds_mut(&mut self)
- -> LocalTableInContextMut<(ty::ClosureKind, Option<(Span, ast::Name)>)> {
+ pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<(Span, ast::Name)> {
LocalTableInContextMut {
local_id_root: self.local_id_root,
- data: &mut self.closure_kinds
+ data: &mut self.closure_kind_origins
}
}
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for TypeckTables<'gcx> {
+impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
let ty::TypeckTables {
local_id_root,
ref type_dependent_defs,
+ ref field_indices,
+ ref user_provided_tys,
ref node_types,
ref node_substs,
ref adjustments,
ref pat_binding_modes,
+ ref pat_adjustments,
ref upvar_capture_map,
- ref closure_tys,
- ref closure_kinds,
+ ref closure_kind_origins,
ref liberated_fn_sigs,
ref fru_field_types,
} = *self;
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
- ich::hash_stable_itemlocalmap(hcx, hasher, type_dependent_defs);
- ich::hash_stable_itemlocalmap(hcx, hasher, node_types);
- ich::hash_stable_itemlocalmap(hcx, hasher, node_substs);
- ich::hash_stable_itemlocalmap(hcx, hasher, adjustments);
- ich::hash_stable_itemlocalmap(hcx, hasher, pat_binding_modes);
- ich::hash_stable_hashmap(hcx, hasher, upvar_capture_map, |hcx, up_var_id| {
+ type_dependent_defs.hash_stable(hcx, hasher);
+ field_indices.hash_stable(hcx, hasher);
+ user_provided_tys.hash_stable(hcx, hasher);
+ node_types.hash_stable(hcx, hasher);
+ node_substs.hash_stable(hcx, hasher);
+ adjustments.hash_stable(hcx, hasher);
+ pat_binding_modes.hash_stable(hcx, hasher);
+ pat_adjustments.hash_stable(hcx, hasher);
+ hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
let ty::UpvarId {
var_id,
closure_expr_id
let local_id_root =
local_id_root.expect("trying to hash invalid TypeckTables");
- let var_def_id = DefId {
+ let var_owner_def_id = DefId {
krate: local_id_root.krate,
- index: var_id,
+ index: var_id.owner,
};
let closure_def_id = DefId {
krate: local_id_root.krate,
- index: closure_expr_id,
+ index: closure_expr_id.to_def_id().index,
};
- (hcx.def_path_hash(var_def_id), hcx.def_path_hash(closure_def_id))
- });
-
- ich::hash_stable_itemlocalmap(hcx, hasher, closure_tys);
- ich::hash_stable_itemlocalmap(hcx, hasher, closure_kinds);
- ich::hash_stable_itemlocalmap(hcx, hasher, liberated_fn_sigs);
- ich::hash_stable_itemlocalmap(hcx, hasher, fru_field_types);
- ich::hash_stable_itemlocalmap(hcx, hasher, cast_kinds);
-
- ich::hash_stable_hashset(hcx, hasher, used_trait_imports, |hcx, def_id| {
- hcx.def_path_hash(*def_id)
+ (hcx.def_path_hash(var_owner_def_id),
+ var_id.local_id,
+ hcx.def_path_hash(closure_def_id))
});
+ closure_kind_origins.hash_stable(hcx, hasher);
+ liberated_fn_sigs.hash_stable(hcx, hasher);
+ fru_field_types.hash_stable(hcx, hasher);
+ cast_kinds.hash_stable(hcx, hasher);
+ used_trait_imports.hash_stable(hcx, hasher);
tainted_by_errors.hash_stable(hcx, hasher);
free_region_map.hash_stable(hcx, hasher);
})
char: mk(TyChar),
never: mk(TyNever),
err: mk(TyError),
- isize: mk(TyInt(ast::IntTy::Is)),
+ isize: mk(TyInt(ast::IntTy::Isize)),
i8: mk(TyInt(ast::IntTy::I8)),
i16: mk(TyInt(ast::IntTy::I16)),
i32: mk(TyInt(ast::IntTy::I32)),
i64: mk(TyInt(ast::IntTy::I64)),
i128: mk(TyInt(ast::IntTy::I128)),
- usize: mk(TyUint(ast::UintTy::Us)),
+ usize: mk(TyUint(ast::UintTy::Usize)),
u8: mk(TyUint(ast::UintTy::U8)),
u16: mk(TyUint(ast::UintTy::U16)),
u32: mk(TyUint(ast::UintTy::U32)),
}
}
-/// The data structure to keep track of all the information that typechecker
-/// generates so that so that it can be reused and doesn't have to be redone
-/// later on.
+/// The central data structure of the compiler. It stores references
+/// to the various **arenas** and also houses the results of the
+/// various **compiler queries** that have been performed. See the
+/// [rustc guide] for more details.
+///
+/// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/ty.html
#[derive(Copy, Clone)]
pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
gcx: &'a GlobalCtxt<'gcx>,
global_arenas: &'tcx GlobalArenas<'tcx>,
global_interners: CtxtInterners<'tcx>,
- pub sess: &'tcx Session,
-
- pub specializes_cache: RefCell<traits::SpecializesCache>,
+ cstore: &'tcx dyn CrateStore,
- pub trans_trait_caches: traits::trans::TransTraitCaches<'tcx>,
+ pub sess: &'tcx Session,
pub dep_graph: DepGraph,
+ /// This provides access to the incr. comp. on-disk cache for query results.
+ /// Do not access this directly. It is only meant to be used by
+ /// `DepGraph::try_mark_green()` and the query infrastructure in `ty::maps`.
+ pub(crate) on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
+
/// Common types, pre-interned for your convenience.
pub types: CommonTypes<'tcx>,
/// Map indicating what traits are in scope for places where this
/// is relevant; generated by resolve.
- pub trait_map: TraitMap,
+ trait_map: FxHashMap<DefIndex,
+ Lrc<FxHashMap<ItemLocalId,
+ Lrc<StableVec<TraitCandidate>>>>>,
/// Export map produced by name resolution.
- pub export_map: ExportMap,
-
- pub named_region_map: resolve_lifetime::NamedRegionMap,
+ export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,
pub hir: hir_map::Map<'tcx>,
pub maps: maps::Maps<'tcx>,
- pub mir_passes: Rc<Passes>,
-
// Records the free variables refrenced by every closure
// expression. Do not track deps for this, just recompute it from
// scratch every time.
- pub freevars: RefCell<FreevarMap>,
+ freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,
- pub maybe_unused_trait_imports: NodeSet,
+ maybe_unused_trait_imports: FxHashSet<DefId>,
- pub maybe_unused_extern_crates: Vec<(NodeId, Span)>,
+ maybe_unused_extern_crates: Vec<(DefId, Span)>,
// Internal cache for metadata decoding. No need to track deps on this.
- pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
-
- // FIXME dep tracking -- should be harmless enough
- pub normalized_cache: RefCell<FxHashMap<Ty<'tcx>, Ty<'tcx>>>,
-
- pub inhabitedness_cache: RefCell<FxHashMap<Ty<'tcx>, DefIdForest>>,
-
- pub lang_items: middle::lang_items::LanguageItems,
-
- /// Set of used unsafe nodes (functions or blocks). Unsafe nodes not
- /// present in this set can be warned about.
- pub used_unsafe: RefCell<NodeSet>,
-
- /// Set of nodes which mark locals as mutable which end up getting used at
- /// some point. Local variable definitions not in this set can be warned
- /// about.
- pub used_mut_nodes: RefCell<NodeSet>,
-
- /// Maps any item's def-id to its stability index.
- pub stability: RefCell<stability::Index<'tcx>>,
+ pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
/// Caches the results of trait selection. This cache is used
/// for things that do not have to do with the parameters in scope.
/// Merge this with `selection_cache`?
pub evaluation_cache: traits::EvaluationCache<'tcx>,
- /// Maps Expr NodeId's to `true` iff `&expr` can have 'static lifetime.
- pub rvalue_promotable_to_static: RefCell<NodeMap<bool>>,
-
/// The definite name of the current crate after taking into account
/// attributes, commandline parameters, etc.
pub crate_name: Symbol,
/// Data layout specification for the current target.
pub data_layout: TargetDataLayout,
- /// Used to prevent layout from recursing too deeply.
- pub layout_depth: Cell<usize>,
+ stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
- /// Map from function to the `#[derive]` mode that it's defining. Only used
- /// by `proc-macro` crates.
- pub derive_macros: RefCell<NodeMap<Symbol>>,
+ pub interpret_interner: InterpretInterner<'tcx>,
- stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,
+ layout_interner: Lock<FxHashSet<&'tcx LayoutDetails>>,
- layout_interner: RefCell<FxHashSet<&'tcx Layout>>,
+ /// A general purpose channel to throw data out the back towards LLVM worker
+ /// threads.
+ ///
+ /// This is intended to only get used during the trans phase of the compiler
+ /// when satisfying the query for a particular codegen unit. Internally in
+ /// the query it'll send data along this channel to get processed later.
+ pub tx_to_llvm_workers: Lock<mpsc::Sender<Box<dyn Any + Send>>>,
- /// A vector of every trait accessible in the whole crate
- /// (i.e. including those from subcrates). This is used only for
- /// error reporting, and so is lazily initialized and generally
- /// shouldn't taint the common path (hence the RefCell).
- pub all_traits: RefCell<Option<Vec<DefId>>>,
+ output_filenames: Arc<OutputFilenames>,
}
-impl<'tcx> GlobalCtxt<'tcx> {
- /// Get the global TyCtxt.
- pub fn global_tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
- TyCtxt {
- gcx: self,
- interners: &self.global_interners
- }
- }
+/// Everything needed to efficiently work with interned allocations
+#[derive(Debug, Default)]
+pub struct InterpretInterner<'tcx> {
+    /// All state lives behind a single `Lock`; every public method
+    /// borrows it internally, so callers never lock themselves.
+    inner: Lock<InterpretInternerInner<'tcx>>,
+}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- pub fn crate_name(self, cnum: CrateNum) -> Symbol {
- if cnum == LOCAL_CRATE {
- self.crate_name
- } else {
- self.sess.cstore.crate_name(cnum)
- }
- }
+#[derive(Debug, Default)]
+struct InterpretInternerInner<'tcx> {
+    /// Stores the value of constants (and deduplicates the actual memory)
+    allocs: FxHashSet<&'tcx interpret::Allocation>,
-    pub fn original_crate_name(self, cnum: CrateNum) -> Symbol {
-        if cnum == LOCAL_CRATE {
-            self.crate_name.clone()
-        } else {
-            self.sess.cstore.original_crate_name(cnum)
-        }
+    /// Allows obtaining function instance handles via a unique identifier
+    functions: FxHashMap<interpret::AllocId, Instance<'tcx>>,
+
+    /// Inverse map of `functions`.
+    /// Used so we don't allocate a new pointer every time we need one
+    function_cache: FxHashMap<Instance<'tcx>, interpret::AllocId>,
+
+    /// Allows obtaining const allocs via a unique identifier
+    alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,
+
+    /// Allows obtaining static def ids via a unique id
+    statics: FxHashMap<interpret::AllocId, DefId>,
+
+    /// The AllocId to assign to the next new regular allocation.
+    /// Always incremented, never gets smaller.
+    next_id: interpret::AllocId,
+
+    /// Inverse map of `statics`
+    /// Used so we don't allocate a new pointer every time we need one
+    static_cache: FxHashMap<DefId, interpret::AllocId>,
+
+    /// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
+    /// allocations for string and bytestring literals.
+    literal_alloc_cache: FxHashMap<Vec<u8>, interpret::AllocId>,
+}
+
+impl<'tcx> InterpretInterner<'tcx> {
+    /// Returns the `AllocId` for `instance`, reserving a fresh id and
+    /// recording it in both `functions` and `function_cache` on first use.
+    ///
+    /// NOTE(review): the cache lookup and the later insert are separate
+    /// borrows of `inner`; two concurrent callers could each miss the
+    /// cache and reserve distinct ids for the same instance — confirm
+    /// that duplicate fn-ptr ids are acceptable here.
+    pub fn create_fn_alloc(&self, instance: Instance<'tcx>) -> interpret::AllocId {
+        if let Some(&alloc_id) = self.inner.borrow().function_cache.get(&instance) {
+            return alloc_id;
+        }
+        let id = self.reserve();
+        debug!("creating fn ptr: {}", id);
+        let mut inner = self.inner.borrow_mut();
+        inner.functions.insert(id, instance);
+        inner.function_cache.insert(instance, id);
+        id
+    }
+
+    /// Looks up the function instance registered for `id`, if any.
+    pub fn get_fn(
+        &self,
+        id: interpret::AllocId,
+    ) -> Option<Instance<'tcx>> {
+        self.inner.borrow().functions.get(&id).cloned()
+    }
+
+    /// Looks up the interned allocation backing `id`, if any.
+    pub fn get_alloc(
+        &self,
+        id: interpret::AllocId,
+    ) -> Option<&'tcx interpret::Allocation> {
+        self.inner.borrow().alloc_by_id.get(&id).cloned()
+    }
+
+    /// Returns the `AllocId` for the static `static_id`, reserving one
+    /// and recording it in `statics`/`static_cache` on first use.
+    /// (Same non-atomic check-then-reserve pattern as `create_fn_alloc`.)
+    pub fn cache_static(
+        &self,
+        static_id: DefId,
+    ) -> interpret::AllocId {
+        if let Some(alloc_id) = self.inner.borrow().static_cache.get(&static_id).cloned() {
+            return alloc_id;
+        }
+        let alloc_id = self.reserve();
+        let mut inner = self.inner.borrow_mut();
+        inner.static_cache.insert(static_id, alloc_id);
+        inner.statics.insert(alloc_id, static_id);
+        alloc_id
+    }
+
+    /// Looks up the static `DefId` registered for `ptr`, if any.
+    pub fn get_static(
+        &self,
+        ptr: interpret::AllocId,
+    ) -> Option<DefId> {
+        self.inner.borrow().statics.get(&ptr).cloned()
+    }
+
+    /// Binds a previously `reserve()`d `id` to `alloc`; it is a bug for
+    /// the id to already have a backing allocation.
+    pub fn intern_at_reserved(
+        &self,
+        id: interpret::AllocId,
+        alloc: &'tcx interpret::Allocation,
+    ) {
+        if let Some(old) = self.inner.borrow_mut().alloc_by_id.insert(id, alloc) {
+            bug!("tried to intern allocation at {}, but was already existing as {:#?}", id, old);
+        }
+    }
+
+    /// Obtains a new allocation ID that can be referenced but does not
+    /// yet have an allocation backing it.
+    pub fn reserve(
+        &self,
+    ) -> interpret::AllocId {
+        let mut inner = self.inner.borrow_mut();
+        let next = inner.next_id;
+        inner.next_id.0 = inner.next_id.0
+            .checked_add(1)
+            .expect("You overflowed a u64 by incrementing by 1... \
+                     You've just earned yourself a free drink if we ever meet. \
+                     Seriously, how did you do that?!");
+        next
    }
+}
- pub fn crate_disambiguator(self, cnum: CrateNum) -> Symbol {
- if cnum == LOCAL_CRATE {
- self.sess.local_crate_disambiguator()
- } else {
- self.sess.cstore.crate_disambiguator(cnum)
+impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+ /// Get the global TyCtxt.
+ #[inline]
+ pub fn global_tcx(self) -> TyCtxt<'a, 'gcx, 'gcx> {
+ TyCtxt {
+ gcx: self.gcx,
+ interners: &self.gcx.global_interners,
}
}
self.global_arenas.adt_def.alloc(def)
}
+ pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
+ if bytes.is_empty() {
+ &[]
+ } else {
+ self.global_interners.arena.alloc_slice(bytes)
+ }
+ }
+
+ pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
+ -> &'tcx [&'tcx ty::Const<'tcx>] {
+ if values.is_empty() {
+ &[]
+ } else {
+ self.interners.arena.alloc_slice(values)
+ }
+ }
+
+ pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
+ -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
+ if values.is_empty() {
+ &[]
+ } else {
+ self.interners.arena.alloc_slice(values)
+ }
+ }
+
+ pub fn intern_const_alloc(
+ self,
+ alloc: interpret::Allocation,
+ ) -> &'gcx interpret::Allocation {
+ let allocs = &mut self.interpret_interner.inner.borrow_mut().allocs;
+ if let Some(alloc) = allocs.get(&alloc) {
+ return alloc;
+ }
+
+ let interned = self.global_arenas.const_allocs.alloc(alloc);
+ if let Some(prev) = allocs.replace(interned) {
+ bug!("Tried to overwrite interned Allocation: {:#?}", prev)
+ }
+ interned
+ }
+
+ /// Allocates a byte or string literal for `mir::interpret`
+ pub fn allocate_cached(self, bytes: &[u8]) -> interpret::AllocId {
+ // check whether we already allocated this literal or a constant with the same memory
+ if let Some(&alloc_id) = self.interpret_interner.inner.borrow()
+ .literal_alloc_cache.get(bytes) {
+ return alloc_id;
+ }
+ // create an allocation that just contains these bytes
+ let alloc = interpret::Allocation::from_bytes(bytes);
+ let alloc = self.intern_const_alloc(alloc);
+
+ // the next unique id
+ let id = self.interpret_interner.reserve();
+ // make the allocation identifiable
+ self.interpret_interner.inner.borrow_mut().alloc_by_id.insert(id, alloc);
+ // cache it for the future
+ self.interpret_interner.inner.borrow_mut().literal_alloc_cache.insert(bytes.to_owned(), id);
+ id
+ }
+
pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
- if let Some(st) = self.stability_interner.borrow().get(&stab) {
+ let mut stability_interner = self.stability_interner.borrow_mut();
+ if let Some(st) = stability_interner.get(&stab) {
return st;
}
let interned = self.global_interners.arena.alloc(stab);
- if let Some(prev) = self.stability_interner.borrow_mut().replace(interned) {
+ if let Some(prev) = stability_interner.replace(interned) {
bug!("Tried to overwrite interned Stability: {:?}", prev)
}
interned
}
- pub fn intern_layout(self, layout: Layout) -> &'gcx Layout {
- if let Some(layout) = self.layout_interner.borrow().get(&layout) {
+ pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
+ let mut layout_interner = self.layout_interner.borrow_mut();
+ if let Some(layout) = layout_interner.get(&layout) {
return layout;
}
let interned = self.global_arenas.layout.alloc(layout);
- if let Some(prev) = self.layout_interner.borrow_mut().replace(interned) {
+ if let Some(prev) = layout_interner.replace(interned) {
bug!("Tried to overwrite interned Layout: {:?}", prev)
}
interned
/// value (types, substs, etc.) can only be used while `ty::tls` has a valid
/// reference to the context, to allow formatting values that need it.
pub fn create_and_enter<F, R>(s: &'tcx Session,
+ cstore: &'tcx dyn CrateStore,
local_providers: ty::maps::Providers<'tcx>,
extern_providers: ty::maps::Providers<'tcx>,
- mir_passes: Rc<Passes>,
- arenas: &'tcx GlobalArenas<'tcx>,
- arena: &'tcx DroplessArena,
+ arenas: &'tcx AllArenas<'tcx>,
resolutions: ty::Resolutions,
- named_region_map: resolve_lifetime::NamedRegionMap,
hir: hir_map::Map<'tcx>,
- lang_items: middle::lang_items::LanguageItems,
- stability: stability::Index<'tcx>,
+ on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
crate_name: &str,
+ tx: mpsc::Sender<Box<dyn Any + Send>>,
+ output_filenames: &OutputFilenames,
f: F) -> R
where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
{
+ // The target data layout is now parsed from the target spec itself;
+ // a malformed layout string is a fatal (unrecoverable) session error.
- let data_layout = TargetDataLayout::parse(s);
- let interners = CtxtInterners::new(arena);
+ let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
+ s.fatal(&err);
+ });
+ let interners = CtxtInterners::new(&arenas.interner);
let common_types = CommonTypes::new(&interners);
let dep_graph = hir.dep_graph.clone();
+ // One provider table per crate number, indexed by CrateNum; the local
+ // crate gets `local_providers`, every upstream crate `extern_providers`.
- let max_cnum = s.cstore.crates().iter().map(|c| c.as_usize()).max().unwrap_or(0);
+ let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
providers[LOCAL_CRATE] = local_providers;
let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
+ // `crates_untracked` / `def_path_table` bypass dependency tracking here;
+ // this runs before the query system is usable.
- let upstream_def_path_tables: Vec<(CrateNum, Rc<_>)> = s
- .cstore
- .crates()
+ let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
+ .crates_untracked()
.iter()
- .map(|&cnum| (cnum, s.cstore.def_path_table(cnum)))
+ .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
.collect();
let def_path_tables = || {
None
};
+ // Re-key the resolver's NodeId-based trait map by (owner DefIndex,
+ // ItemLocalId) so it can be served per-owner through a query.
- tls::enter_global(GlobalCtxt {
+ let mut trait_map = FxHashMap();
+ for (k, v) in resolutions.trait_map {
+ let hir_id = hir.node_to_hir_id(k);
+ let map = trait_map.entry(hir_id.owner)
+ .or_insert_with(|| Lrc::new(FxHashMap()));
+ // Lrc was just created above, so get_mut cannot fail here.
+ Lrc::get_mut(map).unwrap()
+ .insert(hir_id.local_id,
+ Lrc::new(StableVec::new(v)));
+ }
+
+ let gcx = &GlobalCtxt {
sess: s,
- trans_trait_caches: traits::trans::TransTraitCaches::new(dep_graph.clone()),
- specializes_cache: RefCell::new(traits::SpecializesCache::new()),
- global_arenas: arenas,
+ cstore,
+ global_arenas: &arenas.global,
global_interners: interners,
dep_graph: dep_graph.clone(),
+ on_disk_query_result_cache,
types: common_types,
- named_region_map,
- trait_map: resolutions.trait_map,
- export_map: resolutions.export_map,
+ trait_map,
+ // Resolver outputs are wrapped in Lrc and re-keyed by DefId so they
+ // can be handed out cheaply from queries.
+ export_map: resolutions.export_map.into_iter().map(|(k, v)| {
+ (k, Lrc::new(v))
+ }).collect(),
+ freevars: resolutions.freevars.into_iter().map(|(k, v)| {
+ (hir.local_def_id(k), Lrc::new(v))
+ }).collect(),
+ maybe_unused_trait_imports:
+ resolutions.maybe_unused_trait_imports
+ .into_iter()
+ .map(|id| hir.local_def_id(id))
+ .collect(),
+ maybe_unused_extern_crates:
+ resolutions.maybe_unused_extern_crates
+ .into_iter()
+ .map(|(id, sp)| (hir.local_def_id(id), sp))
+ .collect(),
hir,
def_path_hash_to_def_id,
maps: maps::Maps::new(providers),
- mir_passes,
- freevars: RefCell::new(resolutions.freevars),
- maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports,
- maybe_unused_extern_crates: resolutions.maybe_unused_extern_crates,
- rcache: RefCell::new(FxHashMap()),
- normalized_cache: RefCell::new(FxHashMap()),
- inhabitedness_cache: RefCell::new(FxHashMap()),
- lang_items,
- used_unsafe: RefCell::new(NodeSet()),
- used_mut_nodes: RefCell::new(NodeSet()),
- stability: RefCell::new(stability),
+ // RefCell caches become Lock so the context can be shared across threads.
+ rcache: Lock::new(FxHashMap()),
selection_cache: traits::SelectionCache::new(),
evaluation_cache: traits::EvaluationCache::new(),
- rvalue_promotable_to_static: RefCell::new(NodeMap()),
crate_name: Symbol::intern(crate_name),
data_layout,
- layout_interner: RefCell::new(FxHashSet()),
- layout_depth: Cell::new(0),
- derive_macros: RefCell::new(NodeMap()),
- stability_interner: RefCell::new(FxHashSet()),
- all_traits: RefCell::new(None),
- }, f)
+ layout_interner: Lock::new(FxHashSet()),
+ stability_interner: Lock::new(FxHashSet()),
+ interpret_interner: Default::default(),
+ tx_to_llvm_workers: Lock::new(tx),
+ output_filenames: Arc::new(output_filenames.clone()),
+ };
+
+ // `enter_global` now takes the context by reference; the GlobalCtxt lives
+ // on this stack frame and is only accessible inside `f` via tls.
+ tls::enter_global(gcx, f)
}
+ /// Forwards to `Session::consider_optimizing`, supplying this crate's name.
+ /// `msg` is a closure so the message is only built if the session needs it.
pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
let cname = self.crate_name(LOCAL_CRATE).as_str();
self.sess.consider_optimizing(&cname, msg)
}
+
+ /// Fetches the crate's lang items through the (tracked, cached) query system.
+ pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
+ self.get_lang_items(LOCAL_CRATE)
+ }
+
+ /// Due to missing llvm support for lowering 128 bit math to software emulation
+ /// (on some targets), the lowering can be done in MIR.
+ ///
+ /// This function only exists until said support is implemented.
+ ///
+ /// Returns `Some((op, checked))` when `def_id` is one of the i128/u128
+ /// arithmetic lang items; the bool is `true` for the overflow-checking
+ /// (`*o_fn`) variants. Returns `None` for any other def-id.
+ pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
+ let items = self.lang_items();
+ // Wrap once so each comparison below is Option == Option.
+ let def_id = Some(def_id);
+ if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
+ else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
+ else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
+ else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
+ else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
+ else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
+ else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
+ else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
+ else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
+ else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
+ else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
+ else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
+ else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
+ else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
+ else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
+ else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
+ else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
+ else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
+ else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
+ else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
+ else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
+ else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
+ else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
+ else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
+ else { None }
+ }
+
+ /// Fetches the stability index via the query system (replaces the old
+ /// `stability` RefCell field on GlobalCtxt).
+ pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
+ self.stability_index(LOCAL_CRATE)
+ }
+
+ /// All crate numbers, fetched through the tracked `all_crate_nums` query.
+ pub fn crates(self) -> Lrc<Vec<CrateNum>> {
+ self.all_crate_nums(LOCAL_CRATE)
+ }
+
+ /// The enabled feature gates, via the `features_query` (replaces direct
+ /// access to `sess.features`).
+ pub fn features(self) -> Lrc<feature_gate::Features> {
+ self.features_query(LOCAL_CRATE)
+ }
+
+ /// Returns the `DefKey` for `id`, dispatching to the HIR map for local
+ /// definitions and to the crate store for upstream ones.
+ pub fn def_key(self, id: DefId) -> hir_map::DefKey {
+ if id.is_local() {
+ self.hir.def_key(id)
+ } else {
+ self.cstore.def_key(id)
+ }
+ }
+
+ /// Convert a `DefId` into its fully expanded `DefPath` (every
+ /// `DefId` is really just an interned def-path).
+ ///
+ /// Note that if `id` is not local to this crate, the result will
+ /// be a non-local `DefPath`.
+ pub fn def_path(self, id: DefId) -> hir_map::DefPath {
+ if id.is_local() {
+ self.hir.def_path(id)
+ } else {
+ self.cstore.def_path(id)
+ }
+ }
+
+ /// Stable hash of `def_id`'s def-path; local definitions use the
+ /// definitions table, upstream ones the crate store.
+ #[inline]
+ pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
+ if def_id.is_local() {
+ self.hir.definitions().def_path_hash(def_id.index)
+ } else {
+ self.cstore.def_path_hash(def_id)
+ }
+ }
+
+ /// Renders `def_id` as `crate_name[dis]path::to::item` for debug output.
+ pub fn def_path_debug_str(self, def_id: DefId) -> String {
+ // We are explicitly not going through queries here in order to get
+ // crate name and disambiguator since this code is called from debug!()
+ // statements within the query system and we'd run into endless
+ // recursion otherwise.
+ let (crate_name, crate_disambiguator) = if def_id.is_local() {
+ (self.crate_name.clone(),
+ self.sess.local_crate_disambiguator())
+ } else {
+ (self.cstore.crate_name_untracked(def_id.krate),
+ self.cstore.crate_disambiguator_untracked(def_id.krate))
+ };
+
+ format!("{}[{}]{}",
+ crate_name,
+ // Don't print the whole crate disambiguator. That's just
+ // annoying in debug output.
+ &(crate_disambiguator.to_fingerprint().to_hex())[..4],
+ self.def_path(def_id).to_string_no_crate())
+ }
+
+ /// Owned copy of the crate store's metadata format version bytes.
+ pub fn metadata_encoding_version(self) -> Vec<u8> {
+ self.cstore.metadata_encoding_version().to_vec()
+ }
+
+ // Note that this is *untracked* and should only be used within the query
+ // system if the result is otherwise tracked through queries
+ pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
+ self.cstore.crate_data_as_rc_any(cnum)
+ }
+
+ /// Builds a `StableHashingContext` for incr-comp hashing.
+ pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
+ // Reading the whole krate here must not register a dep-graph edge,
+ // hence with_ignore.
+ let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());
+
+ StableHashingContext::new(self.sess,
+ krate,
+ self.hir.definitions(),
+ self.cstore)
+ }
+
+ // This method makes sure that we have a DepNode and a Fingerprint for
+ // every upstream crate. It needs to be called once right after the tcx is
+ // created.
+ // With full-fledged red/green, the method will probably become unnecessary
+ // as this will be done on-demand.
+ pub fn allocate_metadata_dep_nodes(self) {
+ // We cannot use the query versions of crates() and crate_hash(), since
+ // those would need the DepNodes that we are allocating here.
+ for cnum in self.cstore.crates_untracked() {
+ let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
+ let crate_hash = self.cstore.crate_hash_untracked(cnum);
+ self.dep_graph.with_task(dep_node,
+ self,
+ crate_hash,
+ |_, x| x // No transformation needed
+ );
+ }
+ }
+
+ // This method exercises the `in_scope_traits_map` query for all possible
+ // values so that we have their fingerprints available in the DepGraph.
+ // This is only required as long as we still use the old dependency tracking
+ // which needs to have the fingerprints of all input nodes beforehand.
+ pub fn precompute_in_scope_traits_hashes(self) {
+ // trait_map is keyed by owner DefIndex (see create_and_enter).
+ for &def_index in self.trait_map.keys() {
+ self.in_scope_traits_map(def_index);
+ }
+ }
+
+ /// Writes the on-disk query result cache (for incremental compilation)
+ /// through the given encoder.
+ pub fn serialize_query_result_cache<E>(self,
+ encoder: &mut E)
+ -> Result<(), E::Error>
+ where E: ty::codec::TyEncoder
+ {
+ self.on_disk_query_result_cache.serialize(self.global_tcx(), encoder)
+ }
+
+ /// If true, we should use the MIR-based borrowck (we may *also* use
+ /// the AST-based borrowck).
+ pub fn use_mir_borrowck(self) -> bool {
+ self.borrowck_mode().use_mir()
+ }
+
+ /// If true, pattern variables for use in guards on match arms
+ /// will be bound as references to the data, and occurrences of
+ /// those variables in the guard expression will implicitly
+ /// dereference those bindings. (See rust-lang/rust#27282.)
+ pub fn all_pat_vars_are_implicit_refs_within_guards(self) -> bool {
+ // Currently tied directly to MIR borrowck being enabled.
+ self.borrowck_mode().use_mir()
+ }
+
+ /// If true, we should enable two-phase borrows checks. This is
+ /// done with either `-Ztwo-phase-borrows` or with
+ /// `#![feature(nll)]`.
+ pub fn two_phase_borrows(self) -> bool {
+ self.features().nll || self.sess.opts.debugging_opts.two_phase_borrows
+ }
+
+ /// What mode(s) of borrowck should we run? AST? MIR? both?
+ /// (Also considers the `#![feature(nll)]` setting.)
+ pub fn borrowck_mode(&self) -> BorrowckMode {
+ match self.sess.opts.borrowck_mode {
+ // Explicit Mir/Compare on the command line always win.
+ mode @ BorrowckMode::Mir |
+ mode @ BorrowckMode::Compare => mode,
+
+ // The Ast default is upgraded to Mir when #![feature(nll)] is set.
+ mode @ BorrowckMode::Ast => {
+ if self.features().nll {
+ BorrowckMode::Mir
+ } else {
+ mode
+ }
+ }
+
+ }
+ }
+
+ /// Should we emit EndRegion MIR statements? These are consumed by
+ /// MIR borrowck, but not when NLL is used. They are also consumed
+ /// by the validation stuff.
+ pub fn emit_end_regions(self) -> bool {
+ self.sess.opts.debugging_opts.emit_end_regions ||
+ self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
+ self.use_mir_borrowck()
+ }
+
+ /// Whether monomorphized generic code may be shared across crates.
+ /// An explicit `-Zshare-generics` wins; otherwise sharing defaults on
+ /// for incremental builds and for size-oriented opt-levels.
+ #[inline]
+ pub fn share_generics(self) -> bool {
+ match self.sess.opts.debugging_opts.share_generics {
+ Some(setting) => setting,
+ None => {
+ self.sess.opts.incremental.is_some() ||
+ match self.sess.opts.optimize {
+ OptLevel::No |
+ OptLevel::Less |
+ OptLevel::Size |
+ OptLevel::SizeMin => true,
+ OptLevel::Default |
+ OptLevel::Aggressive => false,
+ }
+ }
+ }
+ }
+
+ /// True if any of the crate types being built (rlib/dylib) can export
+ /// monomorphizations for downstream reuse. Only meaningful when
+ /// `share_generics()` is on, hence the debug_assert.
+ #[inline]
+ pub fn local_crate_exports_generics(self) -> bool {
+ debug_assert!(self.share_generics());
+
+ self.sess.crate_types.borrow().iter().any(|crate_type| {
+ match crate_type {
+ CrateTypeExecutable |
+ CrateTypeStaticlib |
+ CrateTypeProcMacro |
+ CrateTypeCdylib => false,
+ CrateTypeRlib |
+ CrateTypeDylib => true,
+ }
+ })
+ }
+}
+
+// Only available on a fully-global TyCtxt ('tcx == 'gcx).
+impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
+ /// Encodes this crate's metadata by delegating to the crate store.
+ pub fn encode_metadata(self, link_meta: &LinkMeta)
+ -> EncodedMetadata
+ {
+ self.cstore.encode_metadata(self, link_meta)
+ }
}
impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
/// Call the closure with a local `TyCtxt` using the given arena.
- pub fn enter_local<F, R>(&self, arena: &'tcx DroplessArena, f: F) -> R
- where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ pub fn enter_local<F, R>(
+ &self,
+ arena: &'tcx SyncDroplessArena,
+ f: F
+ ) -> R
+ where
+ F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
{
let interners = CtxtInterners::new(arena);
- tls::enter(self, &interners, f)
+ let tcx = TyCtxt {
+ gcx: self,
+ interners: &interners,
+ };
+ // Build a new ImplicitCtxt that carries the local interners but
+ // inherits the current query job, layout depth and dep-graph task.
+ ty::tls::with_related_context(tcx.global_tcx(), |icx| {
+ let new_icx = ty::tls::ImplicitCtxt {
+ tcx,
+ query: icx.query.clone(),
+ layout_depth: icx.layout_depth,
+ task: icx.task,
+ };
+ ty::tls::enter_context(&new_icx, |new_icx| {
+ f(new_icx.tcx)
+ })
+ })
}
}
/// pointer differs. The latter case is possible if a primitive type,
/// e.g. `()` or `u8`, was interned in a different context.
pub trait Lift<'tcx> {
+ // The `'tcx` bound makes the associated type outlive the target
+ // context's lifetime, so lifted values are valid for all of 'tcx.
- type Lifted;
+ type Lifted: 'tcx;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
}
-impl<'a, 'tcx> Lift<'tcx> for ty::ParamEnv<'a> {
- type Lifted = ty::ParamEnv<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<ty::ParamEnv<'tcx>> {
- self.caller_bounds.lift_to_tcx(tcx).and_then(|caller_bounds| {
- Some(ty::ParamEnv {
- reveal: self.reveal,
- caller_bounds,
- })
- })
- }
-}
-
impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
type Lifted = Ty<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
}
}
-impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
- type Lifted = &'tcx Substs<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
- if self.len() == 0 {
- return Some(Slice::empty());
- }
- if tcx.interners.arena.in_arena(&self[..] as *const _) {
+impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
+ type Lifted = Region<'tcx>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
+ if tcx.interners.arena.in_arena(*self as *const _) {
return Some(unsafe { mem::transmute(*self) });
}
// Also try in the global tcx if we're not that.
}
}
-impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
- type Lifted = Region<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
+impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
+ type Lifted = &'tcx Const<'tcx>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
if tcx.interners.arena.in_arena(*self as *const _) {
return Some(unsafe { mem::transmute(*self) });
}
}
}
+impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
+ type Lifted = &'tcx Substs<'tcx>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
+ // The empty slice is shared by all contexts.
+ if self.len() == 0 {
+ return Some(Slice::empty());
+ }
+ // The transmute only changes lifetime parameters; it is sound
+ // because `in_arena` proved the data lives in `tcx`'s own arena.
+ if tcx.interners.arena.in_arena(&self[..] as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
+ }
+ // Also try in the global tcx if we're not that.
+ if !tcx.is_global() {
+ self.lift_to_tcx(tcx.global_tcx())
+ } else {
+ None
+ }
+ }
+}
+
impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
type Lifted = &'tcx Slice<Ty<'tcx>>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
}
}
+impl<'a, 'tcx> Lift<'tcx> for &'a Slice<CanonicalVarInfo> {
+ type Lifted = &'tcx Slice<CanonicalVarInfo>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ // The empty slice is shared by all contexts.
+ if self.len() == 0 {
+ return Some(Slice::empty());
+ }
+ // Same arena-membership argument as the other Lift impls: the
+ // transmute only adjusts lifetimes once ownership is established.
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
+ }
+ // Also try in the global tcx if we're not that.
+ if !tcx.is_global() {
+ self.lift_to_tcx(tcx.global_tcx())
+ } else {
+ None
+ }
+ }
+}
+
pub mod tls {
- use super::{CtxtInterners, GlobalCtxt, TyCtxt};
+ use super::{GlobalCtxt, TyCtxt};
use std::cell::Cell;
use std::fmt;
+ use std::mem;
use syntax_pos;
+ use ty::maps;
+ use errors::{Diagnostic, TRACK_DIAGNOSTICS};
+ use rustc_data_structures::OnDrop;
+ use rustc_data_structures::sync::Lrc;
+ use dep_graph::OpenTask;
+
+ /// This is the implicit state of rustc. It contains the current
+ /// TyCtxt and query. It is updated when creating a local interner or
+ /// executing a new query. Whenever there's a TyCtxt value available
+ /// you should also have access to an ImplicitCtxt through the functions
+ /// in this module.
+ #[derive(Clone)]
+ pub struct ImplicitCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+ /// The current TyCtxt. Initially created by `enter_global` and updated
+ /// by `enter_local` with a new local interner
+ pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+
+ /// The current query job, if any. This is updated by start_job in
+ /// ty::maps::plumbing when executing a query
+ pub query: Option<Lrc<maps::QueryJob<'gcx>>>,
- /// Marker types used for the scoped TLS slot.
- /// The type context cannot be used directly because the scoped TLS
- /// in libstd doesn't allow types generic over lifetimes.
- enum ThreadLocalGlobalCtxt {}
- enum ThreadLocalInterners {}
+ /// Used to prevent layout from recursing too deeply.
+ pub layout_depth: usize,
- thread_local! {
- static TLS_TCX: Cell<Option<(*const ThreadLocalGlobalCtxt,
- *const ThreadLocalInterners)>> = Cell::new(None)
+ /// The current dep graph task. This is used to add dependencies to queries
+ /// when executing them
+ pub task: &'a OpenTask,
}
+ // A thread local value which stores a pointer to the current ImplicitCtxt
+ // (erased to usize because TLS types cannot be generic over lifetimes;
+ // 0 means "no context").
+ thread_local!(static TLV: Cell<usize> = Cell::new(0));
+
+ /// Installs `value` as the current TLV for the duration of `f`.
+ fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
+ let old = get_tlv();
+ // OnDrop restores the previous value even if `f` panics.
+ let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
+ TLV.with(|tlv| tlv.set(value));
+ f()
+ }
+
+ fn get_tlv() -> usize {
+ TLV.with(|tlv| tlv.get())
+ }
+
+ /// This is a callback from libsyntax as it cannot access the implicit state
+ /// in librustc otherwise
fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
with(|tcx| {
write!(f, "{}", tcx.sess.codemap().span_to_string(span))
})
}
- pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R
- where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
+ /// This is a callback from libsyntax as it cannot access the implicit state
+ /// in librustc otherwise. It is used to when diagnostic messages are
+ /// emitted and stores them in the current query, if there is one.
+ fn track_diagnostic(diagnostic: &Diagnostic) {
+ with_context(|context| {
+ if let Some(ref query) = context.query {
+ query.diagnostics.lock().push(diagnostic.clone());
+ }
+ })
+ }
+
+ /// Sets up the callbacks from libsyntax on the current thread
+ pub fn with_thread_locals<F, R>(f: F) -> R
+ where F: FnOnce() -> R
{
syntax_pos::SPAN_DEBUG.with(|span_dbg| {
let original_span_debug = span_dbg.get();
span_dbg.set(span_debug);
- let result = enter(&gcx, &gcx.global_interners, f);
- span_dbg.set(original_span_debug);
- result
+
+ // Both callbacks are restored on drop (panic-safe).
+ let _on_drop = OnDrop(move || {
+ span_dbg.set(original_span_debug);
+ });
+
+ TRACK_DIAGNOSTICS.with(|current| {
+ let original = current.get();
+ current.set(track_diagnostic);
+
+ let _on_drop = OnDrop(move || {
+ current.set(original);
+ });
+
+ f()
+ })
})
}
- pub fn enter<'a, 'gcx: 'tcx, 'tcx, F, R>(gcx: &'a GlobalCtxt<'gcx>,
- interners: &'a CtxtInterners<'tcx>,
- f: F) -> R
- where F: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
+ pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
+ f: F) -> R
+ where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
{
- let gcx_ptr = gcx as *const _ as *const ThreadLocalGlobalCtxt;
- let interners_ptr = interners as *const _ as *const ThreadLocalInterners;
- TLS_TCX.with(|tls| {
- let prev = tls.get();
- tls.set(Some((gcx_ptr, interners_ptr)));
- let ret = f(TyCtxt {
- gcx,
- interners,
- });
- tls.set(prev);
- ret
+ set_tlv(context as *const _ as usize, || {
+ f(&context)
})
}
- pub fn with<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ /// Enters GlobalCtxt by setting up libsyntax callbacks and
+ /// creating a initial TyCtxt and ImplicitCtxt.
+ /// This happens once per rustc session and TyCtxts only exists
+ /// inside the `f` function.
+ pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R
+ where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
{
- TLS_TCX.with(|tcx| {
- let (gcx, interners) = tcx.get().unwrap();
- let gcx = unsafe { &*(gcx as *const GlobalCtxt) };
- let interners = unsafe { &*(interners as *const CtxtInterners) };
- f(TyCtxt {
+ with_thread_locals(|| {
+ let tcx = TyCtxt {
gcx,
- interners,
+ interners: &gcx.global_interners,
+ };
+ let icx = ImplicitCtxt {
+ tcx,
+ query: None,
+ layout_depth: 0,
+ task: &OpenTask::Ignore,
+ };
+ enter_context(&icx, |_| {
+ f(tcx)
})
})
}
- pub fn with_opt<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
+ /// Allows access to the current ImplicitCtxt in a closure if one is available
+ pub fn with_context_opt<F, R>(f: F) -> R
+ where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
{
- if TLS_TCX.with(|tcx| tcx.get().is_some()) {
- with(|v| f(Some(v)))
- } else {
+ let context = get_tlv();
+ if context == 0 {
f(None)
+ } else {
+ // SAFETY relies on enter_context keeping the pointed-to
+ // ImplicitCtxt alive for as long as the TLV holds its address.
+ unsafe { f(Some(&*(context as *const ImplicitCtxt))) }
}
}
+
+ /// Allows access to the current ImplicitCtxt.
+ /// Panics if there is no ImplicitCtxt available
+ pub fn with_context<F, R>(f: F) -> R
+ where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
+ {
+ with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
+ }
+
+ /// Allows access to the current ImplicitCtxt whose tcx field has the same global
+ /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
+ /// with the same 'gcx lifetime as the TyCtxt passed in.
+ /// This will panic if you pass it a TyCtxt which has a different global interner from
+ /// the current ImplicitCtxt's tcx field.
+ pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
+ where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
+ {
+ with_context(|context| {
+ unsafe {
+ // The assert justifies the lifetime-only transmute below:
+ // both contexts share the same GlobalCtxt instance.
+ let gcx = tcx.gcx as *const _ as usize;
+ assert!(context.tcx.gcx as *const _ as usize == gcx);
+ let context: &ImplicitCtxt = mem::transmute(context);
+ f(context)
+ }
+ })
+ }
+
+ /// Allows access to the current ImplicitCtxt whose tcx field has the same global
+ /// interner and local interner as the tcx argument passed in. This means the closure
+ /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
+ /// This will panic if you pass it a TyCtxt which has a different global interner or
+ /// a different local interner from the current ImplicitCtxt's tcx field.
+ pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
+ where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
+ {
+ with_context(|context| {
+ unsafe {
+ let gcx = tcx.gcx as *const _ as usize;
+ let interners = tcx.interners as *const _ as usize;
+ assert!(context.tcx.gcx as *const _ as usize == gcx);
+ assert!(context.tcx.interners as *const _ as usize == interners);
+ let context: &ImplicitCtxt = mem::transmute(context);
+ f(context)
+ }
+ })
+ }
+
+ /// Allows access to the TyCtxt in the current ImplicitCtxt.
+ /// Panics if there is no ImplicitCtxt available
+ pub fn with<F, R>(f: F) -> R
+ where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ {
+ with_context(|context| f(context.tcx))
+ }
+
+ /// Allows access to the TyCtxt in the current ImplicitCtxt.
+ /// The closure is passed None if there is no ImplicitCtxt available
+ pub fn with_opt<F, R>(f: F) -> R
+ where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
+ {
+ with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
+ }
}
macro_rules! sty_debug_print {
sty_debug_print!(
self,
TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr,
- TyDynamic, TyClosure, TyTuple, TyParam, TyInfer, TyProjection, TyAnon);
+ TyGenerator, TyGeneratorWitness, TyDynamic, TyClosure, TyTuple,
+ TyParam, TyInfer, TyProjection, TyAnon, TyForeign);
println!("Substs interner: #{}", self.interners.substs.borrow().len());
println!("Region interner: #{}", self.interners.region.borrow().len());
println!("Stability interner: #{}", self.stability_interner.borrow().len());
+ println!("Interpret interner: #{}", self.interpret_interner.inner.borrow().allocs.len());
println!("Layout interner: #{}", self.layout_interner.borrow().len());
}
}
}
}
+// Lets the interner's HashSet look up an Interned slice by raw &[CanonicalVarInfo].
+impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, Slice<CanonicalVarInfo>> {
+ fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
+ &self.0[..]
+ }
+}
+
// Interner lookup of interned Substs by the underlying &[Kind] key.
impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
&self.0[..]
}
}
+// Interner lookup of an interned Const by value.
+impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
+ fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
+ &self.0
+ }
+}
+
+// Interner lookup of interned Clause slices (Chalk-style lowering support).
+impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]>
+for Interned<'tcx, Slice<Clause<'tcx>>> {
+ fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] {
+ &self.0[..]
+ }
+}
+
+// Interner lookup of interned Goal slices (Chalk-style lowering support).
+impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]>
+for Interned<'tcx, Slice<Goal<'tcx>>> {
+ fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] {
+ &self.0[..]
+ }
+}
+
macro_rules! intern_method {
($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
$alloc_method:ident,
$alloc_to_key:expr,
$alloc_to_ret:expr,
- $needs_infer:expr) -> $ty:ty) => {
+ $keep_in_local_tcx:expr) -> $ty:ty) => {
impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
{
// HACK(eddyb) Depend on flags being accurate to
// determine that all contents are in the global tcx.
// See comments on Lift for why we can't use that.
- if !($needs_infer)(&v) {
+ if !($keep_in_local_tcx)(&v) {
if !self.is_global() {
let v = unsafe {
mem::transmute(v)
}
macro_rules! direct_interners {
- ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
+ ($lt_tcx:tt, $($name:ident: $method:ident($keep_in_local_tcx:expr) -> $ty:ty),+) => {
$(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0
}
}
- intern_method!($lt_tcx, $name: $method($ty, alloc, |x| x, |x| x, $needs_infer) -> $ty);)+
+ intern_method!(
+ $lt_tcx,
+ $name: $method($ty, alloc, |x| x, |x| x, $keep_in_local_tcx) -> $ty
+ );)+
}
}
}
+// The predicate passed to each interner decides whether a value must stay in
+// the *local* (inference) arena rather than being promoted to the global one.
direct_interners!('tcx,
- region: mk_region(|r| {
- match r {
- &ty::ReVar(_) | &ty::ReSkolemized(..) => true,
- _ => false
- }
- }) -> RegionKind
+ region: mk_region(|r: &RegionKind| r.keep_in_local_tcx()) -> RegionKind,
+ const_: mk_const(|c: &Const| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>
);
macro_rules! slice_interners {
existential_predicates: _intern_existential_predicates(ExistentialPredicate),
predicates: _intern_predicates(Predicate),
type_list: _intern_type_list(Ty),
- substs: _intern_substs(Kind)
+ substs: _intern_substs(Kind),
+ clauses: _intern_clauses(Clause),
+ goals: _intern_goals(Goal)
);
+// This isn't a perfect fit: CanonicalVarInfo slices are always
+// allocated in the global arena, so this `intern_method!` macro is
+// overly general. But we just return false for the code that checks
+// whether they belong in the thread-local arena, so no harm done, and
+// seems better than open-coding the rest.
+intern_method! {
+ 'tcx,
+ canonical_var_infos: _intern_canonical_var_infos(
+ &[CanonicalVarInfo],
+ alloc_slice,
+ Deref::deref,
+ // NOTE(review): the transmute assumes `Slice<T>` is layout-compatible
+ // with `[T]` — confirm against Slice's declaration.
+ |xs: &[CanonicalVarInfo]| -> &Slice<CanonicalVarInfo> { unsafe { mem::transmute(xs) } },
+ |_xs: &[CanonicalVarInfo]| -> bool { false }
+ ) -> Slice<CanonicalVarInfo>
+}
+
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- /// Create an unsafe fn ty based on a safe fn ty.
+ /// Given a `fn` type, returns an equivalent `unsafe fn` type;
+ /// that is, a `fn` type that is equivalent in every way for being
+ /// unsafe.
pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
}))
}
+ /// Given a closure signature `sig`, returns an equivalent `fn`
+ /// type with the same signature. Detuples and so forth -- so
+ /// e.g. if we have a sig with `Fn<(u32, i32)>` then you would get
+ /// a `fn(u32, i32)`.
+ pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
+ let converted_sig = sig.map_bound(|s| {
+ // Closure sigs carry their params as a single tuple in inputs()[0];
+ // anything else indicates a compiler bug upstream.
+ let params_iter = match s.inputs()[0].sty {
+ ty::TyTuple(params) => {
+ params.into_iter().cloned()
+ }
+ _ => bug!(),
+ };
+ self.mk_fn_sig(
+ params_iter,
+ s.output(),
+ s.variadic,
+ hir::Unsafety::Normal,
+ abi::Abi::Rust,
+ )
+ });
+
+ self.mk_fn_ptr(converted_sig)
+ }
+
// Interns a type/name combination, stores the resulting box in cx.interners,
// and returns the box as cast to an unsafe ptr (see comments for Ty above).
pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
+ /// Maps an AST integer type to the corresponding interned `Ty`.
pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
match tm {
- ast::IntTy::Is => self.types.isize,
+ ast::IntTy::Isize => self.types.isize,
ast::IntTy::I8 => self.types.i8,
ast::IntTy::I16 => self.types.i16,
ast::IntTy::I32 => self.types.i32,
+ /// Maps an AST unsigned integer type to the corresponding interned `Ty`.
pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
match tm {
- ast::UintTy::Us => self.types.usize,
+ ast::UintTy::Usize => self.types.usize,
ast::UintTy::U8 => self.types.u8,
ast::UintTy::U16 => self.types.u16,
ast::UintTy::U32 => self.types.u32,
self.mk_ty(TyAdt(def, substs))
}
+ /// An `extern type` (foreign opaque type) with the given def-id.
+ pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
+ self.mk_ty(TyForeign(def_id))
+ }
+
pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
let adt_def = self.adt_def(def_id);
- let substs = self.mk_substs(iter::once(Kind::from(ty)));
+ let generics = self.generics_of(def_id);
+ let mut substs = vec![Kind::from(ty)];
+ // Add defaults for other generic params if there are some.
+ // (e.g. Box grew an allocator-style defaulted parameter; every
+ // param past the payload type must have a default.)
+ for def in generics.types.iter().skip(1) {
+ assert!(def.has_default);
+ let ty = self.type_of(def.def_id).subst(self, &substs);
+ substs.push(ty.into());
+ }
+ let substs = self.mk_substs(substs.into_iter());
self.mk_ty(TyAdt(adt_def, substs))
}
self.mk_imm_ptr(self.mk_nil())
}
+ /// Array lengths are now represented as interned `Const`s of type
+ /// `usize` rather than a bare machine integer.
- pub fn mk_array(self, ty: Ty<'tcx>, n: usize) -> Ty<'tcx> {
- self.mk_ty(TyArray(ty, n))
+ pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
+ self.mk_ty(TyArray(ty, self.mk_const(ty::Const {
+ val: ConstVal::Value(Value::ByVal(PrimVal::Bytes(n.into()))),
+ ty: self.types.usize
+ })))
}
pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
self.mk_ty(TySlice(ty))
}
+ // Tuples no longer track a `defaulted` flag.
- pub fn intern_tup(self, ts: &[Ty<'tcx>], defaulted: bool) -> Ty<'tcx> {
- self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted))
+ pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
+ self.mk_ty(TyTuple(self.intern_type_list(ts)))
}
- pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I,
- defaulted: bool) -> I::Output {
- iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted)))
+ pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
+ iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts))))
}
pub fn mk_nil(self) -> Ty<'tcx> {
- self.intern_tup(&[], false)
+ self.intern_tup(&[])
}
+ /// The type a diverging expression defaults to: `!` under
+ /// `#![feature(never_type)]`, unit otherwise.
pub fn mk_diverging_default(self) -> Ty<'tcx> {
- if self.sess.features.borrow().never_type {
+ if self.features().never_type {
self.types.never
} else {
- self.intern_tup(&[], true)
+ self.intern_tup(&[])
}
}
+ // Callers now pass a fully-built ClosureSubsts instead of raw Substs.
pub fn mk_closure(self,
closure_id: DefId,
- substs: &'tcx Substs<'tcx>)
- -> Ty<'tcx> {
- self.mk_closure_from_closure_substs(closure_id, ClosureSubsts {
- substs,
- })
+ substs: ClosureSubsts<'tcx>)
+ -> Ty<'tcx> {
+ self.mk_closure_from_closure_substs(closure_id, substs)
}
pub fn mk_closure_from_closure_substs(self,
self.mk_ty(TyClosure(closure_id, closure_substs))
}
+ /// A generator type: def-id, captured substs, and the interior
+ /// (the types live across yield points).
+ pub fn mk_generator(self,
+ id: DefId,
+ closure_substs: ClosureSubsts<'tcx>,
+ interior: GeneratorInterior<'tcx>)
+ -> Ty<'tcx> {
+ self.mk_ty(TyGenerator(id, closure_substs, interior))
+ }
+
+ pub fn mk_generator_witness(self, types: ty::Binder<&'tcx Slice<Ty<'tcx>>>) -> Ty<'tcx> {
+ self.mk_ty(TyGeneratorWitness(types))
+ }
+
pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
self.mk_infer(TyVar(v))
}
+ // Param names switched from Name to InternedString for stable hashing.
pub fn mk_param(self,
index: u32,
- name: Name) -> Ty<'tcx> {
+ name: InternedString) -> Ty<'tcx> {
self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
}
pub fn mk_self_type(self) -> Ty<'tcx> {
- self.mk_param(0, keywords::SelfType.name())
+ self.mk_param(0, keywords::SelfType.name().as_interned_str())
}
pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
}
}
+ /// CanonicalVarInfo slices always live in the *global* arena (note the
+ /// 'gcx in the return type), hence the global_tcx() hop.
+ pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
+ if ts.len() == 0 {
+ Slice::empty()
+ } else {
+ self.global_tcx()._intern_canonical_var_infos(ts)
+ }
+ }
+
+ pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> {
+ if ts.len() == 0 {
+ Slice::empty()
+ } else {
+ self._intern_clauses(ts)
+ }
+ }
+
+ pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> {
+ if ts.len() == 0 {
+ Slice::empty()
+ } else {
+ self._intern_goals(ts)
+ }
+ }
+
pub fn mk_fn_sig<I>(self,
inputs: I,
output: I::Item,
self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from))
}
+ pub fn mk_clauses<I: InternAs<[Clause<'tcx>], Clauses<'tcx>>>(self, iter: I) -> I::Output {
+ iter.intern_with(|xs| self.intern_clauses(xs))
+ }
+
+ pub fn mk_goals<I: InternAs<[Goal<'tcx>], Goals<'tcx>>>(self, iter: I) -> I::Output {
+ iter.intern_with(|xs| self.intern_goals(xs))
+ }
+
+ /// Interns a single goal by interning a one-element slice and
+ /// returning a reference to its only element.
+ pub fn mk_goal(self, goal: Goal<'tcx>) -> &'tcx Goal {
+ &self.mk_goals(iter::once(goal))[0]
+ }
+
pub fn lint_node<S: Into<MultiSpan>>(self,
lint: &'static Lint,
id: NodeId,
let sets = self.lint_levels(LOCAL_CRATE);
loop {
let hir_id = self.hir.definitions().node_to_hir_id(id);
- if let Some(pair) = sets.level_and_source(lint, hir_id) {
+ if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
return pair
}
let next = self.hir.get_parent_node(id);
let (level, src) = self.lint_level_at_node(lint, id);
lint::struct_lint_level(self.sess, lint, level, src, None, msg)
}
+
+ /// Traits in scope at `id`, served per-owner from the
+ /// `in_scope_traits_map` query and indexed by the item-local id.
+ pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
+ self.in_scope_traits_map(id.owner)
+ .and_then(|map| map.get(&id.local_id).cloned())
+ }
+
+ /// Lifetime resolution for `id` (replaces the old monolithic
+ /// `named_region_map` field).
+ pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
+ self.named_region_map(id.owner)
+ .and_then(|map| map.get(&id.local_id).cloned())
+ }
+
+ pub fn is_late_bound(self, id: HirId) -> bool {
+ self.is_late_bound_map(id.owner)
+ .map(|set| set.contains(&id.local_id))
+ .unwrap_or(false)
+ }
+
+ pub fn object_lifetime_defaults(self, id: HirId)
+ -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
+ {
+ self.object_lifetime_defaults_map(id.owner)
+ .and_then(|map| map.get(&id.local_id).cloned())
+ }
}
pub trait InternAs<T: ?Sized, R> {
Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))
}
}
+
/// Installs the local-crate query providers that are backed directly by
/// tables already computed during resolution (and a few untracked
/// `cstore`/session reads), rather than by real sub-queries.
pub fn provide(providers: &mut ty::maps::Providers) {
    // FIXME(#44234) - almost all of these queries have no sub-queries and
    // therefore no actual inputs, they're just reading tables calculated in
    // resolve! Does this work? Unsure! That's what the issue is about
    // Per-owner lookups into tables stored on the global context.
    providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
    providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
    providers.crate_name = |tcx, id| {
        // Only valid for the crate currently being compiled.
        assert_eq!(id, LOCAL_CRATE);
        tcx.crate_name
    };
    providers.get_lang_items = |tcx, id| {
        assert_eq!(id, LOCAL_CRATE);
        // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
        // graph here to ignore the fact that `get_lang_items` below depends on
        // the entire crate.  For now this'll prevent false positives of
        // recompiling too much when anything changes.
        //
        // Once red/green incremental compilation lands we should be able to
        // remove this because while the crate changes often the lint level map
        // will change rarely.
        tcx.dep_graph.with_ignore(|| Lrc::new(middle::lang_items::collect(tcx)))
    };
    providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
    providers.maybe_unused_trait_import = |tcx, id| {
        tcx.maybe_unused_trait_imports.contains(&id)
    };
    providers.maybe_unused_extern_crates = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(tcx.maybe_unused_extern_crates.clone())
    };

    // Stability: the index is built once for the whole crate; the two
    // lookup providers translate a local `DefIndex` to a `HirId` first.
    providers.stability_index = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(stability::Index::new(tcx))
    };
    providers.lookup_stability = |tcx, id| {
        assert_eq!(id.krate, LOCAL_CRATE);
        let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
        tcx.stability().local_stability(id)
    };
    providers.lookup_deprecation_entry = |tcx, id| {
        assert_eq!(id.krate, LOCAL_CRATE);
        let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
        tcx.stability().local_deprecation_entry(id)
    };
    // The remaining providers read *untracked* data from the crate store
    // or session; the assert pins them to the local crate where relevant.
    providers.extern_mod_stmt_cnum = |tcx, id| {
        let id = tcx.hir.as_local_node_id(id).unwrap();
        tcx.cstore.extern_mod_stmt_cnum_untracked(id)
    };
    providers.all_crate_nums = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(tcx.cstore.crates_untracked())
    };
    providers.postorder_cnums = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(tcx.cstore.postorder_cnums_untracked())
    };
    providers.output_filenames = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        tcx.output_filenames.clone()
    };
    providers.features_query = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        Lrc::new(tcx.sess.features_untracked().clone())
    };
    // Crate-kind flags are derived from attributes on the crate root.
    providers.is_panic_runtime = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        attr::contains_name(tcx.hir.krate_attrs(), "panic_runtime")
    };
    providers.is_compiler_builtins = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        attr::contains_name(tcx.hir.krate_attrs(), "compiler_builtins")
    };
}