1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 #![allow(non_camel_case_types, non_snake_case)]
13 //! Code that is useful in various trans modules.
17 use llvm
::{ValueRef, BasicBlockRef, BuilderRef, ContextRef, TypeKind}
;
18 use llvm
::{True, False, Bool, OperandBundleDef}
;
20 use rustc
::hir
::def
::Def
;
21 use rustc
::hir
::def_id
::DefId
;
23 use middle
::lang_items
::LangItem
;
24 use rustc
::ty
::subst
::Substs
;
25 use abi
::{Abi, FnType}
;
33 use debuginfo
::{self, DebugLoc}
;
40 use rustc
::ty
::{self, Ty, TyCtxt}
;
41 use rustc
::traits
::{self, SelectionContext, ProjectionMode}
;
42 use rustc
::ty
::fold
::{TypeFolder, TypeFoldable}
;
44 use util
::nodemap
::NodeMap
;
46 use arena
::TypedArena
;
47 use libc
::{c_uint, c_char}
;
49 use std
::ffi
::CString
;
50 use std
::cell
::{Cell, RefCell}
;
53 use syntax
::codemap
::{DUMMY_SP, Span}
;
54 use syntax
::parse
::token
::InternedString
;
55 use syntax
::parse
::token
;
57 pub use context
::CrateContext
;
59 /// Is the type's representation size known at compile time?
60 pub fn type_is_sized
<'tcx
>(tcx
: &TyCtxt
<'tcx
>, ty
: Ty
<'tcx
>) -> bool
{
61 ty
.is_sized(&tcx
.empty_parameter_environment(), DUMMY_SP
)
64 pub fn type_is_fat_ptr
<'tcx
>(cx
: &TyCtxt
<'tcx
>, ty
: Ty
<'tcx
>) -> bool
{
66 ty
::TyRawPtr(ty
::TypeAndMut{ty, ..}
) |
67 ty
::TyRef(_
, ty
::TypeAndMut{ty, ..}
) |
69 !type_is_sized(cx
, ty
)
77 pub fn type_is_immediate
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>, ty
: Ty
<'tcx
>) -> bool
{
78 use machine
::llsize_of_alloc
;
79 use type_of
::sizing_type_of
;
82 let simple
= ty
.is_scalar() ||
83 ty
.is_unique() || ty
.is_region_ptr() ||
85 if simple
&& !type_is_fat_ptr(tcx
, ty
) {
88 if !type_is_sized(tcx
, ty
) {
92 ty
::TyStruct(..) | ty
::TyEnum(..) | ty
::TyTuple(..) | ty
::TyArray(_
, _
) |
93 ty
::TyClosure(..) => {
94 let llty
= sizing_type_of(ccx
, ty
);
95 llsize_of_alloc(ccx
, llty
) <= llsize_of_alloc(ccx
, ccx
.int_type())
97 _
=> type_is_zero_size(ccx
, ty
)
101 /// Identify types which have size zero at runtime.
102 pub fn type_is_zero_size
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>, ty
: Ty
<'tcx
>) -> bool
{
103 use machine
::llsize_of_alloc
;
104 use type_of
::sizing_type_of
;
105 let llty
= sizing_type_of(ccx
, ty
);
106 llsize_of_alloc(ccx
, llty
) == 0
109 /// Generates a unique symbol based off the name given. This is used to create
110 /// unique symbols for things like closures.
111 pub fn gensym_name(name
: &str) -> ast
::Name
{
112 let num
= token
::gensym(name
).0;
113 // use one colon which will get translated to a period by the mangler, and
114 // we're guaranteed that `num` is globally unique for this crate.
115 token
::gensym(&format
!("{}:{}", name
, num
))
119 * A note on nomenclature of linking: "extern", "foreign", and "upcall".
121 * An "extern" is an LLVM symbol we wind up emitting an undefined external
122 * reference to. This means "we don't have the thing in this compilation unit,
123 * please make sure you link it in at runtime". This could be a reference to
124 * C code found in a C library, or rust code found in a rust crate.
126 * Most "externs" are implicitly declared (automatically) as a result of a
127 * user declaring an extern _module_ dependency; this causes the rust driver
128 * to locate an extern crate, scan its compilation metadata, and emit extern
129 * declarations for any symbols used by the declaring crate.
131 * A "foreign" is an extern that references C (or other non-rust ABI) code.
132 * There is no metadata to scan for extern references so in these cases either
133 * a header-digester like bindgen, or manual function prototypes, have to
134 * serve as declarators. So these are usually given explicitly as prototype
135 * declarations, in rust code, with ABI attributes on them noting which ABI to
138 * An "upcall" is a foreign call generated by the compiler (not corresponding
139 * to any user-written call in the code) into the runtime library, to perform
140 * some helper task such as bringing a task to life, allocating memory, etc.
146 #[derive(Copy, Clone)]
147 pub struct NodeIdAndSpan
{
152 pub fn expr_info(expr
: &hir
::Expr
) -> NodeIdAndSpan
{
153 NodeIdAndSpan { id: expr.id, span: expr.span }
156 /// The concrete version of ty::FieldDef. The name is the field index if
157 /// the field is numeric.
158 pub struct Field
<'tcx
>(pub ast
::Name
, pub Ty
<'tcx
>);
160 /// The concrete version of ty::VariantDef
161 pub struct VariantInfo
<'tcx
> {
163 pub fields
: Vec
<Field
<'tcx
>>
166 impl<'tcx
> VariantInfo
<'tcx
> {
167 pub fn from_ty(tcx
: &TyCtxt
<'tcx
>,
169 opt_def
: Option
<Def
>)
173 ty
::TyStruct(adt
, substs
) | ty
::TyEnum(adt
, substs
) => {
174 let variant
= match opt_def
{
175 None
=> adt
.struct_variant(),
176 Some(def
) => adt
.variant_of_def(def
)
180 discr
: Disr
::from(variant
.disr_val
),
181 fields
: variant
.fields
.iter().map(|f
| {
182 Field(f
.name
, monomorphize
::field_ty(tcx
, substs
, f
))
187 ty
::TyTuple(ref v
) => {
190 fields
: v
.iter().enumerate().map(|(i
, &t
)| {
191 Field(token
::intern(&i
.to_string()), t
)
197 bug
!("cannot get field types from the type {:?}", ty
);
202 /// Return the variant corresponding to a given node (e.g. expr)
203 pub fn of_node(tcx
: &TyCtxt
<'tcx
>, ty
: Ty
<'tcx
>, id
: ast
::NodeId
) -> Self {
204 let node_def
= tcx
.def_map
.borrow().get(&id
).map(|v
| v
.full_def());
205 Self::from_ty(tcx
, ty
, node_def
)
208 pub fn field_index(&self, name
: ast
::Name
) -> usize {
209 self.fields
.iter().position(|&Field(n
,_
)| n
== name
).unwrap_or_else(|| {
210 bug
!("unknown field `{}`", name
)
215 pub struct BuilderRef_res
{
219 impl Drop
for BuilderRef_res
{
222 llvm
::LLVMDisposeBuilder(self.b
);
227 pub fn BuilderRef_res(b
: BuilderRef
) -> BuilderRef_res
{
233 pub fn validate_substs(substs
: &Substs
) {
234 assert
!(!substs
.types
.needs_infer());
237 // work around bizarre resolve errors
238 type RvalueDatum
<'tcx
> = datum
::Datum
<'tcx
, datum
::Rvalue
>;
239 pub type LvalueDatum
<'tcx
> = datum
::Datum
<'tcx
, datum
::Lvalue
>;
241 #[derive(Clone, Debug)]
242 struct HintEntry
<'tcx
> {
243 // The datum for the dropflag-hint itself; note that many
244 // source-level Lvalues will be associated with the same
245 // dropflag-hint datum.
246 datum
: cleanup
::DropHintDatum
<'tcx
>,
249 pub struct DropFlagHintsMap
<'tcx
> {
250 // Maps NodeId for expressions that read/write unfragmented state
251 // to that state's drop-flag "hint." (A stack-local hint
252 // indicates either that (1.) it is certain that no-drop is
253 // needed, or (2.) inline drop-flag must be consulted.)
254 node_map
: NodeMap
<HintEntry
<'tcx
>>,
257 impl<'tcx
> DropFlagHintsMap
<'tcx
> {
258 pub fn new() -> DropFlagHintsMap
<'tcx
> { DropFlagHintsMap { node_map: NodeMap() }
}
259 pub fn has_hint(&self, id
: ast
::NodeId
) -> bool { self.node_map.contains_key(&id) }
260 pub fn insert(&mut self, id
: ast
::NodeId
, datum
: cleanup
::DropHintDatum
<'tcx
>) {
261 self.node_map
.insert(id
, HintEntry { datum: datum }
);
263 pub fn hint_datum(&self, id
: ast
::NodeId
) -> Option
<cleanup
::DropHintDatum
<'tcx
>> {
264 self.node_map
.get(&id
).map(|t
|t
.datum
)
268 // Function context. Every LLVM function we create will have one of
270 pub struct FunctionContext
<'a
, 'tcx
: 'a
> {
271 // The MIR for this function. At present, this is optional because
272 // we only have MIR available for things that are local to the
274 pub mir
: Option
<CachedMir
<'a
, 'tcx
>>,
276 // The ValueRef returned from a call to llvm::LLVMAddFunction; the
277 // address of the first instruction in the sequence of
278 // instructions for this function that will go in the .text
279 // section of the executable we're generating.
282 // always an empty parameter-environment NOTE: @jroesch another use of ParamEnv
283 pub param_env
: ty
::ParameterEnvironment
<'a
, 'tcx
>,
285 // A pointer to where to store the return value. If the return type is
286 // immediate, this points to an alloca in the function. Otherwise, it's a
287 // pointer to the hidden first parameter of the function. After function
288 // construction, this should always be Some.
289 pub llretslotptr
: Cell
<Option
<ValueRef
>>,
291 // These pub elements: "hoisted basic blocks" containing
292 // administrative activities that have to happen in only one place in
293 // the function, due to LLVM's quirks.
294 // A marker for the place where we want to insert the function's static
295 // allocas, so that LLVM will coalesce them into a single alloca call.
296 pub alloca_insert_pt
: Cell
<Option
<ValueRef
>>,
297 pub llreturn
: Cell
<Option
<BasicBlockRef
>>,
299 // If the function has any nested return's, including something like:
300 // fn foo() -> Option<Foo> { Some(Foo { x: return None }) }, then
301 // we use a separate alloca for each return
302 pub needs_ret_allocas
: bool
,
304 // When working with landingpad-based exceptions this value is alloca'd and
305 // later loaded when using the resume instruction. This ends up being
306 // critical to chaining landing pads and reusing already-translated
309 // Note that for cleanuppad-based exceptions this is not used.
310 pub landingpad_alloca
: Cell
<Option
<ValueRef
>>,
312 // Maps the DefId's for local variables to the allocas created for
313 // them in llallocas.
314 pub lllocals
: RefCell
<NodeMap
<LvalueDatum
<'tcx
>>>,
316 // Same as above, but for closure upvars
317 pub llupvars
: RefCell
<NodeMap
<ValueRef
>>,
319 // Carries info about drop-flags for local bindings (longer term,
320 // paths) for the code being compiled.
321 pub lldropflag_hints
: RefCell
<DropFlagHintsMap
<'tcx
>>,
323 // Describes the return/argument LLVM types and their ABI handling.
326 // If this function is being monomorphized, this contains the type
327 // substitutions used.
328 pub param_substs
: &'tcx Substs
<'tcx
>,
330 // The source span and nesting context where this function comes from, for
331 // error reporting and symbol generation.
332 pub span
: Option
<Span
>,
334 // The arena that blocks are allocated from.
335 pub block_arena
: &'a TypedArena
<BlockS
<'a
, 'tcx
>>,
337 // The arena that landing pads are allocated from.
338 pub lpad_arena
: TypedArena
<LandingPad
>,
340 // This function's enclosing crate context.
341 pub ccx
: &'a CrateContext
<'a
, 'tcx
>,
343 // Used and maintained by the debuginfo module.
344 pub debug_context
: debuginfo
::FunctionDebugContext
,
347 pub scopes
: RefCell
<Vec
<cleanup
::CleanupScope
<'a
, 'tcx
>>>,
349 pub cfg
: Option
<cfg
::CFG
>,
352 impl<'a
, 'tcx
> FunctionContext
<'a
, 'tcx
> {
353 pub fn mir(&self) -> CachedMir
<'a
, 'tcx
> {
354 self.mir
.clone().expect("fcx.mir was empty")
357 pub fn cleanup(&self) {
359 llvm
::LLVMInstructionEraseFromParent(self.alloca_insert_pt
365 pub fn get_llreturn(&self) -> BasicBlockRef
{
366 if self.llreturn
.get().is_none() {
368 self.llreturn
.set(Some(unsafe {
369 llvm
::LLVMAppendBasicBlockInContext(self.ccx
.llcx(), self.llfn
,
370 "return\0".as_ptr() as *const _
)
374 self.llreturn
.get().unwrap()
377 pub fn get_ret_slot(&self, bcx
: Block
<'a
, 'tcx
>, name
: &str) -> ValueRef
{
378 if self.needs_ret_allocas
{
379 base
::alloca(bcx
, self.fn_ty
.ret
.memory_ty(self.ccx
), name
)
381 self.llretslotptr
.get().unwrap()
385 pub fn new_block(&'a
self,
387 opt_node_id
: Option
<ast
::NodeId
>)
390 let name
= CString
::new(name
).unwrap();
391 let llbb
= llvm
::LLVMAppendBasicBlockInContext(self.ccx
.llcx(),
394 BlockS
::new(llbb
, opt_node_id
, self)
398 pub fn new_id_block(&'a
self,
400 node_id
: ast
::NodeId
)
402 self.new_block(name
, Some(node_id
))
405 pub fn new_temp_block(&'a
self,
408 self.new_block(name
, None
)
411 pub fn join_blocks(&'a
self,
413 in_cxs
: &[Block
<'a
, 'tcx
>])
415 let out
= self.new_id_block("join", id
);
416 let mut reachable
= false;
418 if !bcx
.unreachable
.get() {
419 build
::Br(*bcx
, out
.llbb
, DebugLoc
::None
);
424 build
::Unreachable(out
);
429 pub fn monomorphize
<T
>(&self, value
: &T
) -> T
430 where T
: TypeFoldable
<'tcx
>
432 monomorphize
::apply_param_substs(self.ccx
.tcx(),
437 /// This is the same as `common::type_needs_drop`, except that it
438 /// may use or update caches within this `FunctionContext`.
439 pub fn type_needs_drop(&self, ty
: Ty
<'tcx
>) -> bool
{
440 self.ccx
.tcx().type_needs_drop_given_env(ty
, &self.param_env
)
443 pub fn eh_personality(&self) -> ValueRef
{
444 // The exception handling personality function.
446 // If our compilation unit has the `eh_personality` lang item somewhere
447 // within it, then we just need to translate that. Otherwise, we're
448 // building an rlib which will depend on some upstream implementation of
449 // this function, so we just codegen a generic reference to it. We don't
450 // specify any of the types for the function, we just make it a symbol
451 // that LLVM can later use.
453 // Note that MSVC is a little special here in that we don't use the
454 // `eh_personality` lang item at all. Currently LLVM has support for
455 // both Dwarf and SEH unwind mechanisms for MSVC targets and uses the
456 // *name of the personality function* to decide what kind of unwind side
457 // tables/landing pads to emit. It looks like Dwarf is used by default,
458 // injecting a dependency on the `_Unwind_Resume` symbol for resuming
459 // an "exception", but for MSVC we want to force SEH. This means that we
460 // can't actually have the personality function be our standard
461 // `rust_eh_personality` function, but rather we wired it up to the
462 // CRT's custom personality function, which forces LLVM to consider
463 // landing pads as "landing pads for SEH".
466 let target
= &ccx
.sess().target
.target
;
467 match tcx
.lang_items
.eh_personality() {
468 Some(def_id
) if !base
::wants_msvc_seh(ccx
.sess()) => {
469 Callee
::def(ccx
, def_id
, tcx
.mk_substs(Substs
::empty())).reify(ccx
).val
471 _
=> if let Some(llpersonality
) = ccx
.eh_personality().get() {
474 let name
= if !base
::wants_msvc_seh(ccx
.sess()) {
475 "rust_eh_personality"
476 } else if target
.arch
== "x86" {
479 "__C_specific_handler"
481 let fty
= Type
::variadic_func(&[], &Type
::i32(ccx
));
482 let f
= declare
::declare_cfn(ccx
, name
, fty
);
483 ccx
.eh_personality().set(Some(f
));
489 // Returns a ValueRef of the "eh_unwind_resume" lang item if one is defined,
490 // otherwise declares it as an external function.
491 pub fn eh_unwind_resume(&self) -> Callee
<'tcx
> {
495 assert
!(ccx
.sess().target
.target
.options
.custom_unwind_resume
);
496 if let Some(def_id
) = tcx
.lang_items
.eh_unwind_resume() {
497 return Callee
::def(ccx
, def_id
, tcx
.mk_substs(Substs
::empty()));
500 let ty
= tcx
.mk_fn_ptr(ty
::BareFnTy
{
501 unsafety
: hir
::Unsafety
::Unsafe
,
503 sig
: ty
::Binder(ty
::FnSig
{
504 inputs
: vec
![tcx
.mk_mut_ptr(tcx
.types
.u8)],
505 output
: ty
::FnDiverging
,
510 let unwresume
= ccx
.eh_unwind_resume();
511 if let Some(llfn
) = unwresume
.get() {
512 return Callee
::ptr(datum
::immediate_rvalue(llfn
, ty
));
514 let llfn
= declare
::declare_fn(ccx
, "rust_eh_unwind_resume", ty
);
515 attributes
::unwind(llfn
, true);
516 unwresume
.set(Some(llfn
));
517 Callee
::ptr(datum
::immediate_rvalue(llfn
, ty
))
521 // Basic block context. We create a block context for each basic block
522 // (single-entry, single-exit sequence of instructions) we generate from Rust
523 // code. Each basic block we generate is attached to a function, typically
524 // with many basic blocks per function. All the basic blocks attached to a
525 // function are organized as a directed graph.
526 pub struct BlockS
<'blk
, 'tcx
: 'blk
> {
527 // The BasicBlockRef returned from a call to
528 // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
529 // block to the function pointed to by llfn. We insert
530 // instructions into that block by way of this block context.
531 // The block pointing to this one in the function's digraph.
532 pub llbb
: BasicBlockRef
,
533 pub terminated
: Cell
<bool
>,
534 pub unreachable
: Cell
<bool
>,
536 // If this block part of a landing pad, then this is `Some` indicating what
537 // kind of landing pad its in, otherwise this is none.
538 pub lpad
: Cell
<Option
<&'blk LandingPad
>>,
540 // AST node-id associated with this block, if any. Used for
541 // debugging purposes only.
542 pub opt_node_id
: Option
<ast
::NodeId
>,
544 // The function context for the function to which this block is
546 pub fcx
: &'blk FunctionContext
<'blk
, 'tcx
>,
549 pub type Block
<'blk
, 'tcx
> = &'blk BlockS
<'blk
, 'tcx
>;
551 impl<'blk
, 'tcx
> BlockS
<'blk
, 'tcx
> {
552 pub fn new(llbb
: BasicBlockRef
,
553 opt_node_id
: Option
<ast
::NodeId
>,
554 fcx
: &'blk FunctionContext
<'blk
, 'tcx
>)
555 -> Block
<'blk
, 'tcx
> {
556 fcx
.block_arena
.alloc(BlockS
{
558 terminated
: Cell
::new(false),
559 unreachable
: Cell
::new(false),
560 lpad
: Cell
::new(None
),
561 opt_node_id
: opt_node_id
,
566 pub fn ccx(&self) -> &'blk CrateContext
<'blk
, 'tcx
> {
569 pub fn fcx(&self) -> &'blk FunctionContext
<'blk
, 'tcx
> {
572 pub fn tcx(&self) -> &'blk TyCtxt
<'tcx
> {
575 pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }
577 pub fn lpad(&self) -> Option
<&'blk LandingPad
> {
581 pub fn mir(&self) -> CachedMir
<'blk
, 'tcx
> {
585 pub fn name(&self, name
: ast
::Name
) -> String
{
589 pub fn node_id_to_string(&self, id
: ast
::NodeId
) -> String
{
590 self.tcx().map
.node_to_string(id
).to_string()
593 pub fn def(&self, nid
: ast
::NodeId
) -> Def
{
594 match self.tcx().def_map
.borrow().get(&nid
) {
595 Some(v
) => v
.full_def(),
597 bug
!("no def associated with node id {}", nid
);
602 pub fn to_str(&self) -> String
{
603 format
!("[block {:p}]", self)
606 pub fn monomorphize
<T
>(&self, value
: &T
) -> T
607 where T
: TypeFoldable
<'tcx
>
609 monomorphize
::apply_param_substs(self.tcx(),
610 self.fcx
.param_substs
,
614 pub fn build(&'blk
self) -> BlockAndBuilder
<'blk
, 'tcx
> {
615 BlockAndBuilder
::new(self, OwnedBuilder
::new_with_ccx(self.ccx()))
619 pub struct OwnedBuilder
<'blk
, 'tcx
: 'blk
> {
620 builder
: Builder
<'blk
, 'tcx
>
623 impl<'blk
, 'tcx
> OwnedBuilder
<'blk
, 'tcx
> {
624 pub fn new_with_ccx(ccx
: &'blk CrateContext
<'blk
, 'tcx
>) -> Self {
625 // Create a fresh builder from the crate context.
626 let llbuilder
= unsafe {
627 llvm
::LLVMCreateBuilderInContext(ccx
.llcx())
631 llbuilder
: llbuilder
,
638 impl<'blk
, 'tcx
> Drop
for OwnedBuilder
<'blk
, 'tcx
> {
641 llvm
::LLVMDisposeBuilder(self.builder
.llbuilder
);
646 pub struct BlockAndBuilder
<'blk
, 'tcx
: 'blk
> {
647 bcx
: Block
<'blk
, 'tcx
>,
648 owned_builder
: OwnedBuilder
<'blk
, 'tcx
>,
651 impl<'blk
, 'tcx
> BlockAndBuilder
<'blk
, 'tcx
> {
652 pub fn new(bcx
: Block
<'blk
, 'tcx
>, owned_builder
: OwnedBuilder
<'blk
, 'tcx
>) -> Self {
653 // Set the builder's position to this block's end.
654 owned_builder
.builder
.position_at_end(bcx
.llbb
);
657 owned_builder
: owned_builder
,
661 pub fn with_block
<F
, R
>(&self, f
: F
) -> R
662 where F
: FnOnce(Block
<'blk
, 'tcx
>) -> R
664 let result
= f(self.bcx
);
665 self.position_at_end(self.bcx
.llbb
);
669 pub fn map_block
<F
>(self, f
: F
) -> Self
670 where F
: FnOnce(Block
<'blk
, 'tcx
>) -> Block
<'blk
, 'tcx
>
672 let BlockAndBuilder { bcx, owned_builder }
= self;
674 BlockAndBuilder
::new(bcx
, owned_builder
)
677 pub fn at_start
<F
, R
>(&self, f
: F
) -> R
678 where F
: FnOnce(&BlockAndBuilder
<'blk
, 'tcx
>) -> R
680 self.position_at_start(self.bcx
.llbb
);
682 self.position_at_end(self.bcx
.llbb
);
686 // Methods delegated to bcx
688 pub fn is_unreachable(&self) -> bool
{
689 self.bcx
.unreachable
.get()
692 pub fn ccx(&self) -> &'blk CrateContext
<'blk
, 'tcx
> {
695 pub fn fcx(&self) -> &'blk FunctionContext
<'blk
, 'tcx
> {
698 pub fn tcx(&self) -> &'blk TyCtxt
<'tcx
> {
701 pub fn sess(&self) -> &'blk Session
{
705 pub fn llbb(&self) -> BasicBlockRef
{
709 pub fn mir(&self) -> CachedMir
<'blk
, 'tcx
> {
713 pub fn monomorphize
<T
>(&self, value
: &T
) -> T
714 where T
: TypeFoldable
<'tcx
>
716 self.bcx
.monomorphize(value
)
719 pub fn set_lpad(&self, lpad
: Option
<LandingPad
>) {
720 self.bcx
.lpad
.set(lpad
.map(|p
| &*self.fcx().lpad_arena
.alloc(p
)))
724 impl<'blk
, 'tcx
> Deref
for BlockAndBuilder
<'blk
, 'tcx
> {
725 type Target
= Builder
<'blk
, 'tcx
>;
726 fn deref(&self) -> &Self::Target
{
727 &self.owned_builder
.builder
731 /// A structure representing an active landing pad for the duration of a basic
734 /// Each `Block` may contain an instance of this, indicating whether the block
735 /// is part of a landing pad or not. This is used to make decisions about whether
736 /// to emit `invoke` instructions (e.g. in a landing pad we don't continue to
737 /// use `invoke`) and also about various function call metadata.
739 /// For GNU exceptions (`landingpad` + `resume` instructions) this structure is
740 /// just a bunch of `None` instances (not too interesting), but for MSVC
741 /// exceptions (`cleanuppad` + `cleanupret` instructions) this contains data.
742 /// When inside of a landing pad, each function call in LLVM IR needs to be
743 /// annotated with which landing pad it's a part of. This is accomplished via
744 /// the `OperandBundleDef` value created for MSVC landing pads.
745 pub struct LandingPad
{
746 cleanuppad
: Option
<ValueRef
>,
747 operand
: Option
<OperandBundleDef
>,
751 pub fn gnu() -> LandingPad
{
752 LandingPad { cleanuppad: None, operand: None }
755 pub fn msvc(cleanuppad
: ValueRef
) -> LandingPad
{
757 cleanuppad
: Some(cleanuppad
),
758 operand
: Some(OperandBundleDef
::new("funclet", &[cleanuppad
])),
762 pub fn bundle(&self) -> Option
<&OperandBundleDef
> {
763 self.operand
.as_ref()
767 impl Clone
for LandingPad
{
768 fn clone(&self) -> LandingPad
{
770 cleanuppad
: self.cleanuppad
,
771 operand
: self.cleanuppad
.map(|p
| {
772 OperandBundleDef
::new("funclet", &[p
])
778 pub struct Result
<'blk
, 'tcx
: 'blk
> {
779 pub bcx
: Block
<'blk
, 'tcx
>,
783 impl<'b
, 'tcx
> Result
<'b
, 'tcx
> {
784 pub fn new(bcx
: Block
<'b
, 'tcx
>, val
: ValueRef
) -> Result
<'b
, 'tcx
> {
792 pub fn val_ty(v
: ValueRef
) -> Type
{
794 Type
::from_ref(llvm
::LLVMTypeOf(v
))
798 // LLVM constant constructors.
799 pub fn C_null(t
: Type
) -> ValueRef
{
801 llvm
::LLVMConstNull(t
.to_ref())
805 pub fn C_undef(t
: Type
) -> ValueRef
{
807 llvm
::LLVMGetUndef(t
.to_ref())
811 pub fn C_integral(t
: Type
, u
: u64, sign_extend
: bool
) -> ValueRef
{
813 llvm
::LLVMConstInt(t
.to_ref(), u
, sign_extend
as Bool
)
817 pub fn C_floating(s
: &str, t
: Type
) -> ValueRef
{
819 let s
= CString
::new(s
).unwrap();
820 llvm
::LLVMConstRealOfString(t
.to_ref(), s
.as_ptr())
824 pub fn C_floating_f64(f
: f64, t
: Type
) -> ValueRef
{
826 llvm
::LLVMConstReal(t
.to_ref(), f
)
830 pub fn C_nil(ccx
: &CrateContext
) -> ValueRef
{
831 C_struct(ccx
, &[], false)
834 pub fn C_bool(ccx
: &CrateContext
, val
: bool
) -> ValueRef
{
835 C_integral(Type
::i1(ccx
), val
as u64, false)
838 pub fn C_i32(ccx
: &CrateContext
, i
: i32) -> ValueRef
{
839 C_integral(Type
::i32(ccx
), i
as u64, true)
842 pub fn C_u32(ccx
: &CrateContext
, i
: u32) -> ValueRef
{
843 C_integral(Type
::i32(ccx
), i
as u64, false)
846 pub fn C_u64(ccx
: &CrateContext
, i
: u64) -> ValueRef
{
847 C_integral(Type
::i64(ccx
), i
, false)
850 pub fn C_int
<I
: AsI64
>(ccx
: &CrateContext
, i
: I
) -> ValueRef
{
853 let bit_size
= machine
::llbitsize_of_real(ccx
, ccx
.int_type());
856 // make sure it doesn't overflow
857 assert
!(v
< (1<<(bit_size
-1)) && v
>= -(1<<(bit_size
-1)));
860 C_integral(ccx
.int_type(), v
as u64, true)
863 pub fn C_uint
<I
: AsU64
>(ccx
: &CrateContext
, i
: I
) -> ValueRef
{
866 let bit_size
= machine
::llbitsize_of_real(ccx
, ccx
.int_type());
869 // make sure it doesn't overflow
870 assert
!(v
< (1<<bit_size
));
873 C_integral(ccx
.int_type(), v
, false)
/// Widening of a signed integer into the `i64` accepted by `C_int`.
pub trait AsI64 {
    fn as_i64(self) -> i64;
}

/// Widening of an unsigned integer into the `u64` accepted by `C_uint`.
pub trait AsU64 {
    fn as_u64(self) -> u64;
}

// FIXME: remove the intptr conversions, because they
// are host-architecture-dependent
impl AsI64 for i64 {
    fn as_i64(self) -> i64 { self }
}
impl AsI64 for i32 {
    fn as_i64(self) -> i64 { self as i64 }
}
impl AsI64 for isize {
    fn as_i64(self) -> i64 { self as i64 }
}

impl AsU64 for u64 {
    fn as_u64(self) -> u64 { self }
}
impl AsU64 for u32 {
    fn as_u64(self) -> u64 { self as u64 }
}
impl AsU64 for usize {
    fn as_u64(self) -> u64 { self as u64 }
}
889 pub fn C_u8(ccx
: &CrateContext
, i
: u8) -> ValueRef
{
890 C_integral(Type
::i8(ccx
), i
as u64, false)
894 // This is a 'c-like' raw string, which differs from
895 // our boxed-and-length-annotated strings.
896 pub fn C_cstr(cx
: &CrateContext
, s
: InternedString
, null_terminated
: bool
) -> ValueRef
{
898 if let Some(&llval
) = cx
.const_cstr_cache().borrow().get(&s
) {
902 let sc
= llvm
::LLVMConstStringInContext(cx
.llcx(),
903 s
.as_ptr() as *const c_char
,
905 !null_terminated
as Bool
);
907 let gsym
= token
::gensym("str");
908 let sym
= format
!("str{}", gsym
.0);
909 let g
= declare
::define_global(cx
, &sym
[..], val_ty(sc
)).unwrap_or_else(||{
910 bug
!("symbol `{}` is already defined", sym
);
912 llvm
::LLVMSetInitializer(g
, sc
);
913 llvm
::LLVMSetGlobalConstant(g
, True
);
914 llvm
::SetLinkage(g
, llvm
::InternalLinkage
);
916 cx
.const_cstr_cache().borrow_mut().insert(s
, g
);
921 // NB: Do not use `do_spill_noroot` to make this into a constant string, or
922 // you will be kicked off fast isel. See issue #4352 for an example of this.
923 pub fn C_str_slice(cx
: &CrateContext
, s
: InternedString
) -> ValueRef
{
925 let cs
= consts
::ptrcast(C_cstr(cx
, s
, false), Type
::i8p(cx
));
926 C_named_struct(cx
.tn().find_type("str_slice").unwrap(), &[cs
, C_uint(cx
, len
)])
929 pub fn C_struct(cx
: &CrateContext
, elts
: &[ValueRef
], packed
: bool
) -> ValueRef
{
930 C_struct_in_context(cx
.llcx(), elts
, packed
)
933 pub fn C_struct_in_context(llcx
: ContextRef
, elts
: &[ValueRef
], packed
: bool
) -> ValueRef
{
935 llvm
::LLVMConstStructInContext(llcx
,
936 elts
.as_ptr(), elts
.len() as c_uint
,
941 pub fn C_named_struct(t
: Type
, elts
: &[ValueRef
]) -> ValueRef
{
943 llvm
::LLVMConstNamedStruct(t
.to_ref(), elts
.as_ptr(), elts
.len() as c_uint
)
947 pub fn C_array(ty
: Type
, elts
: &[ValueRef
]) -> ValueRef
{
949 return llvm
::LLVMConstArray(ty
.to_ref(), elts
.as_ptr(), elts
.len() as c_uint
);
953 pub fn C_vector(elts
: &[ValueRef
]) -> ValueRef
{
955 return llvm
::LLVMConstVector(elts
.as_ptr(), elts
.len() as c_uint
);
959 pub fn C_bytes(cx
: &CrateContext
, bytes
: &[u8]) -> ValueRef
{
960 C_bytes_in_context(cx
.llcx(), bytes
)
963 pub fn C_bytes_in_context(llcx
: ContextRef
, bytes
: &[u8]) -> ValueRef
{
965 let ptr
= bytes
.as_ptr() as *const c_char
;
966 return llvm
::LLVMConstStringInContext(llcx
, ptr
, bytes
.len() as c_uint
, True
);
970 pub fn const_get_elt(v
: ValueRef
, us
: &[c_uint
])
973 let r
= llvm
::LLVMConstExtractValue(v
, us
.as_ptr(), us
.len() as c_uint
);
975 debug
!("const_get_elt(v={:?}, us={:?}, r={:?})",
976 Value(v
), us
, Value(r
));
982 pub fn const_to_int(v
: ValueRef
) -> i64 {
984 llvm
::LLVMConstIntGetSExtValue(v
)
988 pub fn const_to_uint(v
: ValueRef
) -> u64 {
990 llvm
::LLVMConstIntGetZExtValue(v
)
994 fn is_const_integral(v
: ValueRef
) -> bool
{
996 !llvm
::LLVMIsAConstantInt(v
).is_null()
1000 pub fn const_to_opt_int(v
: ValueRef
) -> Option
<i64> {
1002 if is_const_integral(v
) {
1003 Some(llvm
::LLVMConstIntGetSExtValue(v
))
1010 pub fn const_to_opt_uint(v
: ValueRef
) -> Option
<u64> {
1012 if is_const_integral(v
) {
1013 Some(llvm
::LLVMConstIntGetZExtValue(v
))
1020 pub fn is_undef(val
: ValueRef
) -> bool
{
1022 llvm
::LLVMIsUndef(val
) != False
1026 #[allow(dead_code)] // potentially useful
1027 pub fn is_null(val
: ValueRef
) -> bool
{
1029 llvm
::LLVMIsNull(val
) != False
1033 pub fn monomorphize_type
<'blk
, 'tcx
>(bcx
: &BlockS
<'blk
, 'tcx
>, t
: Ty
<'tcx
>) -> Ty
<'tcx
> {
1034 bcx
.fcx
.monomorphize(&t
)
1037 pub fn node_id_type
<'blk
, 'tcx
>(bcx
: &BlockS
<'blk
, 'tcx
>, id
: ast
::NodeId
) -> Ty
<'tcx
> {
1038 let tcx
= bcx
.tcx();
1039 let t
= tcx
.node_id_to_type(id
);
1040 monomorphize_type(bcx
, t
)
1043 pub fn expr_ty
<'blk
, 'tcx
>(bcx
: &BlockS
<'blk
, 'tcx
>, ex
: &hir
::Expr
) -> Ty
<'tcx
> {
1044 node_id_type(bcx
, ex
.id
)
1047 pub fn expr_ty_adjusted
<'blk
, 'tcx
>(bcx
: &BlockS
<'blk
, 'tcx
>, ex
: &hir
::Expr
) -> Ty
<'tcx
> {
1048 monomorphize_type(bcx
, bcx
.tcx().expr_ty_adjusted(ex
))
1051 /// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
1052 /// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
1053 /// guarantee to us that all nested obligations *could be* resolved if we wanted to.
1054 pub fn fulfill_obligation
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>,
1056 trait_ref
: ty
::PolyTraitRef
<'tcx
>)
1057 -> traits
::Vtable
<'tcx
, ()>
1059 let tcx
= ccx
.tcx();
1061 // Remove any references to regions; this helps improve caching.
1062 let trait_ref
= tcx
.erase_regions(&trait_ref
);
1064 // First check the cache.
1065 match ccx
.trait_cache().borrow().get(&trait_ref
) {
1067 info
!("Cache hit: {:?}", trait_ref
);
1068 return (*vtable
).clone();
1073 debug
!("trans fulfill_obligation: trait_ref={:?} def_id={:?}",
1074 trait_ref
, trait_ref
.def_id());
1077 // Do the initial selection for the obligation. This yields the
1078 // shallow result we are looking for -- that is, what specific impl.
1079 let infcx
= infer
::normalizing_infer_ctxt(tcx
, &tcx
.tables
, ProjectionMode
::Any
);
1080 let mut selcx
= SelectionContext
::new(&infcx
);
1083 traits
::Obligation
::new(traits
::ObligationCause
::misc(span
, ast
::DUMMY_NODE_ID
),
1084 trait_ref
.to_poly_trait_predicate());
1085 let selection
= match selcx
.select(&obligation
) {
1086 Ok(Some(selection
)) => selection
,
1088 // Ambiguity can happen when monomorphizing during trans
1089 // expands to some humongo type that never occurred
1090 // statically -- this humongo type can then overflow,
1091 // leading to an ambiguous result. So report this as an
1092 // overflow bug, since I believe this is the only case
1093 // where ambiguity can result.
1094 debug
!("Encountered ambiguity selecting `{:?}` during trans, \
1095 presuming due to overflow",
1097 ccx
.sess().span_fatal(
1099 "reached the recursion limit during monomorphization (selection ambiguity)");
1104 "Encountered error `{:?}` selecting `{:?}` during trans",
1110 // Currently, we use a fulfillment context to completely resolve
1111 // all nested obligations. This is because they can inform the
1112 // inference of the impl's type parameters.
1113 let mut fulfill_cx
= traits
::FulfillmentContext
::new();
1114 let vtable
= selection
.map(|predicate
| {
1115 fulfill_cx
.register_predicate_obligation(&infcx
, predicate
);
1117 let vtable
= infer
::drain_fulfillment_cx_or_panic(
1118 span
, &infcx
, &mut fulfill_cx
, &vtable
1121 info
!("Cache miss: {:?} => {:?}", trait_ref
, vtable
);
1123 ccx
.trait_cache().borrow_mut().insert(trait_ref
, vtable
.clone());
1128 /// Normalizes the predicates and checks whether they hold. If this
1129 /// returns false, then either normalize encountered an error or one
1130 /// of the predicates did not hold. Used when creating vtables to
1131 /// check for unsatisfiable methods.
1132 pub fn normalize_and_test_predicates
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>,
1133 predicates
: Vec
<ty
::Predicate
<'tcx
>>)
1136 debug
!("normalize_and_test_predicates(predicates={:?})",
1139 let tcx
= ccx
.tcx();
1140 let infcx
= infer
::normalizing_infer_ctxt(tcx
, &tcx
.tables
, ProjectionMode
::Any
);
1141 let mut selcx
= SelectionContext
::new(&infcx
);
1142 let mut fulfill_cx
= traits
::FulfillmentContext
::new();
1143 let cause
= traits
::ObligationCause
::dummy();
1144 let traits
::Normalized { value: predicates, obligations }
=
1145 traits
::normalize(&mut selcx
, cause
.clone(), &predicates
);
1146 for obligation
in obligations
{
1147 fulfill_cx
.register_predicate_obligation(&infcx
, obligation
);
1149 for predicate
in predicates
{
1150 let obligation
= traits
::Obligation
::new(cause
.clone(), predicate
);
1151 fulfill_cx
.register_predicate_obligation(&infcx
, obligation
);
1154 infer
::drain_fulfillment_cx(&infcx
, &mut fulfill_cx
, &()).is_ok()
1157 pub fn langcall(bcx
: Block
,
1162 match bcx
.tcx().lang_items
.require(li
) {
1165 let msg
= format
!("{} {}", msg
, s
);
1167 Some(span
) => bcx
.tcx().sess
.span_fatal(span
, &msg
[..]),
1168 None
=> bcx
.tcx().sess
.fatal(&msg
[..]),
1174 /// Return the VariantDef corresponding to an inlined variant node
1175 pub fn inlined_variant_def
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>,
1176 inlined_vid
: ast
::NodeId
)
1177 -> ty
::VariantDef
<'tcx
>
1180 let ctor_ty
= ccx
.tcx().node_id_to_type(inlined_vid
);
1181 debug
!("inlined_variant_def: ctor_ty={:?} inlined_vid={:?}", ctor_ty
,
1183 let adt_def
= match ctor_ty
.sty
{
1184 ty
::TyFnDef(_
, _
, &ty
::BareFnTy
{ sig
: ty
::Binder(ty
::FnSig
{
1185 output
: ty
::FnConverging(ty
), ..
1188 }.ty_adt_def().unwrap();
1189 let inlined_vid_def_id
= ccx
.tcx().map
.local_def_id(inlined_vid
);
1190 adt_def
.variants
.iter().find(|v
| {
1191 inlined_vid_def_id
== v
.did
||
1192 ccx
.external().borrow().get(&v
.did
) == Some(&Some(inlined_vid
))
1193 }).unwrap_or_else(|| {
1194 bug
!("no variant for {:?}::{}", adt_def
, inlined_vid
)
1198 // To avoid UB from LLVM, these two functions mask RHS with an
1199 // appropriate mask unconditionally (i.e. the fallback behavior for
1200 // all shifts). For 32- and 64-bit types, this matches the semantics
1201 // of Java. (See related discussion on #1877 and #10183.)
1203 pub fn build_unchecked_lshift
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1206 binop_debug_loc
: DebugLoc
) -> ValueRef
{
1207 let rhs
= base
::cast_shift_expr_rhs(bcx
, hir
::BinOp_
::BiShl
, lhs
, rhs
);
1208 // #1877, #10183: Ensure that input is always valid
1209 let rhs
= shift_mask_rhs(bcx
, rhs
, binop_debug_loc
);
1210 build
::Shl(bcx
, lhs
, rhs
, binop_debug_loc
)
1213 pub fn build_unchecked_rshift
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1217 binop_debug_loc
: DebugLoc
) -> ValueRef
{
1218 let rhs
= base
::cast_shift_expr_rhs(bcx
, hir
::BinOp_
::BiShr
, lhs
, rhs
);
1219 // #1877, #10183: Ensure that input is always valid
1220 let rhs
= shift_mask_rhs(bcx
, rhs
, binop_debug_loc
);
1221 let is_signed
= lhs_t
.is_signed();
1223 build
::AShr(bcx
, lhs
, rhs
, binop_debug_loc
)
1225 build
::LShr(bcx
, lhs
, rhs
, binop_debug_loc
)
1229 fn shift_mask_rhs
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1231 debug_loc
: DebugLoc
) -> ValueRef
{
1232 let rhs_llty
= val_ty(rhs
);
1233 build
::And(bcx
, rhs
, shift_mask_val(bcx
, rhs_llty
, rhs_llty
, false), debug_loc
)
1236 pub fn shift_mask_val
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1239 invert
: bool
) -> ValueRef
{
1240 let kind
= llty
.kind();
1242 TypeKind
::Integer
=> {
1243 // i8/u8 can shift by at most 7, i16/u16 by at most 15, etc.
1244 let val
= llty
.int_width() - 1;
1246 C_integral(mask_llty
, !val
, true)
1248 C_integral(mask_llty
, val
, false)
1251 TypeKind
::Vector
=> {
1252 let mask
= shift_mask_val(bcx
, llty
.element_type(), mask_llty
.element_type(), invert
);
1253 build
::VectorSplat(bcx
, mask_llty
.vector_length(), mask
)
1255 _
=> bug
!("shift_mask_val: expected Integer or Vector, found {:?}", kind
),