// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(non_camel_case_types, non_snake_case)]

//! Code that is useful in various trans modules.

use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef, TypeKind};
use llvm::{True, False, Bool, OperandBundleDef};
use rustc::hir::def::Def;
use rustc::hir::def_id::DefId;
use rustc::infer::TransNormalize;
use rustc::util::common::MemoizationMap;
use middle::lang_items::LangItem;
use rustc::ty::subst::Substs;
use abi::{Abi, FnType};
use debuginfo::{self, DebugLoc};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::traits::{self, SelectionContext, ProjectionMode};
use rustc::ty::fold::TypeFoldable;
use util::nodemap::NodeMap;
use arena::TypedArena;
use libc::{c_uint, c_char};
use std::ffi::CString;
use std::cell::{Cell, RefCell};
use syntax::codemap::{DUMMY_SP, Span};
use syntax::parse::token::InternedString;
use syntax::parse::token;

pub use context::{CrateContext, SharedCrateContext};

/// Is the type's representation size known at compile time?
pub fn type_is_sized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
    ty.is_sized(tcx, &tcx.empty_parameter_environment(), DUMMY_SP)
}

pub fn type_is_fat_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.sty {
        ty::TyRawPtr(ty::TypeAndMut{ty, ..}) |
        ty::TyRef(_, ty::TypeAndMut{ty, ..}) |
        ty::TyBox(ty) => {
            !type_is_sized(tcx, ty)
        }
        _ => false
    }
}

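// Illustrative note (not part of the original source): a "fat" pointer is one
// whose pointee is unsized, so it carries extra metadata next to the data
// pointer, e.g.
//
//     // let thin: &u32 = &1;          // sized pointee   -> plain pointer
//     // let fat: &[u32] = &[1, 2, 3]; // unsized pointee  -> pointer + length
//     // let obj: &Display = &1u32;    // trait object     -> pointer + vtable
//
// `type_is_fat_ptr` returns true only for the latter two shapes.
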
pub fn type_is_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use machine::llsize_of_alloc;
    use type_of::sizing_type_of;

    let tcx = ccx.tcx();
    let simple = ty.is_scalar() ||
        ty.is_unique() || ty.is_region_ptr() ||
        ty.is_simd();
    if simple && !type_is_fat_ptr(tcx, ty) {
        return true;
    }
    if !type_is_sized(tcx, ty) {
        return false;
    }
    match ty.sty {
        ty::TyStruct(..) | ty::TyEnum(..) | ty::TyTuple(..) | ty::TyArray(_, _) |
        ty::TyClosure(..) => {
            let llty = sizing_type_of(ccx, ty);
            llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type())
        }
        _ => type_is_zero_size(ccx, ty)
    }
}

/// Identify types which have size zero at runtime.
pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use machine::llsize_of_alloc;
    use type_of::sizing_type_of;
    let llty = sizing_type_of(ccx, ty);
    llsize_of_alloc(ccx, llty) == 0
}

/// Generates a unique symbol based off the name given. This is used to create
/// unique symbols for things like closures.
pub fn gensym_name(name: &str) -> ast::Name {
    let num = token::gensym(name).0;
    // use one colon which will get translated to a period by the mangler, and
    // we're guaranteed that `num` is globally unique for this crate.
    token::gensym(&format!("{}:{}", name, num))
}

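// Illustrative note (not part of the original source): the returned name is
// roughly of the form "<name>:<num>", e.g. gensym_name("closure") might yield
// an interned "closure:4071" (the number is whatever the gensym counter
// produces); the ':' is later rewritten to '.' by the symbol mangler.
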
/*
 * A note on nomenclature of linking: "extern", "foreign", and "upcall".
 *
 * An "extern" is an LLVM symbol we wind up emitting an undefined external
 * reference to. This means "we don't have the thing in this compilation unit,
 * please make sure you link it in at runtime". This could be a reference to
 * C code found in a C library, or rust code found in a rust crate.
 *
 * Most "externs" are implicitly declared (automatically) as a result of a
 * user declaring an extern _module_ dependency; this causes the rust driver
 * to locate an extern crate, scan its compilation metadata, and emit extern
 * declarations for any symbols used by the declaring crate.
 *
 * A "foreign" is an extern that references C (or other non-rust ABI) code.
 * There is no metadata to scan for extern references so in these cases either
 * a header-digester like bindgen, or manual function prototypes, have to
 * serve as declarators. So these are usually given explicitly as prototype
 * declarations, in rust code, with ABI attributes on them noting which ABI to
 * use.
 *
 * An "upcall" is a foreign call generated by the compiler (not corresponding
 * to any user-written call in the code) into the runtime library, to perform
 * some helper task such as bringing a task to life, allocating memory, etc.
 */

#[derive(Copy, Clone)]
pub struct NodeIdAndSpan {
    pub id: ast::NodeId,
    pub span: Span,
}

pub fn expr_info(expr: &hir::Expr) -> NodeIdAndSpan {
    NodeIdAndSpan { id: expr.id, span: expr.span }
}

/// The concrete version of ty::FieldDef. The name is the field index if
/// the field is numeric.
pub struct Field<'tcx>(pub ast::Name, pub Ty<'tcx>);

/// The concrete version of ty::VariantDef
pub struct VariantInfo<'tcx> {
    pub discr: Disr,
    pub fields: Vec<Field<'tcx>>
}

impl<'a, 'tcx> VariantInfo<'tcx> {
    pub fn from_ty(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                   ty: Ty<'tcx>,
                   opt_def: Option<Def>)
                   -> Self
    {
        match ty.sty {
            ty::TyStruct(adt, substs) | ty::TyEnum(adt, substs) => {
                let variant = match opt_def {
                    None => adt.struct_variant(),
                    Some(def) => adt.variant_of_def(def)
                };

                VariantInfo {
                    discr: Disr::from(variant.disr_val),
                    fields: variant.fields.iter().map(|f| {
                        Field(f.name, monomorphize::field_ty(tcx, substs, f))
                    }).collect()
                }
            }

            ty::TyTuple(ref v) => {
                VariantInfo {
                    discr: Disr(0),
                    fields: v.iter().enumerate().map(|(i, &t)| {
                        Field(token::intern(&i.to_string()), t)
                    }).collect()
                }
            }

            _ => {
                bug!("cannot get field types from the type {:?}", ty);
            }
        }
    }

    /// Return the variant corresponding to a given node (e.g. expr)
    pub fn of_node(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>, id: ast::NodeId) -> Self {
        let node_def = tcx.def_map.borrow().get(&id).map(|v| v.full_def());
        Self::from_ty(tcx, ty, node_def)
    }

    pub fn field_index(&self, name: ast::Name) -> usize {
        self.fields.iter().position(|&Field(n, _)| n == name).unwrap_or_else(|| {
            bug!("unknown field `{}`", name)
        })
    }
}

pub struct BuilderRef_res {
    pub b: BuilderRef
}

impl Drop for BuilderRef_res {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(self.b);
        }
    }
}

pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {
    BuilderRef_res {
        b: b
    }
}

pub fn validate_substs(substs: &Substs) {
    assert!(!substs.types.needs_infer());
}

// work around bizarre resolve errors
type RvalueDatum<'tcx> = datum::Datum<'tcx, datum::Rvalue>;
pub type LvalueDatum<'tcx> = datum::Datum<'tcx, datum::Lvalue>;

#[derive(Clone, Debug)]
struct HintEntry<'tcx> {
    // The datum for the dropflag-hint itself; note that many
    // source-level Lvalues will be associated with the same
    // dropflag-hint datum.
    datum: cleanup::DropHintDatum<'tcx>,
}

pub struct DropFlagHintsMap<'tcx> {
    // Maps NodeId for expressions that read/write unfragmented state
    // to that state's drop-flag "hint." (A stack-local hint
    // indicates either that (1.) it is certain that no-drop is
    // needed, or (2.) inline drop-flag must be consulted.)
    node_map: NodeMap<HintEntry<'tcx>>,
}

impl<'tcx> DropFlagHintsMap<'tcx> {
    pub fn new() -> DropFlagHintsMap<'tcx> { DropFlagHintsMap { node_map: NodeMap() } }
    pub fn has_hint(&self, id: ast::NodeId) -> bool { self.node_map.contains_key(&id) }
    pub fn insert(&mut self, id: ast::NodeId, datum: cleanup::DropHintDatum<'tcx>) {
        self.node_map.insert(id, HintEntry { datum: datum });
    }
    pub fn hint_datum(&self, id: ast::NodeId) -> Option<cleanup::DropHintDatum<'tcx>> {
        self.node_map.get(&id).map(|t| t.datum)
    }
}

// Function context. Every LLVM function we create will have one of
// these.
pub struct FunctionContext<'a, 'tcx: 'a> {
    // The MIR for this function. At present, this is optional because
    // we only have MIR available for things that are local to the
    // crate.
    pub mir: Option<CachedMir<'a, 'tcx>>,

    // The ValueRef returned from a call to llvm::LLVMAddFunction; the
    // address of the first instruction in the sequence of
    // instructions for this function that will go in the .text
    // section of the executable we're generating.
    pub llfn: ValueRef,

    // always an empty parameter-environment NOTE: @jroesch another use of ParamEnv
    pub param_env: ty::ParameterEnvironment<'tcx>,

    // A pointer to where to store the return value. If the return type is
    // immediate, this points to an alloca in the function. Otherwise, it's a
    // pointer to the hidden first parameter of the function. After function
    // construction, this should always be Some.
    pub llretslotptr: Cell<Option<ValueRef>>,

    // These pub elements: "hoisted basic blocks" containing
    // administrative activities that have to happen in only one place in
    // the function, due to LLVM's quirks.
    // A marker for the place where we want to insert the function's static
    // allocas, so that LLVM will coalesce them into a single alloca call.
    pub alloca_insert_pt: Cell<Option<ValueRef>>,
    pub llreturn: Cell<Option<BasicBlockRef>>,

    // If the function has any nested returns, including something like:
    // fn foo() -> Option<Foo> { Some(Foo { x: return None }) }, then
    // we use a separate alloca for each return
    pub needs_ret_allocas: bool,

    // When working with landingpad-based exceptions this value is alloca'd and
    // later loaded when using the resume instruction. This ends up being
    // critical to chaining landing pads and reusing already-translated
    // cleanups.
    //
    // Note that for cleanuppad-based exceptions this is not used.
    pub landingpad_alloca: Cell<Option<ValueRef>>,

    // Maps the DefId's for local variables to the allocas created for
    // them in llallocas.
    pub lllocals: RefCell<NodeMap<LvalueDatum<'tcx>>>,

    // Same as above, but for closure upvars
    pub llupvars: RefCell<NodeMap<ValueRef>>,

    // Carries info about drop-flags for local bindings (longer term,
    // paths) for the code being compiled.
    pub lldropflag_hints: RefCell<DropFlagHintsMap<'tcx>>,

    // Describes the return/argument LLVM types and their ABI handling.
    pub fn_ty: FnType,

    // If this function is being monomorphized, this contains the type
    // substitutions used.
    pub param_substs: &'tcx Substs<'tcx>,

    // The source span and nesting context where this function comes from, for
    // error reporting and symbol generation.
    pub span: Option<Span>,

    // The arena that blocks are allocated from.
    pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,

    // The arena that landing pads are allocated from.
    pub lpad_arena: TypedArena<LandingPad>,

    // This function's enclosing crate context.
    pub ccx: &'a CrateContext<'a, 'tcx>,

    // Used and maintained by the debuginfo module.
    pub debug_context: debuginfo::FunctionDebugContext,

    pub scopes: RefCell<Vec<cleanup::CleanupScope<'a, 'tcx>>>,

    pub cfg: Option<cfg::CFG>,
}

impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
    pub fn mir(&self) -> CachedMir<'a, 'tcx> {
        self.mir.clone().expect("fcx.mir was empty")
    }

    pub fn cleanup(&self) {
        unsafe {
            llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt
                                                     .get()
                                                     .unwrap());
        }
    }

    pub fn get_llreturn(&self) -> BasicBlockRef {
        if self.llreturn.get().is_none() {
            self.llreturn.set(Some(unsafe {
                llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn,
                                                    "return\0".as_ptr() as *const _)
            }))
        }

        self.llreturn.get().unwrap()
    }

    pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>, name: &str) -> ValueRef {
        if self.needs_ret_allocas {
            base::alloca(bcx, self.fn_ty.ret.memory_ty(self.ccx), name)
        } else {
            self.llretslotptr.get().unwrap()
        }
    }

    pub fn new_block(&'a self,
                     name: &str,
                     opt_node_id: Option<ast::NodeId>)
                     -> Block<'a, 'tcx> {
        unsafe {
            let name = CString::new(name).unwrap();
            let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),
                                                           self.llfn,
                                                           name.as_ptr());
            BlockS::new(llbb, opt_node_id, self)
        }
    }

    pub fn new_id_block(&'a self,
                        name: &str,
                        node_id: ast::NodeId)
                        -> Block<'a, 'tcx> {
        self.new_block(name, Some(node_id))
    }

    pub fn new_temp_block(&'a self,
                          name: &str)
                          -> Block<'a, 'tcx> {
        self.new_block(name, None)
    }

    pub fn join_blocks(&'a self,
                       id: ast::NodeId,
                       in_cxs: &[Block<'a, 'tcx>])
                       -> Block<'a, 'tcx> {
        let out = self.new_id_block("join", id);
        let mut reachable = false;
        for bcx in in_cxs {
            if !bcx.unreachable.get() {
                build::Br(*bcx, out.llbb, DebugLoc::None);
                reachable = true;
            }
        }
        if !reachable {
            build::Unreachable(out);
        }
        out
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T: TransNormalize<'tcx>
    {
        monomorphize::apply_param_substs(self.ccx.tcx(),
                                         self.param_substs,
                                         value)
    }

    /// This is the same as `common::type_needs_drop`, except that it
    /// may use or update caches within this `FunctionContext`.
    pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
        self.ccx.tcx().type_needs_drop_given_env(ty, &self.param_env)
    }

    pub fn eh_personality(&self) -> ValueRef {
        // The exception handling personality function.
        //
        // If our compilation unit has the `eh_personality` lang item somewhere
        // within it, then we just need to translate that. Otherwise, we're
        // building an rlib which will depend on some upstream implementation of
        // this function, so we just codegen a generic reference to it. We don't
        // specify any of the types for the function, we just make it a symbol
        // that LLVM can later use.
        //
        // Note that MSVC is a little special here in that we don't use the
        // `eh_personality` lang item at all. Currently LLVM has support for
        // both Dwarf and SEH unwind mechanisms for MSVC targets and uses the
        // *name of the personality function* to decide what kind of unwind side
        // tables/landing pads to emit. It looks like Dwarf is used by default,
        // injecting a dependency on the `_Unwind_Resume` symbol for resuming
        // an "exception", but for MSVC we want to force SEH. This means that we
        // can't actually have the personality function be our standard
        // `rust_eh_personality` function, but rather we wired it up to the
        // CRT's custom personality function, which forces LLVM to consider
        // landing pads as "landing pads for SEH".
        let ccx = self.ccx;
        let tcx = ccx.tcx();
        match tcx.lang_items.eh_personality() {
            Some(def_id) if !base::wants_msvc_seh(ccx.sess()) => {
                Callee::def(ccx, def_id, tcx.mk_substs(Substs::empty())).reify(ccx).val
            }
            _ => {
                if let Some(llpersonality) = ccx.eh_personality().get() {
                    return llpersonality
                }
                let name = if base::wants_msvc_seh(ccx.sess()) {
                    "__CxxFrameHandler3"
                } else {
                    "rust_eh_personality"
                };
                let fty = Type::variadic_func(&[], &Type::i32(ccx));
                let f = declare::declare_cfn(ccx, name, fty);
                ccx.eh_personality().set(Some(f));
                f
            }
        }
    }

    // Returns a ValueRef of the "eh_unwind_resume" lang item if one is defined,
    // otherwise declares it as an external function.
    pub fn eh_unwind_resume(&self) -> Callee<'tcx> {
        let ccx = self.ccx;
        let tcx = ccx.tcx();
        assert!(ccx.sess().target.target.options.custom_unwind_resume);
        if let Some(def_id) = tcx.lang_items.eh_unwind_resume() {
            return Callee::def(ccx, def_id, tcx.mk_substs(Substs::empty()));
        }

        let ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy {
            unsafety: hir::Unsafety::Unsafe,
            abi: Abi::C,
            sig: ty::Binder(ty::FnSig {
                inputs: vec![tcx.mk_mut_ptr(tcx.types.u8)],
                output: ty::FnDiverging,
                variadic: false
            }),
        }));

        let unwresume = ccx.eh_unwind_resume();
        if let Some(llfn) = unwresume.get() {
            return Callee::ptr(datum::immediate_rvalue(llfn, ty));
        }
        let llfn = declare::declare_fn(ccx, "rust_eh_unwind_resume", ty);
        attributes::unwind(llfn, true);
        unwresume.set(Some(llfn));
        Callee::ptr(datum::immediate_rvalue(llfn, ty))
    }
}

// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
// with many basic blocks per function. All the basic blocks attached to a
// function are organized as a directed graph.
pub struct BlockS<'blk, 'tcx: 'blk> {
    // The BasicBlockRef returned from a call to
    // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
    // block to the function pointed to by llfn. We insert
    // instructions into that block by way of this block context.
    // The block pointing to this one in the function's digraph.
    pub llbb: BasicBlockRef,
    pub terminated: Cell<bool>,
    pub unreachable: Cell<bool>,

    // If this block is part of a landing pad, then this is `Some` indicating
    // what kind of landing pad it's in; otherwise this is `None`.
    pub lpad: Cell<Option<&'blk LandingPad>>,

    // AST node-id associated with this block, if any. Used for
    // debugging purposes only.
    pub opt_node_id: Option<ast::NodeId>,

    // The function context for the function to which this block is
    // attached.
    pub fcx: &'blk FunctionContext<'blk, 'tcx>,
}

pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;

impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
    pub fn new(llbb: BasicBlockRef,
               opt_node_id: Option<ast::NodeId>,
               fcx: &'blk FunctionContext<'blk, 'tcx>)
               -> Block<'blk, 'tcx> {
        fcx.block_arena.alloc(BlockS {
            llbb: llbb,
            terminated: Cell::new(false),
            unreachable: Cell::new(false),
            lpad: Cell::new(None),
            opt_node_id: opt_node_id,
            fcx: fcx
        })
    }

    pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
        self.fcx.ccx
    }
    pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> {
        self.fcx
    }
    pub fn tcx(&self) -> TyCtxt<'blk, 'tcx, 'tcx> {
        self.fcx.ccx.tcx()
    }
    pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }

    pub fn lpad(&self) -> Option<&'blk LandingPad> {
        self.lpad.get()
    }

    pub fn mir(&self) -> CachedMir<'blk, 'tcx> {
        self.fcx.mir()
    }

    pub fn name(&self, name: ast::Name) -> String {
        name.to_string()
    }

    pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
        self.tcx().map.node_to_string(id).to_string()
    }

    pub fn def(&self, nid: ast::NodeId) -> Def {
        match self.tcx().def_map.borrow().get(&nid) {
            Some(v) => v.full_def(),
            None => {
                bug!("no def associated with node id {}", nid);
            }
        }
    }

    pub fn to_str(&self) -> String {
        format!("[block {:p}]", self)
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T: TransNormalize<'tcx>
    {
        monomorphize::apply_param_substs(self.tcx(),
                                         self.fcx.param_substs,
                                         value)
    }

    pub fn build(&'blk self) -> BlockAndBuilder<'blk, 'tcx> {
        BlockAndBuilder::new(self, OwnedBuilder::new_with_ccx(self.ccx()))
    }
}

pub struct OwnedBuilder<'blk, 'tcx: 'blk> {
    builder: Builder<'blk, 'tcx>
}

impl<'blk, 'tcx> OwnedBuilder<'blk, 'tcx> {
    pub fn new_with_ccx(ccx: &'blk CrateContext<'blk, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe {
            llvm::LLVMCreateBuilderInContext(ccx.llcx())
        };
        OwnedBuilder {
            builder: Builder {
                llbuilder: llbuilder,
                ccx: ccx,
            }
        }
    }
}

impl<'blk, 'tcx> Drop for OwnedBuilder<'blk, 'tcx> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(self.builder.llbuilder);
        }
    }
}

pub struct BlockAndBuilder<'blk, 'tcx: 'blk> {
    bcx: Block<'blk, 'tcx>,
    owned_builder: OwnedBuilder<'blk, 'tcx>,
}

impl<'blk, 'tcx> BlockAndBuilder<'blk, 'tcx> {
    pub fn new(bcx: Block<'blk, 'tcx>, owned_builder: OwnedBuilder<'blk, 'tcx>) -> Self {
        // Set the builder's position to this block's end.
        owned_builder.builder.position_at_end(bcx.llbb);
        BlockAndBuilder {
            bcx: bcx,
            owned_builder: owned_builder,
        }
    }

    pub fn with_block<F, R>(&self, f: F) -> R
        where F: FnOnce(Block<'blk, 'tcx>) -> R
    {
        let result = f(self.bcx);
        self.position_at_end(self.bcx.llbb);
        result
    }

    pub fn map_block<F>(self, f: F) -> Self
        where F: FnOnce(Block<'blk, 'tcx>) -> Block<'blk, 'tcx>
    {
        let BlockAndBuilder { bcx, owned_builder } = self;
        let bcx = f(bcx);
        BlockAndBuilder::new(bcx, owned_builder)
    }

    pub fn at_start<F, R>(&self, f: F) -> R
        where F: FnOnce(&BlockAndBuilder<'blk, 'tcx>) -> R
    {
        self.position_at_start(self.bcx.llbb);
        let r = f(self);
        self.position_at_end(self.bcx.llbb);
        r
    }

    // Methods delegated to bcx

    pub fn is_unreachable(&self) -> bool {
        self.bcx.unreachable.get()
    }

    pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
        self.bcx.ccx()
    }
    pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> {
        self.bcx.fcx()
    }
    pub fn tcx(&self) -> TyCtxt<'blk, 'tcx, 'tcx> {
        self.bcx.tcx()
    }
    pub fn sess(&self) -> &'blk Session {
        self.bcx.sess()
    }

    pub fn llbb(&self) -> BasicBlockRef {
        self.bcx.llbb
    }

    pub fn mir(&self) -> CachedMir<'blk, 'tcx> {
        self.bcx.mir()
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T: TransNormalize<'tcx>
    {
        self.bcx.monomorphize(value)
    }

    pub fn set_lpad(&self, lpad: Option<LandingPad>) {
        self.bcx.lpad.set(lpad.map(|p| &*self.fcx().lpad_arena.alloc(p)))
    }
}

impl<'blk, 'tcx> Deref for BlockAndBuilder<'blk, 'tcx> {
    type Target = Builder<'blk, 'tcx>;
    fn deref(&self) -> &Self::Target {
        &self.owned_builder.builder
    }
}

/// A structure representing an active landing pad for the duration of a basic
/// block.
///
/// Each `Block` may contain an instance of this, indicating whether the block
/// is part of a landing pad or not. This is used to make decisions about whether
/// to emit `invoke` instructions (e.g. in a landing pad we don't continue to
/// use `invoke`) and also about various function call metadata.
///
/// For GNU exceptions (`landingpad` + `resume` instructions) this structure is
/// just a bunch of `None` instances (not too interesting), but for MSVC
/// exceptions (`cleanuppad` + `cleanupret` instructions) this contains data.
/// When inside of a landing pad, each function call in LLVM IR needs to be
/// annotated with which landing pad it's a part of. This is accomplished via
/// the `OperandBundleDef` value created for MSVC landing pads.
pub struct LandingPad {
    cleanuppad: Option<ValueRef>,
    operand: Option<OperandBundleDef>,
}

impl LandingPad {
    pub fn gnu() -> LandingPad {
        LandingPad { cleanuppad: None, operand: None }
    }

    pub fn msvc(cleanuppad: ValueRef) -> LandingPad {
        LandingPad {
            cleanuppad: Some(cleanuppad),
            operand: Some(OperandBundleDef::new("funclet", &[cleanuppad])),
        }
    }

    pub fn bundle(&self) -> Option<&OperandBundleDef> {
        self.operand.as_ref()
    }
}

impl Clone for LandingPad {
    fn clone(&self) -> LandingPad {
        LandingPad {
            cleanuppad: self.cleanuppad,
            operand: self.cleanuppad.map(|p| {
                OperandBundleDef::new("funclet", &[p])
            }),
        }
    }
}

pub struct Result<'blk, 'tcx: 'blk> {
    pub bcx: Block<'blk, 'tcx>,
    pub val: ValueRef
}

impl<'b, 'tcx> Result<'b, 'tcx> {
    pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> {
        Result {
            bcx: bcx,
            val: val
        }
    }
}

pub fn val_ty(v: ValueRef) -> Type {
    unsafe {
        Type::from_ref(llvm::LLVMTypeOf(v))
    }
}

// LLVM constant constructors.
pub fn C_null(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstNull(t.to_ref())
    }
}

pub fn C_undef(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMGetUndef(t.to_ref())
    }
}

pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool)
    }
}

pub fn C_floating(s: &str, t: Type) -> ValueRef {
    unsafe {
        let s = CString::new(s).unwrap();
        llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr())
    }
}

pub fn C_floating_f64(f: f64, t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstReal(t.to_ref(), f)
    }
}

pub fn C_nil(ccx: &CrateContext) -> ValueRef {
    C_struct(ccx, &[], false)
}

pub fn C_bool(ccx: &CrateContext, val: bool) -> ValueRef {
    C_integral(Type::i1(ccx), val as u64, false)
}

pub fn C_i32(ccx: &CrateContext, i: i32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, true)
}

pub fn C_u32(ccx: &CrateContext, i: u32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, false)
}

pub fn C_u64(ccx: &CrateContext, i: u64) -> ValueRef {
    C_integral(Type::i64(ccx), i, false)
}

pub fn C_int<I: AsI64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_i64();

    let bit_size = machine::llbitsize_of_real(ccx, ccx.int_type());

    if bit_size < 64 {
        // make sure it doesn't overflow
        assert!(v < (1<<(bit_size-1)) && v >= -(1<<(bit_size-1)));
    }

    C_integral(ccx.int_type(), v as u64, true)
}

pub fn C_uint<I: AsU64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_u64();

    let bit_size = machine::llbitsize_of_real(ccx, ccx.int_type());

    if bit_size < 64 {
        // make sure it doesn't overflow
        assert!(v < (1<<bit_size));
    }

    C_integral(ccx.int_type(), v, false)
}

pub trait AsI64 { fn as_i64(self) -> i64; }
pub trait AsU64 { fn as_u64(self) -> u64; }

// FIXME: remove the intptr conversions, because they
// are host-architecture-dependent
impl AsI64 for i64 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for i32 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for isize { fn as_i64(self) -> i64 { self as i64 }}

impl AsU64 for u64 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for u32 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for usize { fn as_u64(self) -> u64 { self as u64 }}

pub fn C_u8(ccx: &CrateContext, i: u8) -> ValueRef {
    C_integral(Type::i8(ccx), i as u64, false)
}

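// Illustrative sketch (not part of the original source): these helpers compose,
// so a constant LLVM aggregate can be built directly from them, e.g.
//
//     // let flag = C_bool(ccx, true);   // i1 1
//     // let count = C_u32(ccx, 42);     // i32 42
//     // let pair = C_struct(ccx, &[flag, count], false);
//
// `C_struct` is defined further below; the `false` leaves the struct unpacked.
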
// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> ValueRef {
    unsafe {
        if let Some(&llval) = cx.const_cstr_cache().borrow().get(&s) {
            return llval;
        }

        let sc = llvm::LLVMConstStringInContext(cx.llcx(),
                                                s.as_ptr() as *const c_char,
                                                s.len() as c_uint,
                                                !null_terminated as Bool);

        let gsym = token::gensym("str");
        let sym = format!("str{}", gsym.0);
        let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{
            bug!("symbol `{}` is already defined", sym);
        });
        llvm::LLVMSetInitializer(g, sc);
        llvm::LLVMSetGlobalConstant(g, True);
        llvm::SetLinkage(g, llvm::InternalLinkage);

        cx.const_cstr_cache().borrow_mut().insert(s, g);
        g
    }
}

// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
    let len = s.len();
    let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx));
    C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])
}

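// Illustrative note (not part of the original source): the "str_slice" named
// struct is the (pointer, length) pair used to represent `&str`, so a literal
// such as "hello" lowers to roughly `{ i8* <ptr to bytes>, usize 5 }`.
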
pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef {
    C_struct_in_context(cx.llcx(), elts, packed)
}

pub fn C_struct_in_context(llcx: ContextRef, elts: &[ValueRef], packed: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstStructInContext(llcx,
                                       elts.as_ptr(), elts.len() as c_uint,
                                       packed as Bool)
    }
}

pub fn C_named_struct(t: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint)
    }
}

pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint);
    }
}

pub fn C_vector(elts: &[ValueRef]) -> ValueRef {
    unsafe {
        return llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint);
    }
}

pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef {
    C_bytes_in_context(cx.llcx(), bytes)
}

pub fn C_bytes_in_context(llcx: ContextRef, bytes: &[u8]) -> ValueRef {
    unsafe {
        let ptr = bytes.as_ptr() as *const c_char;
        return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True);
    }
}

pub fn const_get_elt(v: ValueRef, us: &[c_uint])
                     -> ValueRef {
    unsafe {
        let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint);

        debug!("const_get_elt(v={:?}, us={:?}, r={:?})",
               Value(v), us, Value(r));

        r
    }
}

pub fn const_to_int(v: ValueRef) -> i64 {
    unsafe {
        llvm::LLVMConstIntGetSExtValue(v)
    }
}

pub fn const_to_uint(v: ValueRef) -> u64 {
    unsafe {
        llvm::LLVMConstIntGetZExtValue(v)
    }
}

fn is_const_integral(v: ValueRef) -> bool {
    unsafe {
        !llvm::LLVMIsAConstantInt(v).is_null()
    }
}

pub fn const_to_opt_int(v: ValueRef) -> Option<i64> {
    unsafe {
        if is_const_integral(v) {
            Some(llvm::LLVMConstIntGetSExtValue(v))
        } else {
            None
        }
    }
}

pub fn const_to_opt_uint(v: ValueRef) -> Option<u64> {
    unsafe {
        if is_const_integral(v) {
            Some(llvm::LLVMConstIntGetZExtValue(v))
        } else {
            None
        }
    }
}

pub fn is_undef(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsUndef(val) != False
    }
}

#[allow(dead_code)] // potentially useful
pub fn is_null(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsNull(val) != False
    }
}

pub fn monomorphize_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
    bcx.fcx.monomorphize(&t)
}

pub fn node_id_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, id: ast::NodeId) -> Ty<'tcx> {
    let tcx = bcx.tcx();
    let t = tcx.node_id_to_type(id);
    monomorphize_type(bcx, t)
}

pub fn expr_ty<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> Ty<'tcx> {
    node_id_type(bcx, ex.id)
}

pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> Ty<'tcx> {
    monomorphize_type(bcx, bcx.tcx().expr_ty_adjusted(ex))
}

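// Illustrative note (not part of the original source): `expr_ty` is the type
// the expression was written at, while `expr_ty_adjusted` also applies the
// autoref/autoderef and coercion adjustments recorded by typeck. For example,
// an argument `&s` with `s: String` passed where `&str` is expected has
// `expr_ty` `&String` but an adjusted type of `&str`.
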
/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
pub fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
                                    span: Span,
                                    trait_ref: ty::PolyTraitRef<'tcx>)
                                    -> traits::Vtable<'tcx, ()>
{
    let tcx = scx.tcx();

    // Remove any references to regions; this helps improve caching.
    let trait_ref = tcx.erase_regions(&trait_ref);

    scx.trait_cache().memoize(trait_ref, || {
        debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}",
               trait_ref, trait_ref.def_id());

        // Do the initial selection for the obligation. This yields the
        // shallow result we are looking for -- that is, what specific impl.
        tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
            let mut selcx = SelectionContext::new(&infcx);

            let obligation_cause = traits::ObligationCause::misc(span,
                                                                 ast::DUMMY_NODE_ID);
            let obligation = traits::Obligation::new(obligation_cause,
                                                     trait_ref.to_poly_trait_predicate());

            let selection = match selcx.select(&obligation) {
                Ok(Some(selection)) => selection,
                Ok(None) => {
                    // Ambiguity can happen when monomorphizing during trans
                    // expands to some humongo type that never occurred
                    // statically -- this humongo type can then overflow,
                    // leading to an ambiguous result. So report this as an
                    // overflow bug, since I believe this is the only case
                    // where ambiguity can result.
                    debug!("Encountered ambiguity selecting `{:?}` during trans, \
                            presuming due to overflow",
                           trait_ref);
                    tcx.sess.span_fatal(span,
                                        "reached the recursion limit during monomorphization \
                                         (selection ambiguity)");
                }
                Err(e) => {
                    span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans",
                              e, trait_ref)
                }
            };

            // Currently, we use a fulfillment context to completely resolve
            // all nested obligations. This is because they can inform the
            // inference of the impl's type parameters.
            let mut fulfill_cx = traits::FulfillmentContext::new();
            let vtable = selection.map(|predicate| {
                fulfill_cx.register_predicate_obligation(&infcx, predicate);
            });
            let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);

            info!("Cache miss: {:?} => {:?}", trait_ref, vtable);

            vtable
        })
    })
}

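// Illustrative note (not part of the original source): resolving a concrete
// obligation such as `i32: Clone` here typically yields a
// `traits::VtableImpl(..)` naming the impl whose methods trans should call,
// and the `memoize` call above means repeated requests for the same
// region-erased trait-ref hit the shared cache instead of re-running selection.
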
/// Normalizes the predicates and checks whether they hold. If this
/// returns false, then either normalize encountered an error or one
/// of the predicates did not hold. Used when creating vtables to
/// check for unsatisfiable methods.
pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                               predicates: Vec<ty::Predicate<'tcx>>)
                                               -> bool
{
    debug!("normalize_and_test_predicates(predicates={:?})",
           predicates);

    tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
        let mut selcx = SelectionContext::new(&infcx);
        let mut fulfill_cx = traits::FulfillmentContext::new();
        let cause = traits::ObligationCause::dummy();
        let traits::Normalized { value: predicates, obligations } =
            traits::normalize(&mut selcx, cause.clone(), &predicates);
        for obligation in obligations {
            fulfill_cx.register_predicate_obligation(&infcx, obligation);
        }
        for predicate in predicates {
            let obligation = traits::Obligation::new(cause.clone(), predicate);
            fulfill_cx.register_predicate_obligation(&infcx, obligation);
        }

        infcx.drain_fulfillment_cx(&mut fulfill_cx, &()).is_ok()
    })
}

pub fn langcall(bcx: Block,
                span: Option<Span>,
                msg: &str,
                li: LangItem)
                -> DefId {
    match bcx.tcx().lang_items.require(li) {
        Ok(id) => id,
        Err(s) => {
            let msg = format!("{} {}", msg, s);
            match span {
                Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]),
                None => bcx.tcx().sess.fatal(&msg[..]),
            }
        }
    }
}

/// Return the VariantDef corresponding to an inlined variant node
pub fn inlined_variant_def<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                     inlined_vid: ast::NodeId)
                                     -> ty::VariantDef<'tcx>
{
    let ctor_ty = ccx.tcx().node_id_to_type(inlined_vid);
    debug!("inlined_variant_def: ctor_ty={:?} inlined_vid={:?}", ctor_ty,
           inlined_vid);
    let adt_def = match ctor_ty.sty {
        ty::TyFnDef(_, _, &ty::BareFnTy { sig: ty::Binder(ty::FnSig {
            output: ty::FnConverging(ty), ..
        }), ..}) => ty,
        _ => ctor_ty
    }.ty_adt_def().unwrap();
    let inlined_vid_def_id = ccx.tcx().map.local_def_id(inlined_vid);
    adt_def.variants.iter().find(|v| {
        inlined_vid_def_id == v.did ||
            ccx.external().borrow().get(&v.did) == Some(&Some(inlined_vid))
    }).unwrap_or_else(|| {
        bug!("no variant for {:?}::{}", adt_def, inlined_vid)
    })
}

// To avoid UB from LLVM, these two functions mask RHS with an
// appropriate mask unconditionally (i.e. the fallback behavior for
// all shifts). For 32- and 64-bit types, this matches the semantics
// of Java. (See related discussion on #1877 and #10183.)

pub fn build_unchecked_lshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          lhs: ValueRef,
                                          rhs: ValueRef,
                                          binop_debug_loc: DebugLoc) -> ValueRef {
    let rhs = base::cast_shift_expr_rhs(bcx, hir::BinOp_::BiShl, lhs, rhs);
    // #1877, #10183: Ensure that input is always valid
    let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
    build::Shl(bcx, lhs, rhs, binop_debug_loc)
}

pub fn build_unchecked_rshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          lhs_t: Ty<'tcx>,
                                          lhs: ValueRef,
                                          rhs: ValueRef,
                                          binop_debug_loc: DebugLoc) -> ValueRef {
    let rhs = base::cast_shift_expr_rhs(bcx, hir::BinOp_::BiShr, lhs, rhs);
    // #1877, #10183: Ensure that input is always valid
    let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
    let is_signed = lhs_t.is_signed();
    if is_signed {
        build::AShr(bcx, lhs, rhs, binop_debug_loc)
    } else {
        build::LShr(bcx, lhs, rhs, binop_debug_loc)
    }
}

fn shift_mask_rhs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                              rhs: ValueRef,
                              debug_loc: DebugLoc) -> ValueRef {
    let rhs_llty = val_ty(rhs);
    build::And(bcx, rhs, shift_mask_val(bcx, rhs_llty, rhs_llty, false), debug_loc)
}

pub fn shift_mask_val<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                  llty: Type,
                                  mask_llty: Type,
                                  invert: bool) -> ValueRef {
    let kind = llty.kind();
    match kind {
        TypeKind::Integer => {
            // i8/u8 can shift by at most 7, i16/u16 by at most 15, etc.
            let val = llty.int_width() - 1;
            if invert {
                C_integral(mask_llty, !val, true)
            } else {
                C_integral(mask_llty, val, false)
            }
        },
        TypeKind::Vector => {
            let mask = shift_mask_val(bcx, llty.element_type(), mask_llty.element_type(), invert);
            build::VectorSplat(bcx, mask_llty.vector_length(), mask)
        },
        _ => bug!("shift_mask_val: expected Integer or Vector, found {:?}", kind),
    }
}
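// Illustrative note (not part of the original source): for a 32-bit integer
// `shift_mask_val` produces the mask 31 (0b11111), so masking the RHS turns an
// out-of-range shift such as `x << 35` into `x << 3` instead of LLVM undefined
// behaviour, matching the Java-like semantics described above.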