1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 #![allow(dead_code)] // FFI wrappers
12 #![allow(non_snake_case)]
use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use libc::{c_uint, c_char};
use syntax::codemap::Span;
use trans::builder::Builder;
use trans::common::*;
use trans::debuginfo::DebugLoc;
use trans::type_::Type;
27 pub fn terminate(cx
: Block
, _
: &str) {
28 debug
!("terminate({})", cx
.to_str());
29 cx
.terminated
.set(true);
32 pub fn check_not_terminated(cx
: Block
) {
33 if cx
.terminated
.get() {
34 panic
!("already terminated!");
38 pub fn B
<'blk
, 'tcx
>(cx
: Block
<'blk
, 'tcx
>) -> Builder
<'blk
, 'tcx
> {
39 let b
= cx
.fcx
.ccx
.builder();
40 b
.position_at_end(cx
.llbb
);
44 // The difference between a block being unreachable and being terminated is
45 // somewhat obscure, and has to do with error checking. When a block is
46 // terminated, we're saying that trying to add any further statements in the
47 // block is an error. On the other hand, if something is unreachable, that
48 // means that the block was terminated in some way that we don't want to check
49 // for (panic/break/return statements, call to diverging functions, etc), and
50 // further instructions to the block should simply be ignored.
52 pub fn RetVoid(cx
: Block
, debug_loc
: DebugLoc
) {
53 if cx
.unreachable
.get() {
56 check_not_terminated(cx
);
57 terminate(cx
, "RetVoid");
58 debug_loc
.apply(cx
.fcx
);
62 pub fn Ret(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) {
63 if cx
.unreachable
.get() {
66 check_not_terminated(cx
);
68 debug_loc
.apply(cx
.fcx
);
72 pub fn AggregateRet(cx
: Block
,
73 ret_vals
: &[ValueRef
],
74 debug_loc
: DebugLoc
) {
75 if cx
.unreachable
.get() {
78 check_not_terminated(cx
);
79 terminate(cx
, "AggregateRet");
80 debug_loc
.apply(cx
.fcx
);
81 B(cx
).aggregate_ret(ret_vals
);
84 pub fn Br(cx
: Block
, dest
: BasicBlockRef
, debug_loc
: DebugLoc
) {
85 if cx
.unreachable
.get() {
88 check_not_terminated(cx
);
90 debug_loc
.apply(cx
.fcx
);
94 pub fn CondBr(cx
: Block
,
98 debug_loc
: DebugLoc
) {
99 if cx
.unreachable
.get() {
102 check_not_terminated(cx
);
103 terminate(cx
, "CondBr");
104 debug_loc
.apply(cx
.fcx
);
105 B(cx
).cond_br(if_
, then
, else_
);
108 pub fn Switch(cx
: Block
, v
: ValueRef
, else_
: BasicBlockRef
, num_cases
: usize)
110 if cx
.unreachable
.get() { return _Undef(v); }
111 check_not_terminated(cx
);
112 terminate(cx
, "Switch");
113 B(cx
).switch(v
, else_
, num_cases
)
116 pub fn AddCase(s
: ValueRef
, on_val
: ValueRef
, dest
: BasicBlockRef
) {
118 if llvm
::LLVMIsUndef(s
) == llvm
::True { return; }
119 llvm
::LLVMAddCase(s
, on_val
, dest
);
123 pub fn IndirectBr(cx
: Block
,
126 debug_loc
: DebugLoc
) {
127 if cx
.unreachable
.get() {
130 check_not_terminated(cx
);
131 terminate(cx
, "IndirectBr");
132 debug_loc
.apply(cx
.fcx
);
133 B(cx
).indirect_br(addr
, num_dests
);
136 pub fn Invoke(cx
: Block
,
140 catch: BasicBlockRef
,
141 attributes
: Option
<AttrBuilder
>,
144 if cx
.unreachable
.get() {
145 return C_null(Type
::i8(cx
.ccx()));
147 check_not_terminated(cx
);
148 terminate(cx
, "Invoke");
149 debug
!("Invoke({} with arguments ({}))",
150 cx
.val_to_string(fn_
),
151 args
.iter().map(|a
| cx
.val_to_string(*a
)).collect
::<Vec
<String
>>().connect(", "));
152 debug_loc
.apply(cx
.fcx
);
153 B(cx
).invoke(fn_
, args
, then
, catch, attributes
)
156 pub fn Unreachable(cx
: Block
) {
157 if cx
.unreachable
.get() {
160 cx
.unreachable
.set(true);
161 if !cx
.terminated
.get() {
166 pub fn _Undef(val
: ValueRef
) -> ValueRef
{
168 return llvm
::LLVMGetUndef(val_ty(val
).to_ref());
173 pub fn Add(cx
: Block
,
178 if cx
.unreachable
.get() {
181 debug_loc
.apply(cx
.fcx
);
185 pub fn NSWAdd(cx
: Block
,
190 if cx
.unreachable
.get() {
193 debug_loc
.apply(cx
.fcx
);
194 B(cx
).nswadd(lhs
, rhs
)
197 pub fn NUWAdd(cx
: Block
,
202 if cx
.unreachable
.get() {
205 debug_loc
.apply(cx
.fcx
);
206 B(cx
).nuwadd(lhs
, rhs
)
209 pub fn FAdd(cx
: Block
,
214 if cx
.unreachable
.get() {
217 debug_loc
.apply(cx
.fcx
);
221 pub fn Sub(cx
: Block
,
226 if cx
.unreachable
.get() {
229 debug_loc
.apply(cx
.fcx
);
233 pub fn NSWSub(cx
: Block
,
238 if cx
.unreachable
.get() {
241 debug_loc
.apply(cx
.fcx
);
242 B(cx
).nswsub(lhs
, rhs
)
245 pub fn NUWSub(cx
: Block
,
250 if cx
.unreachable
.get() {
253 debug_loc
.apply(cx
.fcx
);
254 B(cx
).nuwsub(lhs
, rhs
)
257 pub fn FSub(cx
: Block
,
262 if cx
.unreachable
.get() {
265 debug_loc
.apply(cx
.fcx
);
269 pub fn Mul(cx
: Block
,
274 if cx
.unreachable
.get() {
277 debug_loc
.apply(cx
.fcx
);
281 pub fn NSWMul(cx
: Block
,
286 if cx
.unreachable
.get() {
289 debug_loc
.apply(cx
.fcx
);
290 B(cx
).nswmul(lhs
, rhs
)
293 pub fn NUWMul(cx
: Block
,
298 if cx
.unreachable
.get() {
301 debug_loc
.apply(cx
.fcx
);
302 B(cx
).nuwmul(lhs
, rhs
)
305 pub fn FMul(cx
: Block
,
310 if cx
.unreachable
.get() {
313 debug_loc
.apply(cx
.fcx
);
317 pub fn UDiv(cx
: Block
,
322 if cx
.unreachable
.get() {
325 debug_loc
.apply(cx
.fcx
);
329 pub fn SDiv(cx
: Block
,
334 if cx
.unreachable
.get() {
337 debug_loc
.apply(cx
.fcx
);
341 pub fn ExactSDiv(cx
: Block
,
346 if cx
.unreachable
.get() {
349 debug_loc
.apply(cx
.fcx
);
350 B(cx
).exactsdiv(lhs
, rhs
)
353 pub fn FDiv(cx
: Block
,
358 if cx
.unreachable
.get() {
361 debug_loc
.apply(cx
.fcx
);
365 pub fn URem(cx
: Block
,
370 if cx
.unreachable
.get() {
373 debug_loc
.apply(cx
.fcx
);
377 pub fn SRem(cx
: Block
,
382 if cx
.unreachable
.get() {
385 debug_loc
.apply(cx
.fcx
);
389 pub fn FRem(cx
: Block
,
394 if cx
.unreachable
.get() {
397 debug_loc
.apply(cx
.fcx
);
401 pub fn Shl(cx
: Block
,
406 if cx
.unreachable
.get() {
409 debug_loc
.apply(cx
.fcx
);
413 pub fn LShr(cx
: Block
,
418 if cx
.unreachable
.get() {
421 debug_loc
.apply(cx
.fcx
);
425 pub fn AShr(cx
: Block
,
430 if cx
.unreachable
.get() {
433 debug_loc
.apply(cx
.fcx
);
437 pub fn And(cx
: Block
,
442 if cx
.unreachable
.get() {
445 debug_loc
.apply(cx
.fcx
);
454 if cx
.unreachable
.get() {
457 debug_loc
.apply(cx
.fcx
);
461 pub fn Xor(cx
: Block
,
466 if cx
.unreachable
.get() {
469 debug_loc
.apply(cx
.fcx
);
473 pub fn BinOp(cx
: Block
,
479 if cx
.unreachable
.get() {
482 debug_loc
.apply(cx
.fcx
);
483 B(cx
).binop(op
, lhs
, rhs
)
486 pub fn Neg(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
487 if cx
.unreachable
.get() {
490 debug_loc
.apply(cx
.fcx
);
494 pub fn NSWNeg(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
495 if cx
.unreachable
.get() {
498 debug_loc
.apply(cx
.fcx
);
502 pub fn NUWNeg(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
503 if cx
.unreachable
.get() {
506 debug_loc
.apply(cx
.fcx
);
509 pub fn FNeg(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
510 if cx
.unreachable
.get() {
513 debug_loc
.apply(cx
.fcx
);
517 pub fn Not(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
518 if cx
.unreachable
.get() {
521 debug_loc
.apply(cx
.fcx
);
525 pub fn Alloca(cx
: Block
, ty
: Type
, name
: &str) -> ValueRef
{
527 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
528 AllocaFcx(cx
.fcx
, ty
, name
)
532 pub fn AllocaFcx(fcx
: &FunctionContext
, ty
: Type
, name
: &str) -> ValueRef
{
533 let b
= fcx
.ccx
.builder();
534 b
.position_before(fcx
.alloca_insert_pt
.get().unwrap());
535 DebugLoc
::None
.apply(fcx
);
539 pub fn Free(cx
: Block
, pointer_val
: ValueRef
) {
540 if cx
.unreachable
.get() { return; }
541 B(cx
).free(pointer_val
)
544 pub fn Load(cx
: Block
, pointer_val
: ValueRef
) -> ValueRef
{
546 let ccx
= cx
.fcx
.ccx
;
547 if cx
.unreachable
.get() {
548 let ty
= val_ty(pointer_val
);
549 let eltty
= if ty
.kind() == llvm
::Array
{
554 return llvm
::LLVMGetUndef(eltty
.to_ref());
556 B(cx
).load(pointer_val
)
560 pub fn VolatileLoad(cx
: Block
, pointer_val
: ValueRef
) -> ValueRef
{
562 if cx
.unreachable
.get() {
563 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
565 B(cx
).volatile_load(pointer_val
)
569 pub fn AtomicLoad(cx
: Block
, pointer_val
: ValueRef
, order
: AtomicOrdering
) -> ValueRef
{
571 let ccx
= cx
.fcx
.ccx
;
572 if cx
.unreachable
.get() {
573 return llvm
::LLVMGetUndef(ccx
.int_type().to_ref());
575 B(cx
).atomic_load(pointer_val
, order
)
580 pub fn LoadRangeAssert(cx
: Block
, pointer_val
: ValueRef
, lo
: u64,
581 hi
: u64, signed
: llvm
::Bool
) -> ValueRef
{
582 if cx
.unreachable
.get() {
583 let ccx
= cx
.fcx
.ccx
;
584 let ty
= val_ty(pointer_val
);
585 let eltty
= if ty
.kind() == llvm
::Array
{
591 llvm
::LLVMGetUndef(eltty
.to_ref())
594 B(cx
).load_range_assert(pointer_val
, lo
, hi
, signed
)
598 pub fn LoadNonNull(cx
: Block
, ptr
: ValueRef
) -> ValueRef
{
599 if cx
.unreachable
.get() {
600 let ccx
= cx
.fcx
.ccx
;
601 let ty
= val_ty(ptr
);
602 let eltty
= if ty
.kind() == llvm
::Array
{
608 llvm
::LLVMGetUndef(eltty
.to_ref())
611 B(cx
).load_nonnull(ptr
)
615 pub fn Store(cx
: Block
, val
: ValueRef
, ptr
: ValueRef
) -> ValueRef
{
616 if cx
.unreachable
.get() { return C_nil(cx.ccx()); }
617 B(cx
).store(val
, ptr
)
620 pub fn VolatileStore(cx
: Block
, val
: ValueRef
, ptr
: ValueRef
) -> ValueRef
{
621 if cx
.unreachable
.get() { return C_nil(cx.ccx()); }
622 B(cx
).volatile_store(val
, ptr
)
625 pub fn AtomicStore(cx
: Block
, val
: ValueRef
, ptr
: ValueRef
, order
: AtomicOrdering
) {
626 if cx
.unreachable
.get() { return; }
627 B(cx
).atomic_store(val
, ptr
, order
)
630 pub fn GEP(cx
: Block
, pointer
: ValueRef
, indices
: &[ValueRef
]) -> ValueRef
{
632 if cx
.unreachable
.get() {
633 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).ptr_to().to_ref());
635 B(cx
).gep(pointer
, indices
)
639 // Simple wrapper around GEP that takes an array of ints and wraps them
642 pub fn GEPi(cx
: Block
, base
: ValueRef
, ixs
: &[usize]) -> ValueRef
{
644 if cx
.unreachable
.get() {
645 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).ptr_to().to_ref());
647 B(cx
).gepi(base
, ixs
)
651 pub fn InBoundsGEP(cx
: Block
, pointer
: ValueRef
, indices
: &[ValueRef
]) -> ValueRef
{
653 if cx
.unreachable
.get() {
654 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).ptr_to().to_ref());
656 B(cx
).inbounds_gep(pointer
, indices
)
660 pub fn StructGEP(cx
: Block
, pointer
: ValueRef
, idx
: usize) -> ValueRef
{
662 if cx
.unreachable
.get() {
663 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).ptr_to().to_ref());
665 B(cx
).struct_gep(pointer
, idx
)
669 pub fn GlobalString(cx
: Block
, _str
: *const c_char
) -> ValueRef
{
671 if cx
.unreachable
.get() {
672 return llvm
::LLVMGetUndef(Type
::i8p(cx
.ccx()).to_ref());
674 B(cx
).global_string(_str
)
678 pub fn GlobalStringPtr(cx
: Block
, _str
: *const c_char
) -> ValueRef
{
680 if cx
.unreachable
.get() {
681 return llvm
::LLVMGetUndef(Type
::i8p(cx
.ccx()).to_ref());
683 B(cx
).global_string_ptr(_str
)
688 pub fn Trunc(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
690 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
691 B(cx
).trunc(val
, dest_ty
)
695 pub fn ZExt(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
697 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
698 B(cx
).zext(val
, dest_ty
)
702 pub fn SExt(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
704 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
705 B(cx
).sext(val
, dest_ty
)
709 pub fn FPToUI(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
711 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
712 B(cx
).fptoui(val
, dest_ty
)
716 pub fn FPToSI(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
718 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
719 B(cx
).fptosi(val
, dest_ty
)
723 pub fn UIToFP(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
725 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
726 B(cx
).uitofp(val
, dest_ty
)
730 pub fn SIToFP(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
732 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
733 B(cx
).sitofp(val
, dest_ty
)
737 pub fn FPTrunc(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
739 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
740 B(cx
).fptrunc(val
, dest_ty
)
744 pub fn FPExt(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
746 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
747 B(cx
).fpext(val
, dest_ty
)
751 pub fn PtrToInt(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
753 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
754 B(cx
).ptrtoint(val
, dest_ty
)
758 pub fn IntToPtr(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
760 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
761 B(cx
).inttoptr(val
, dest_ty
)
765 pub fn BitCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
767 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
768 B(cx
).bitcast(val
, dest_ty
)
772 pub fn ZExtOrBitCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
774 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
775 B(cx
).zext_or_bitcast(val
, dest_ty
)
779 pub fn SExtOrBitCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
781 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
782 B(cx
).sext_or_bitcast(val
, dest_ty
)
786 pub fn TruncOrBitCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
788 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
789 B(cx
).trunc_or_bitcast(val
, dest_ty
)
793 pub fn Cast(cx
: Block
, op
: Opcode
, val
: ValueRef
, dest_ty
: Type
,
797 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
798 B(cx
).cast(op
, val
, dest_ty
)
802 pub fn PointerCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
804 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
805 B(cx
).pointercast(val
, dest_ty
)
809 pub fn IntCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
811 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
812 B(cx
).intcast(val
, dest_ty
)
816 pub fn FPCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
818 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
819 B(cx
).fpcast(val
, dest_ty
)
825 pub fn ICmp(cx
: Block
,
832 if cx
.unreachable
.get() {
833 return llvm
::LLVMGetUndef(Type
::i1(cx
.ccx()).to_ref());
835 debug_loc
.apply(cx
.fcx
);
836 B(cx
).icmp(op
, lhs
, rhs
)
840 pub fn FCmp(cx
: Block
,
847 if cx
.unreachable
.get() {
848 return llvm
::LLVMGetUndef(Type
::i1(cx
.ccx()).to_ref());
850 debug_loc
.apply(cx
.fcx
);
851 B(cx
).fcmp(op
, lhs
, rhs
)
855 /* Miscellaneous instructions */
856 pub fn EmptyPhi(cx
: Block
, ty
: Type
) -> ValueRef
{
858 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
863 pub fn Phi(cx
: Block
, ty
: Type
, vals
: &[ValueRef
],
864 bbs
: &[BasicBlockRef
]) -> ValueRef
{
866 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
867 B(cx
).phi(ty
, vals
, bbs
)
871 pub fn AddIncomingToPhi(phi
: ValueRef
, val
: ValueRef
, bb
: BasicBlockRef
) {
873 if llvm
::LLVMIsUndef(phi
) == llvm
::True { return; }
874 llvm
::LLVMAddIncoming(phi
, &val
, &bb
, 1 as c_uint
);
878 pub fn _UndefReturn(cx
: Block
, fn_
: ValueRef
) -> ValueRef
{
880 let ccx
= cx
.fcx
.ccx
;
881 let ty
= val_ty(fn_
);
882 let retty
= if ty
.kind() == llvm
::Function
{
887 B(cx
).count_insn("ret_undef");
888 llvm
::LLVMGetUndef(retty
.to_ref())
892 pub fn add_span_comment(cx
: Block
, sp
: Span
, text
: &str) {
893 B(cx
).add_span_comment(sp
, text
)
896 pub fn add_comment(cx
: Block
, text
: &str) {
897 B(cx
).add_comment(text
)
900 pub fn InlineAsmCall(cx
: Block
, asm
: *const c_char
, cons
: *const c_char
,
901 inputs
: &[ValueRef
], output
: Type
,
902 volatile
: bool
, alignstack
: bool
,
903 dia
: AsmDialect
) -> ValueRef
{
904 B(cx
).inline_asm_call(asm
, cons
, inputs
, output
, volatile
, alignstack
, dia
)
907 pub fn Call(cx
: Block
,
910 attributes
: Option
<AttrBuilder
>,
913 if cx
.unreachable
.get() {
914 return _UndefReturn(cx
, fn_
);
916 debug_loc
.apply(cx
.fcx
);
917 B(cx
).call(fn_
, args
, attributes
)
920 pub fn CallWithConv(cx
: Block
,
924 attributes
: Option
<AttrBuilder
>,
927 if cx
.unreachable
.get() {
928 return _UndefReturn(cx
, fn_
);
930 debug_loc
.apply(cx
.fcx
);
931 B(cx
).call_with_conv(fn_
, args
, conv
, attributes
)
934 pub fn AtomicFence(cx
: Block
, order
: AtomicOrdering
, scope
: SynchronizationScope
) {
935 if cx
.unreachable
.get() { return; }
936 B(cx
).atomic_fence(order
, scope
)
939 pub fn Select(cx
: Block
, if_
: ValueRef
, then
: ValueRef
, else_
: ValueRef
) -> ValueRef
{
940 if cx
.unreachable
.get() { return _Undef(then); }
941 B(cx
).select(if_
, then
, else_
)
944 pub fn VAArg(cx
: Block
, list
: ValueRef
, ty
: Type
) -> ValueRef
{
946 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
947 B(cx
).va_arg(list
, ty
)
951 pub fn ExtractElement(cx
: Block
, vec_val
: ValueRef
, index
: ValueRef
) -> ValueRef
{
953 if cx
.unreachable
.get() {
954 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
956 B(cx
).extract_element(vec_val
, index
)
960 pub fn InsertElement(cx
: Block
, vec_val
: ValueRef
, elt_val
: ValueRef
,
961 index
: ValueRef
) -> ValueRef
{
963 if cx
.unreachable
.get() {
964 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
966 B(cx
).insert_element(vec_val
, elt_val
, index
)
970 pub fn ShuffleVector(cx
: Block
, v1
: ValueRef
, v2
: ValueRef
,
971 mask
: ValueRef
) -> ValueRef
{
973 if cx
.unreachable
.get() {
974 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
976 B(cx
).shuffle_vector(v1
, v2
, mask
)
980 pub fn VectorSplat(cx
: Block
, num_elts
: usize, elt_val
: ValueRef
) -> ValueRef
{
982 if cx
.unreachable
.get() {
983 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
985 B(cx
).vector_splat(num_elts
, elt_val
)
989 pub fn ExtractValue(cx
: Block
, agg_val
: ValueRef
, index
: usize) -> ValueRef
{
991 if cx
.unreachable
.get() {
992 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
994 B(cx
).extract_value(agg_val
, index
)
998 pub fn InsertValue(cx
: Block
, agg_val
: ValueRef
, elt_val
: ValueRef
, index
: usize) -> ValueRef
{
1000 if cx
.unreachable
.get() {
1001 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
1003 B(cx
).insert_value(agg_val
, elt_val
, index
)
1007 pub fn IsNull(cx
: Block
, val
: ValueRef
) -> ValueRef
{
1009 if cx
.unreachable
.get() {
1010 return llvm
::LLVMGetUndef(Type
::i1(cx
.ccx()).to_ref());
1016 pub fn IsNotNull(cx
: Block
, val
: ValueRef
) -> ValueRef
{
1018 if cx
.unreachable
.get() {
1019 return llvm
::LLVMGetUndef(Type
::i1(cx
.ccx()).to_ref());
1021 B(cx
).is_not_null(val
)
1025 pub fn PtrDiff(cx
: Block
, lhs
: ValueRef
, rhs
: ValueRef
) -> ValueRef
{
1027 let ccx
= cx
.fcx
.ccx
;
1028 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
1029 B(cx
).ptrdiff(lhs
, rhs
)
1033 pub fn Trap(cx
: Block
) {
1034 if cx
.unreachable
.get() { return; }
1038 pub fn LandingPad(cx
: Block
, ty
: Type
, pers_fn
: ValueRef
,
1039 num_clauses
: usize) -> ValueRef
{
1040 check_not_terminated(cx
);
1041 assert
!(!cx
.unreachable
.get());
1042 B(cx
).landing_pad(ty
, pers_fn
, num_clauses
)
1045 pub fn SetCleanup(cx
: Block
, landing_pad
: ValueRef
) {
1046 B(cx
).set_cleanup(landing_pad
)
1049 pub fn Resume(cx
: Block
, exn
: ValueRef
) -> ValueRef
{
1050 check_not_terminated(cx
);
1051 terminate(cx
, "Resume");
1055 // Atomic Operations
1056 pub fn AtomicCmpXchg(cx
: Block
, dst
: ValueRef
,
1057 cmp
: ValueRef
, src
: ValueRef
,
1058 order
: AtomicOrdering
,
1059 failure_order
: AtomicOrdering
) -> ValueRef
{
1060 B(cx
).atomic_cmpxchg(dst
, cmp
, src
, order
, failure_order
)
1062 pub fn AtomicRMW(cx
: Block
, op
: AtomicBinOp
,
1063 dst
: ValueRef
, src
: ValueRef
,
1064 order
: AtomicOrdering
) -> ValueRef
{
1065 B(cx
).atomic_rmw(op
, dst
, src
, order
)