// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case)]

use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use trans::common::*;
use syntax::codemap::Span;
use trans::builder::Builder;
use trans::type_::Type;
use trans::debuginfo::DebugLoc;
use libc::{c_uint, c_char};
27 pub fn terminate(cx
: Block
, _
: &str) {
28 debug
!("terminate({})", cx
.to_str());
29 cx
.terminated
.set(true);
32 pub fn check_not_terminated(cx
: Block
) {
33 if cx
.terminated
.get() {
34 panic
!("already terminated!");
38 pub fn B
<'blk
, 'tcx
>(cx
: Block
<'blk
, 'tcx
>) -> Builder
<'blk
, 'tcx
> {
39 let b
= cx
.fcx
.ccx
.builder();
40 b
.position_at_end(cx
.llbb
);
// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements in the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (panic/break/return statements, call to diverging functions, etc), and
// further instructions to the block should simply be ignored.
52 pub fn RetVoid(cx
: Block
, debug_loc
: DebugLoc
) {
53 if cx
.unreachable
.get() {
56 check_not_terminated(cx
);
57 terminate(cx
, "RetVoid");
58 debug_loc
.apply(cx
.fcx
);
62 pub fn Ret(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) {
63 if cx
.unreachable
.get() {
66 check_not_terminated(cx
);
68 debug_loc
.apply(cx
.fcx
);
72 pub fn AggregateRet(cx
: Block
,
73 ret_vals
: &[ValueRef
],
74 debug_loc
: DebugLoc
) {
75 if cx
.unreachable
.get() {
78 check_not_terminated(cx
);
79 terminate(cx
, "AggregateRet");
80 debug_loc
.apply(cx
.fcx
);
81 B(cx
).aggregate_ret(ret_vals
);
84 pub fn Br(cx
: Block
, dest
: BasicBlockRef
, debug_loc
: DebugLoc
) {
85 if cx
.unreachable
.get() {
88 check_not_terminated(cx
);
90 debug_loc
.apply(cx
.fcx
);
94 pub fn CondBr(cx
: Block
,
98 debug_loc
: DebugLoc
) {
99 if cx
.unreachable
.get() {
102 check_not_terminated(cx
);
103 terminate(cx
, "CondBr");
104 debug_loc
.apply(cx
.fcx
);
105 B(cx
).cond_br(if_
, then
, else_
);
108 pub fn Switch(cx
: Block
, v
: ValueRef
, else_
: BasicBlockRef
, num_cases
: usize)
110 if cx
.unreachable
.get() { return _Undef(v); }
111 check_not_terminated(cx
);
112 terminate(cx
, "Switch");
113 B(cx
).switch(v
, else_
, num_cases
)
116 pub fn AddCase(s
: ValueRef
, on_val
: ValueRef
, dest
: BasicBlockRef
) {
118 if llvm
::LLVMIsUndef(s
) == llvm
::True { return; }
119 llvm
::LLVMAddCase(s
, on_val
, dest
);
123 pub fn IndirectBr(cx
: Block
,
126 debug_loc
: DebugLoc
) {
127 if cx
.unreachable
.get() {
130 check_not_terminated(cx
);
131 terminate(cx
, "IndirectBr");
132 debug_loc
.apply(cx
.fcx
);
133 B(cx
).indirect_br(addr
, num_dests
);
136 pub fn Invoke(cx
: Block
,
140 catch: BasicBlockRef
,
141 attributes
: Option
<AttrBuilder
>,
144 if cx
.unreachable
.get() {
145 return C_null(Type
::i8(cx
.ccx()));
147 check_not_terminated(cx
);
148 terminate(cx
, "Invoke");
149 debug
!("Invoke({} with arguments ({}))",
150 cx
.val_to_string(fn_
),
151 args
.iter().map(|a
| cx
.val_to_string(*a
)).collect
::<Vec
<String
>>().connect(", "));
152 debug_loc
.apply(cx
.fcx
);
153 B(cx
).invoke(fn_
, args
, then
, catch, attributes
)
156 pub fn Unreachable(cx
: Block
) {
157 if cx
.unreachable
.get() {
160 cx
.unreachable
.set(true);
161 if !cx
.terminated
.get() {
166 pub fn _Undef(val
: ValueRef
) -> ValueRef
{
168 return llvm
::LLVMGetUndef(val_ty(val
).to_ref());
173 pub fn Add(cx
: Block
,
178 if cx
.unreachable
.get() {
181 debug_loc
.apply(cx
.fcx
);
185 pub fn NSWAdd(cx
: Block
,
190 if cx
.unreachable
.get() {
193 debug_loc
.apply(cx
.fcx
);
194 B(cx
).nswadd(lhs
, rhs
)
197 pub fn NUWAdd(cx
: Block
,
202 if cx
.unreachable
.get() {
205 debug_loc
.apply(cx
.fcx
);
206 B(cx
).nuwadd(lhs
, rhs
)
209 pub fn FAdd(cx
: Block
,
214 if cx
.unreachable
.get() {
217 debug_loc
.apply(cx
.fcx
);
221 pub fn Sub(cx
: Block
,
226 if cx
.unreachable
.get() {
229 debug_loc
.apply(cx
.fcx
);
233 pub fn NSWSub(cx
: Block
,
238 if cx
.unreachable
.get() {
241 debug_loc
.apply(cx
.fcx
);
242 B(cx
).nswsub(lhs
, rhs
)
245 pub fn NUWSub(cx
: Block
,
250 if cx
.unreachable
.get() {
253 debug_loc
.apply(cx
.fcx
);
254 B(cx
).nuwsub(lhs
, rhs
)
257 pub fn FSub(cx
: Block
,
262 if cx
.unreachable
.get() {
265 debug_loc
.apply(cx
.fcx
);
269 pub fn Mul(cx
: Block
,
274 if cx
.unreachable
.get() {
277 debug_loc
.apply(cx
.fcx
);
281 pub fn NSWMul(cx
: Block
,
286 if cx
.unreachable
.get() {
289 debug_loc
.apply(cx
.fcx
);
290 B(cx
).nswmul(lhs
, rhs
)
293 pub fn NUWMul(cx
: Block
,
298 if cx
.unreachable
.get() {
301 debug_loc
.apply(cx
.fcx
);
302 B(cx
).nuwmul(lhs
, rhs
)
305 pub fn FMul(cx
: Block
,
310 if cx
.unreachable
.get() {
313 debug_loc
.apply(cx
.fcx
);
317 pub fn UDiv(cx
: Block
,
322 if cx
.unreachable
.get() {
325 debug_loc
.apply(cx
.fcx
);
329 pub fn SDiv(cx
: Block
,
334 if cx
.unreachable
.get() {
337 debug_loc
.apply(cx
.fcx
);
341 pub fn ExactSDiv(cx
: Block
,
346 if cx
.unreachable
.get() {
349 debug_loc
.apply(cx
.fcx
);
350 B(cx
).exactsdiv(lhs
, rhs
)
353 pub fn FDiv(cx
: Block
,
358 if cx
.unreachable
.get() {
361 debug_loc
.apply(cx
.fcx
);
365 pub fn URem(cx
: Block
,
370 if cx
.unreachable
.get() {
373 debug_loc
.apply(cx
.fcx
);
377 pub fn SRem(cx
: Block
,
382 if cx
.unreachable
.get() {
385 debug_loc
.apply(cx
.fcx
);
389 pub fn FRem(cx
: Block
,
394 if cx
.unreachable
.get() {
397 debug_loc
.apply(cx
.fcx
);
401 pub fn Shl(cx
: Block
,
406 if cx
.unreachable
.get() {
409 debug_loc
.apply(cx
.fcx
);
413 pub fn LShr(cx
: Block
,
418 if cx
.unreachable
.get() {
421 debug_loc
.apply(cx
.fcx
);
425 pub fn AShr(cx
: Block
,
430 if cx
.unreachable
.get() {
433 debug_loc
.apply(cx
.fcx
);
437 pub fn And(cx
: Block
,
442 if cx
.unreachable
.get() {
445 debug_loc
.apply(cx
.fcx
);
454 if cx
.unreachable
.get() {
457 debug_loc
.apply(cx
.fcx
);
461 pub fn Xor(cx
: Block
,
466 if cx
.unreachable
.get() {
469 debug_loc
.apply(cx
.fcx
);
473 pub fn BinOp(cx
: Block
,
479 if cx
.unreachable
.get() {
482 debug_loc
.apply(cx
.fcx
);
483 B(cx
).binop(op
, lhs
, rhs
)
486 pub fn Neg(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
487 if cx
.unreachable
.get() {
490 debug_loc
.apply(cx
.fcx
);
494 pub fn NSWNeg(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
495 if cx
.unreachable
.get() {
498 debug_loc
.apply(cx
.fcx
);
502 pub fn NUWNeg(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
503 if cx
.unreachable
.get() {
506 debug_loc
.apply(cx
.fcx
);
509 pub fn FNeg(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
510 if cx
.unreachable
.get() {
513 debug_loc
.apply(cx
.fcx
);
517 pub fn Not(cx
: Block
, v
: ValueRef
, debug_loc
: DebugLoc
) -> ValueRef
{
518 if cx
.unreachable
.get() {
521 debug_loc
.apply(cx
.fcx
);
526 pub fn Malloc(cx
: Block
, ty
: Type
, debug_loc
: DebugLoc
) -> ValueRef
{
528 if cx
.unreachable
.get() {
529 return llvm
::LLVMGetUndef(Type
::i8p(cx
.ccx()).to_ref());
531 debug_loc
.apply(cx
.fcx
);
536 pub fn ArrayMalloc(cx
: Block
,
539 debug_loc
: DebugLoc
) -> ValueRef
{
541 if cx
.unreachable
.get() {
542 return llvm
::LLVMGetUndef(Type
::i8p(cx
.ccx()).to_ref());
544 debug_loc
.apply(cx
.fcx
);
545 B(cx
).array_malloc(ty
, val
)
549 pub fn Alloca(cx
: Block
, ty
: Type
, name
: &str) -> ValueRef
{
551 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
552 AllocaFcx(cx
.fcx
, ty
, name
)
556 pub fn AllocaFcx(fcx
: &FunctionContext
, ty
: Type
, name
: &str) -> ValueRef
{
557 let b
= fcx
.ccx
.builder();
558 b
.position_before(fcx
.alloca_insert_pt
.get().unwrap());
559 DebugLoc
::None
.apply(fcx
);
563 pub fn ArrayAlloca(cx
: Block
, ty
: Type
, val
: ValueRef
) -> ValueRef
{
565 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
566 let b
= cx
.fcx
.ccx
.builder();
567 b
.position_before(cx
.fcx
.alloca_insert_pt
.get().unwrap());
568 DebugLoc
::None
.apply(cx
.fcx
);
569 b
.array_alloca(ty
, val
)
573 pub fn Free(cx
: Block
, pointer_val
: ValueRef
) {
574 if cx
.unreachable
.get() { return; }
575 B(cx
).free(pointer_val
)
578 pub fn Load(cx
: Block
, pointer_val
: ValueRef
) -> ValueRef
{
580 let ccx
= cx
.fcx
.ccx
;
581 if cx
.unreachable
.get() {
582 let ty
= val_ty(pointer_val
);
583 let eltty
= if ty
.kind() == llvm
::Array
{
588 return llvm
::LLVMGetUndef(eltty
.to_ref());
590 B(cx
).load(pointer_val
)
594 pub fn VolatileLoad(cx
: Block
, pointer_val
: ValueRef
) -> ValueRef
{
596 if cx
.unreachable
.get() {
597 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
599 B(cx
).volatile_load(pointer_val
)
603 pub fn AtomicLoad(cx
: Block
, pointer_val
: ValueRef
, order
: AtomicOrdering
) -> ValueRef
{
605 let ccx
= cx
.fcx
.ccx
;
606 if cx
.unreachable
.get() {
607 return llvm
::LLVMGetUndef(ccx
.int_type().to_ref());
609 B(cx
).atomic_load(pointer_val
, order
)
614 pub fn LoadRangeAssert(cx
: Block
, pointer_val
: ValueRef
, lo
: u64,
615 hi
: u64, signed
: llvm
::Bool
) -> ValueRef
{
616 if cx
.unreachable
.get() {
617 let ccx
= cx
.fcx
.ccx
;
618 let ty
= val_ty(pointer_val
);
619 let eltty
= if ty
.kind() == llvm
::Array
{
625 llvm
::LLVMGetUndef(eltty
.to_ref())
628 B(cx
).load_range_assert(pointer_val
, lo
, hi
, signed
)
632 pub fn LoadNonNull(cx
: Block
, ptr
: ValueRef
) -> ValueRef
{
633 if cx
.unreachable
.get() {
634 let ccx
= cx
.fcx
.ccx
;
635 let ty
= val_ty(ptr
);
636 let eltty
= if ty
.kind() == llvm
::Array
{
642 llvm
::LLVMGetUndef(eltty
.to_ref())
645 B(cx
).load_nonnull(ptr
)
649 pub fn Store(cx
: Block
, val
: ValueRef
, ptr
: ValueRef
) -> ValueRef
{
650 if cx
.unreachable
.get() { return C_nil(cx.ccx()); }
651 B(cx
).store(val
, ptr
)
654 pub fn VolatileStore(cx
: Block
, val
: ValueRef
, ptr
: ValueRef
) -> ValueRef
{
655 if cx
.unreachable
.get() { return C_nil(cx.ccx()); }
656 B(cx
).volatile_store(val
, ptr
)
659 pub fn AtomicStore(cx
: Block
, val
: ValueRef
, ptr
: ValueRef
, order
: AtomicOrdering
) {
660 if cx
.unreachable
.get() { return; }
661 B(cx
).atomic_store(val
, ptr
, order
)
664 pub fn GEP(cx
: Block
, pointer
: ValueRef
, indices
: &[ValueRef
]) -> ValueRef
{
666 if cx
.unreachable
.get() {
667 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).ptr_to().to_ref());
669 B(cx
).gep(pointer
, indices
)
673 // Simple wrapper around GEP that takes an array of ints and wraps them
676 pub fn GEPi(cx
: Block
, base
: ValueRef
, ixs
: &[usize]) -> ValueRef
{
678 if cx
.unreachable
.get() {
679 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).ptr_to().to_ref());
681 B(cx
).gepi(base
, ixs
)
685 pub fn InBoundsGEP(cx
: Block
, pointer
: ValueRef
, indices
: &[ValueRef
]) -> ValueRef
{
687 if cx
.unreachable
.get() {
688 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).ptr_to().to_ref());
690 B(cx
).inbounds_gep(pointer
, indices
)
694 pub fn StructGEP(cx
: Block
, pointer
: ValueRef
, idx
: usize) -> ValueRef
{
696 if cx
.unreachable
.get() {
697 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).ptr_to().to_ref());
699 B(cx
).struct_gep(pointer
, idx
)
703 pub fn GlobalString(cx
: Block
, _str
: *const c_char
) -> ValueRef
{
705 if cx
.unreachable
.get() {
706 return llvm
::LLVMGetUndef(Type
::i8p(cx
.ccx()).to_ref());
708 B(cx
).global_string(_str
)
712 pub fn GlobalStringPtr(cx
: Block
, _str
: *const c_char
) -> ValueRef
{
714 if cx
.unreachable
.get() {
715 return llvm
::LLVMGetUndef(Type
::i8p(cx
.ccx()).to_ref());
717 B(cx
).global_string_ptr(_str
)
722 pub fn Trunc(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
724 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
725 B(cx
).trunc(val
, dest_ty
)
729 pub fn ZExt(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
731 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
732 B(cx
).zext(val
, dest_ty
)
736 pub fn SExt(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
738 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
739 B(cx
).sext(val
, dest_ty
)
743 pub fn FPToUI(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
745 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
746 B(cx
).fptoui(val
, dest_ty
)
750 pub fn FPToSI(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
752 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
753 B(cx
).fptosi(val
, dest_ty
)
757 pub fn UIToFP(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
759 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
760 B(cx
).uitofp(val
, dest_ty
)
764 pub fn SIToFP(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
766 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
767 B(cx
).sitofp(val
, dest_ty
)
771 pub fn FPTrunc(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
773 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
774 B(cx
).fptrunc(val
, dest_ty
)
778 pub fn FPExt(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
780 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
781 B(cx
).fpext(val
, dest_ty
)
785 pub fn PtrToInt(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
787 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
788 B(cx
).ptrtoint(val
, dest_ty
)
792 pub fn IntToPtr(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
794 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
795 B(cx
).inttoptr(val
, dest_ty
)
799 pub fn BitCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
801 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
802 B(cx
).bitcast(val
, dest_ty
)
806 pub fn ZExtOrBitCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
808 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
809 B(cx
).zext_or_bitcast(val
, dest_ty
)
813 pub fn SExtOrBitCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
815 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
816 B(cx
).sext_or_bitcast(val
, dest_ty
)
820 pub fn TruncOrBitCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
822 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
823 B(cx
).trunc_or_bitcast(val
, dest_ty
)
827 pub fn Cast(cx
: Block
, op
: Opcode
, val
: ValueRef
, dest_ty
: Type
,
831 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
832 B(cx
).cast(op
, val
, dest_ty
)
836 pub fn PointerCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
838 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
839 B(cx
).pointercast(val
, dest_ty
)
843 pub fn IntCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
845 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
846 B(cx
).intcast(val
, dest_ty
)
850 pub fn FPCast(cx
: Block
, val
: ValueRef
, dest_ty
: Type
) -> ValueRef
{
852 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
853 B(cx
).fpcast(val
, dest_ty
)
859 pub fn ICmp(cx
: Block
,
866 if cx
.unreachable
.get() {
867 return llvm
::LLVMGetUndef(Type
::i1(cx
.ccx()).to_ref());
869 debug_loc
.apply(cx
.fcx
);
870 B(cx
).icmp(op
, lhs
, rhs
)
874 pub fn FCmp(cx
: Block
,
881 if cx
.unreachable
.get() {
882 return llvm
::LLVMGetUndef(Type
::i1(cx
.ccx()).to_ref());
884 debug_loc
.apply(cx
.fcx
);
885 B(cx
).fcmp(op
, lhs
, rhs
)
/* Miscellaneous instructions */
890 pub fn EmptyPhi(cx
: Block
, ty
: Type
) -> ValueRef
{
892 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
897 pub fn Phi(cx
: Block
, ty
: Type
, vals
: &[ValueRef
],
898 bbs
: &[BasicBlockRef
]) -> ValueRef
{
900 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
901 B(cx
).phi(ty
, vals
, bbs
)
905 pub fn AddIncomingToPhi(phi
: ValueRef
, val
: ValueRef
, bb
: BasicBlockRef
) {
907 if llvm
::LLVMIsUndef(phi
) == llvm
::True { return; }
908 llvm
::LLVMAddIncoming(phi
, &val
, &bb
, 1 as c_uint
);
912 pub fn _UndefReturn(cx
: Block
, fn_
: ValueRef
) -> ValueRef
{
914 let ccx
= cx
.fcx
.ccx
;
915 let ty
= val_ty(fn_
);
916 let retty
= if ty
.kind() == llvm
::Function
{
921 B(cx
).count_insn("ret_undef");
922 llvm
::LLVMGetUndef(retty
.to_ref())
926 pub fn add_span_comment(cx
: Block
, sp
: Span
, text
: &str) {
927 B(cx
).add_span_comment(sp
, text
)
930 pub fn add_comment(cx
: Block
, text
: &str) {
931 B(cx
).add_comment(text
)
934 pub fn InlineAsmCall(cx
: Block
, asm
: *const c_char
, cons
: *const c_char
,
935 inputs
: &[ValueRef
], output
: Type
,
936 volatile
: bool
, alignstack
: bool
,
937 dia
: AsmDialect
) -> ValueRef
{
938 B(cx
).inline_asm_call(asm
, cons
, inputs
, output
, volatile
, alignstack
, dia
)
941 pub fn Call(cx
: Block
,
944 attributes
: Option
<AttrBuilder
>,
947 if cx
.unreachable
.get() {
948 return _UndefReturn(cx
, fn_
);
950 debug_loc
.apply(cx
.fcx
);
951 B(cx
).call(fn_
, args
, attributes
)
954 pub fn CallWithConv(cx
: Block
,
958 attributes
: Option
<AttrBuilder
>,
961 if cx
.unreachable
.get() {
962 return _UndefReturn(cx
, fn_
);
964 debug_loc
.apply(cx
.fcx
);
965 B(cx
).call_with_conv(fn_
, args
, conv
, attributes
)
968 pub fn AtomicFence(cx
: Block
, order
: AtomicOrdering
, scope
: SynchronizationScope
) {
969 if cx
.unreachable
.get() { return; }
970 B(cx
).atomic_fence(order
, scope
)
973 pub fn Select(cx
: Block
, if_
: ValueRef
, then
: ValueRef
, else_
: ValueRef
) -> ValueRef
{
974 if cx
.unreachable
.get() { return _Undef(then); }
975 B(cx
).select(if_
, then
, else_
)
978 pub fn VAArg(cx
: Block
, list
: ValueRef
, ty
: Type
) -> ValueRef
{
980 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
981 B(cx
).va_arg(list
, ty
)
985 pub fn ExtractElement(cx
: Block
, vec_val
: ValueRef
, index
: ValueRef
) -> ValueRef
{
987 if cx
.unreachable
.get() {
988 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
990 B(cx
).extract_element(vec_val
, index
)
994 pub fn InsertElement(cx
: Block
, vec_val
: ValueRef
, elt_val
: ValueRef
,
995 index
: ValueRef
) -> ValueRef
{
997 if cx
.unreachable
.get() {
998 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
1000 B(cx
).insert_element(vec_val
, elt_val
, index
)
1004 pub fn ShuffleVector(cx
: Block
, v1
: ValueRef
, v2
: ValueRef
,
1005 mask
: ValueRef
) -> ValueRef
{
1007 if cx
.unreachable
.get() {
1008 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
1010 B(cx
).shuffle_vector(v1
, v2
, mask
)
1014 pub fn VectorSplat(cx
: Block
, num_elts
: usize, elt_val
: ValueRef
) -> ValueRef
{
1016 if cx
.unreachable
.get() {
1017 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
1019 B(cx
).vector_splat(num_elts
, elt_val
)
1023 pub fn ExtractValue(cx
: Block
, agg_val
: ValueRef
, index
: usize) -> ValueRef
{
1025 if cx
.unreachable
.get() {
1026 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
1028 B(cx
).extract_value(agg_val
, index
)
1032 pub fn InsertValue(cx
: Block
, agg_val
: ValueRef
, elt_val
: ValueRef
, index
: usize) -> ValueRef
{
1034 if cx
.unreachable
.get() {
1035 return llvm
::LLVMGetUndef(Type
::nil(cx
.ccx()).to_ref());
1037 B(cx
).insert_value(agg_val
, elt_val
, index
)
1041 pub fn IsNull(cx
: Block
, val
: ValueRef
) -> ValueRef
{
1043 if cx
.unreachable
.get() {
1044 return llvm
::LLVMGetUndef(Type
::i1(cx
.ccx()).to_ref());
1050 pub fn IsNotNull(cx
: Block
, val
: ValueRef
) -> ValueRef
{
1052 if cx
.unreachable
.get() {
1053 return llvm
::LLVMGetUndef(Type
::i1(cx
.ccx()).to_ref());
1055 B(cx
).is_not_null(val
)
1059 pub fn PtrDiff(cx
: Block
, lhs
: ValueRef
, rhs
: ValueRef
) -> ValueRef
{
1061 let ccx
= cx
.fcx
.ccx
;
1062 if cx
.unreachable
.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
1063 B(cx
).ptrdiff(lhs
, rhs
)
1067 pub fn Trap(cx
: Block
) {
1068 if cx
.unreachable
.get() { return; }
1072 pub fn LandingPad(cx
: Block
, ty
: Type
, pers_fn
: ValueRef
,
1073 num_clauses
: usize) -> ValueRef
{
1074 check_not_terminated(cx
);
1075 assert
!(!cx
.unreachable
.get());
1076 B(cx
).landing_pad(ty
, pers_fn
, num_clauses
)
1079 pub fn SetCleanup(cx
: Block
, landing_pad
: ValueRef
) {
1080 B(cx
).set_cleanup(landing_pad
)
1083 pub fn Resume(cx
: Block
, exn
: ValueRef
) -> ValueRef
{
1084 check_not_terminated(cx
);
1085 terminate(cx
, "Resume");
1089 // Atomic Operations
1090 pub fn AtomicCmpXchg(cx
: Block
, dst
: ValueRef
,
1091 cmp
: ValueRef
, src
: ValueRef
,
1092 order
: AtomicOrdering
,
1093 failure_order
: AtomicOrdering
) -> ValueRef
{
1094 B(cx
).atomic_cmpxchg(dst
, cmp
, src
, order
, failure_order
)
1096 pub fn AtomicRMW(cx
: Block
, op
: AtomicBinOp
,
1097 dst
: ValueRef
, src
: ValueRef
,
1098 order
: AtomicOrdering
) -> ValueRef
{
1099 B(cx
).atomic_rmw(op
, dst
, src
, order
)