// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case)]

use llvm;
use llvm::{AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
// Assumed module paths from the surrounding crate: these supply Block,
// FunctionContext, C_null, C_nil, val_ty, Builder, Type and Value, all of
// which are used below.
use common::*;
use builder::Builder;
use type_::Type;
use value::Value;
use syntax::codemap::Span;
use debuginfo::DebugLoc;

use libc::{c_uint, c_char};

pub fn terminate(cx: Block, _: &str) {
    debug!("terminate({})", cx.to_str());
    cx.terminated.set(true);
}

pub fn check_not_terminated(cx: Block) {
    if cx.terminated.get() {
        bug!("already terminated!");
    }
}

pub fn B<'blk, 'tcx>(cx: Block<'blk, 'tcx>) -> Builder<'blk, 'tcx> {
    let b = cx.fcx.ccx.builder();
    b.position_at_end(cx.llbb);
    b
}

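// B(cx) hands back the crate context's builder, re-positioned at the end of
// `cx.llbb`, so every wrapper below emits at the current end of the block.
// A hypothetical call site (sketch; `bcx`, `lhs` and `rhs` are assumed names,
// not defined in this file):
//
//     let sum = Add(bcx, lhs, rhs, DebugLoc::None);
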
// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements in the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (panic/break/return statements, call to diverging functions, etc), and
// further instructions to the block should simply be ignored.

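// To illustrate the distinction (hypothetical sequence; names assumed):
//
//     RetVoid(bcx, DebugLoc::None);   // terminates bcx; adding more is a bug
//     Unreachable(bcx);               // additionally marks bcx unreachable
//     let v = Add(bcx, a, b, DebugLoc::None); // no-op: silently returns undef
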
pub fn RetVoid(cx: Block, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "RetVoid");
    debug_loc.apply(cx.fcx);
    B(cx).ret_void();
}

pub fn Ret(cx: Block, v: ValueRef, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "Ret");
    debug_loc.apply(cx.fcx);
    B(cx).ret(v);
}

pub fn AggregateRet(cx: Block,
                    ret_vals: &[ValueRef],
                    debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "AggregateRet");
    debug_loc.apply(cx.fcx);
    B(cx).aggregate_ret(ret_vals);
}

pub fn Br(cx: Block, dest: BasicBlockRef, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "Br");
    debug_loc.apply(cx.fcx);
    B(cx).br(dest);
}

pub fn CondBr(cx: Block,
              if_: ValueRef,
              then: BasicBlockRef,
              else_: BasicBlockRef,
              debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "CondBr");
    debug_loc.apply(cx.fcx);
    B(cx).cond_br(if_, then, else_);
}

pub fn Switch(cx: Block, v: ValueRef, else_: BasicBlockRef, num_cases: usize)
    -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    check_not_terminated(cx);
    terminate(cx, "Switch");
    B(cx).switch(v, else_, num_cases)
}

pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(s) == llvm::True { return; }
        llvm::LLVMAddCase(s, on_val, dest);
    }
}

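// Switch returns the new `switch` instruction so that cases can be attached
// afterwards with AddCase. A sketch (hypothetical blocks and discriminant):
//
//     let s = Switch(bcx, discr, default_bb, 2);
//     AddCase(s, C_i32(bcx.ccx(), 0), zero_bb);
//     AddCase(s, C_i32(bcx.ccx(), 1), one_bb);
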
pub fn IndirectBr(cx: Block,
                  addr: ValueRef,
                  num_dests: usize,
                  debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "IndirectBr");
    debug_loc.apply(cx.fcx);
    B(cx).indirect_br(addr, num_dests);
}

pub fn Invoke(cx: Block,
              fn_: ValueRef,
              args: &[ValueRef],
              then: BasicBlockRef,
              catch: BasicBlockRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return C_null(Type::i8(cx.ccx()));
    }
    check_not_terminated(cx);
    terminate(cx, "Invoke");
    debug!("Invoke({:?} with arguments ({}))",
           Value(fn_),
           args.iter().map(|a| {
               format!("{:?}", Value(*a))
           }).collect::<Vec<String>>().join(", "));
    debug_loc.apply(cx.fcx);
    let bundle = cx.lpad().and_then(|b| b.bundle());
    B(cx).invoke(fn_, args, then, catch, bundle)
}

pub fn Unreachable(cx: Block) {
    if cx.unreachable.get() {
        return;
    }
    cx.unreachable.set(true);
    if !cx.terminated.get() {
        B(cx).unreachable();
    }
}

pub fn _Undef(val: ValueRef) -> ValueRef {
    unsafe {
        return llvm::LLVMGetUndef(val_ty(val).to_ref());
    }
}

/* Arithmetic */

pub fn Add(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).add(lhs, rhs)
}

pub fn NSWAdd(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswadd(lhs, rhs)
}

pub fn NUWAdd(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwadd(lhs, rhs)
}

pub fn FAdd(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fadd(lhs, rhs)
}

pub fn FAddFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fadd_fast(lhs, rhs)
}

pub fn Sub(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).sub(lhs, rhs)
}

pub fn NSWSub(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswsub(lhs, rhs)
}

pub fn NUWSub(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwsub(lhs, rhs)
}

pub fn FSub(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fsub(lhs, rhs)
}

pub fn FSubFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fsub_fast(lhs, rhs)
}

pub fn Mul(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).mul(lhs, rhs)
}

pub fn NSWMul(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswmul(lhs, rhs)
}

pub fn NUWMul(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwmul(lhs, rhs)
}

pub fn FMul(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fmul(lhs, rhs)
}

pub fn FMulFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fmul_fast(lhs, rhs)
}

pub fn UDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).udiv(lhs, rhs)
}

pub fn SDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).sdiv(lhs, rhs)
}

pub fn ExactSDiv(cx: Block,
                 lhs: ValueRef,
                 rhs: ValueRef,
                 debug_loc: DebugLoc)
                 -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).exactsdiv(lhs, rhs)
}

pub fn FDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fdiv(lhs, rhs)
}

pub fn FDivFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fdiv_fast(lhs, rhs)
}

pub fn URem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).urem(lhs, rhs)
}

pub fn SRem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).srem(lhs, rhs)
}

pub fn FRem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).frem(lhs, rhs)
}

pub fn FRemFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).frem_fast(lhs, rhs)
}

pub fn Shl(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).shl(lhs, rhs)
}

pub fn LShr(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).lshr(lhs, rhs)
}

pub fn AShr(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).ashr(lhs, rhs)
}

pub fn And(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).and(lhs, rhs)
}

pub fn Or(cx: Block,
          lhs: ValueRef,
          rhs: ValueRef,
          debug_loc: DebugLoc)
          -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).or(lhs, rhs)
}

pub fn Xor(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).xor(lhs, rhs)
}

pub fn BinOp(cx: Block,
             op: Opcode,
             lhs: ValueRef,
             rhs: ValueRef,
             debug_loc: DebugLoc)
             -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).binop(op, lhs, rhs)
}

pub fn Neg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).neg(v)
}

pub fn NSWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswneg(v)
}

pub fn NUWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwneg(v)
}

pub fn FNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fneg(v)
}

pub fn Not(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).not(v)
}

pub fn Alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        AllocaFcx(cx.fcx, ty, name)
    }
}

pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
    let b = fcx.ccx.builder();
    b.position_before(fcx.alloca_insert_pt.get().unwrap());
    DebugLoc::None.apply(fcx);
    b.alloca(ty, name)
}

pub fn Free(cx: Block, pointer_val: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).free(pointer_val)
}

pub fn Load(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            let ty = val_ty(pointer_val);
            let eltty = if ty.kind() == llvm::Array {
                ty.element_type()
            } else {
                ccx.int_type()
            };
            return llvm::LLVMGetUndef(eltty.to_ref());
        }
        B(cx).load(pointer_val)
    }
}

pub fn VolatileLoad(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).volatile_load(pointer_val)
    }
}

pub fn AtomicLoad(cx: Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(ccx.int_type().to_ref());
        }
        B(cx).atomic_load(pointer_val, order)
    }
}

pub fn LoadRangeAssert(cx: Block, pointer_val: ValueRef, lo: u64,
                       hi: u64, signed: llvm::Bool) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(pointer_val);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_range_assert(pointer_val, lo, hi, signed)
    }
}

pub fn LoadNonNull(cx: Block, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(ptr);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_nonnull(ptr)
    }
}

pub fn Store(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return C_nil(cx.ccx()); }
    B(cx).store(val, ptr)
}

pub fn VolatileStore(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return C_nil(cx.ccx()); }
    B(cx).volatile_store(val, ptr)
}

pub fn AtomicStore(cx: Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_store(val, ptr, order)
}

pub fn GEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gep(pointer, indices)
    }
}

// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32()
#[inline]
pub fn GEPi(cx: Block, base: ValueRef, ixs: &[usize]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gepi(base, ixs)
    }
}

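// For example, computing the address of field 1 of the struct pointed to by
// `base` (illustrative only; `bcx` and `base` assumed):
//
//     let field_ptr = GEPi(bcx, base, &[0, 1]);
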
pub fn InBoundsGEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).inbounds_gep(pointer, indices)
    }
}

pub fn StructGEP(cx: Block, pointer: ValueRef, idx: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).struct_gep(pointer, idx)
    }
}

pub fn GlobalString(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string(_str)
    }
}

pub fn GlobalStringPtr(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string_ptr(_str)
    }
}

/* Casts */
pub fn Trunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc(val, dest_ty)
    }
}

pub fn ZExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext(val, dest_ty)
    }
}

pub fn SExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext(val, dest_ty)
    }
}

pub fn FPToUI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptoui(val, dest_ty)
    }
}

pub fn FPToSI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptosi(val, dest_ty)
    }
}

pub fn UIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).uitofp(val, dest_ty)
    }
}

pub fn SIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sitofp(val, dest_ty)
    }
}

pub fn FPTrunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptrunc(val, dest_ty)
    }
}

pub fn FPExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpext(val, dest_ty)
    }
}

pub fn PtrToInt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).ptrtoint(val, dest_ty)
    }
}

pub fn IntToPtr(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).inttoptr(val, dest_ty)
    }
}

pub fn BitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).bitcast(val, dest_ty)
    }
}

pub fn ZExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext_or_bitcast(val, dest_ty)
    }
}

pub fn SExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext_or_bitcast(val, dest_ty)
    }
}

pub fn TruncOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc_or_bitcast(val, dest_ty)
    }
}

pub fn Cast(cx: Block, op: Opcode, val: ValueRef, dest_ty: Type,
            _: *const u8)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).cast(op, val, dest_ty)
    }
}

pub fn PointerCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).pointercast(val, dest_ty)
    }
}

pub fn IntCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).intcast(val, dest_ty)
    }
}

pub fn FPCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpcast(val, dest_ty)
    }
}

/* Comparisons */
pub fn ICmp(cx: Block,
            op: IntPredicate,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).icmp(op, lhs, rhs)
    }
}

pub fn FCmp(cx: Block,
            op: RealPredicate,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).fcmp(op, lhs, rhs)
    }
}

/* Miscellaneous instructions */
pub fn EmptyPhi(cx: Block, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).empty_phi(ty)
    }
}

pub fn Phi(cx: Block, ty: Type, vals: &[ValueRef],
           bbs: &[BasicBlockRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).phi(ty, vals, bbs)
    }
}

pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(phi) == llvm::True { return; }
        llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
    }
}

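// A phi can be built up incrementally: EmptyPhi creates the node, then
// AddIncomingToPhi wires in one edge per predecessor block (sketch;
// `bcx`, `val_a`, `bb_a`, etc. are assumed names):
//
//     let phi = EmptyPhi(bcx, Type::i32(bcx.ccx()));
//     AddIncomingToPhi(phi, val_a, bb_a);
//     AddIncomingToPhi(phi, val_b, bb_b);
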
pub fn _UndefReturn(cx: Block, fn_: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(fn_);
        let retty = if ty.kind() == llvm::Function {
            ty.return_type()
        } else {
            ccx.int_type()
        };
        B(cx).count_insn("ret_undef");
        llvm::LLVMGetUndef(retty.to_ref())
    }
}

pub fn add_span_comment(cx: Block, sp: Span, text: &str) {
    B(cx).add_span_comment(sp, text)
}

pub fn add_comment(cx: Block, text: &str) {
    B(cx).add_comment(text)
}

pub fn InlineAsmCall(cx: Block, asm: *const c_char, cons: *const c_char,
                     inputs: &[ValueRef], output: Type,
                     volatile: bool, alignstack: bool,
                     dia: AsmDialect) -> ValueRef {
    B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}

pub fn Call(cx: Block,
            fn_: ValueRef,
            args: &[ValueRef],
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _UndefReturn(cx, fn_);
    }
    debug_loc.apply(cx.fcx);
    let bundle = cx.lpad.get().and_then(|b| b.bundle());
    B(cx).call(fn_, args, bundle)
}

pub fn AtomicFence(cx: Block, order: AtomicOrdering, scope: SynchronizationScope) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_fence(order, scope)
}

pub fn Select(cx: Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(then); }
    B(cx).select(if_, then, else_)
}

pub fn VAArg(cx: Block, list: ValueRef, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).va_arg(list, ty)
    }
}

pub fn ExtractElement(cx: Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_element(vec_val, index)
    }
}

pub fn InsertElement(cx: Block, vec_val: ValueRef, elt_val: ValueRef,
                     index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_element(vec_val, elt_val, index)
    }
}

pub fn ShuffleVector(cx: Block, v1: ValueRef, v2: ValueRef,
                     mask: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).shuffle_vector(v1, v2, mask)
    }
}

pub fn VectorSplat(cx: Block, num_elts: usize, elt_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).vector_splat(num_elts, elt_val)
    }
}

pub fn ExtractValue(cx: Block, agg_val: ValueRef, index: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_value(agg_val, index)
    }
}

pub fn InsertValue(cx: Block, agg_val: ValueRef, elt_val: ValueRef, index: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_value(agg_val, elt_val, index)
    }
}

pub fn IsNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_null(val)
    }
}

pub fn IsNotNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_not_null(val)
    }
}

pub fn PtrDiff(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
        B(cx).ptrdiff(lhs, rhs)
    }
}

pub fn Trap(cx: Block) {
    if cx.unreachable.get() { return; }
    B(cx).trap();
}

pub fn LandingPad(cx: Block, ty: Type, pers_fn: ValueRef,
                  num_clauses: usize) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).landing_pad(ty, pers_fn, num_clauses, cx.fcx.llfn)
}

pub fn AddClause(cx: Block, landing_pad: ValueRef, clause: ValueRef) {
    B(cx).add_clause(landing_pad, clause)
}

pub fn SetCleanup(cx: Block, landing_pad: ValueRef) {
    B(cx).set_cleanup(landing_pad)
}

pub fn SetPersonalityFn(cx: Block, f: ValueRef) {
    B(cx).set_personality_fn(f)
}

pub fn Resume(cx: Block, exn: ValueRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "Resume");
    B(cx).resume(exn)
}

// Atomic Operations
pub fn AtomicCmpXchg(cx: Block, dst: ValueRef,
                     cmp: ValueRef, src: ValueRef,
                     order: AtomicOrdering,
                     failure_order: AtomicOrdering,
                     weak: llvm::Bool) -> ValueRef {
    B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order, weak)
}

pub fn AtomicRMW(cx: Block, op: AtomicBinOp,
                 dst: ValueRef, src: ValueRef,
                 order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_rmw(op, dst, src, order)
}

pub fn CleanupPad(cx: Block,
                  parent: Option<ValueRef>,
                  args: &[ValueRef]) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).cleanup_pad(parent, args)
}

pub fn CleanupRet(cx: Block,
                  cleanup: ValueRef,
                  unwind: Option<BasicBlockRef>) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CleanupRet");
    B(cx).cleanup_ret(cleanup, unwind)
}

pub fn CatchPad(cx: Block,
                parent: ValueRef,
                args: &[ValueRef]) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).catch_pad(parent, args)
}

pub fn CatchRet(cx: Block, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CatchRet");
    B(cx).catch_ret(pad, unwind)
}

pub fn CatchSwitch(cx: Block,
                   parent: Option<ValueRef>,
                   unwind: Option<BasicBlockRef>,
                   num_handlers: usize) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CatchSwitch");
    B(cx).catch_switch(parent, unwind, num_handlers)
}

pub fn AddHandler(cx: Block, catch_switch: ValueRef, handler: BasicBlockRef) {
    B(cx).add_handler(catch_switch, handler)
}
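
// As with Switch/AddCase, CatchSwitch returns the new instruction and
// AddHandler attaches handler blocks to it afterwards (sketch; `bcx` and
// `handler_bb` are assumed names):
//
//     let cs = CatchSwitch(bcx, None, None, 1);
//     AddHandler(bcx, cs, handler_bb);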