// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case)]

use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use trans::common::*;
use syntax::codemap::Span;

use trans::builder::Builder;
use trans::type_::Type;
use trans::debuginfo::DebugLoc;

use libc::{c_uint, c_char};
/// Marks the block as terminated; attempting to add a second terminator
/// afterwards is a bug.
pub fn terminate(cx: Block, _: &str) {
    debug!("terminate({})", cx.to_str());
    cx.terminated.set(true);
}

/// Panics if the block already has a terminator.
pub fn check_not_terminated(cx: Block) {
    if cx.terminated.get() {
        panic!("already terminated!");
    }
}

/// Returns a fresh builder positioned at the end of the block.
pub fn B<'blk, 'tcx>(cx: Block<'blk, 'tcx>) -> Builder<'blk, 'tcx> {
    let b = cx.fcx.ccx.builder();
    b.position_at_end(cx.llbb);
    b
}

// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements to the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (panic/break/return statements, calls to diverging functions, etc.), and
// further instructions added to the block should simply be ignored.

pub fn RetVoid(cx: Block, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "RetVoid");
    debug_loc.apply(cx.fcx);
    B(cx).ret_void();
}

pub fn Ret(cx: Block, v: ValueRef, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "Ret");
    debug_loc.apply(cx.fcx);
    B(cx).ret(v);
}

pub fn AggregateRet(cx: Block,
                    ret_vals: &[ValueRef],
                    debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "AggregateRet");
    debug_loc.apply(cx.fcx);
    B(cx).aggregate_ret(ret_vals);
}

pub fn Br(cx: Block, dest: BasicBlockRef, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "Br");
    debug_loc.apply(cx.fcx);
    B(cx).br(dest);
}

pub fn CondBr(cx: Block,
              if_: ValueRef,
              then: BasicBlockRef,
              else_: BasicBlockRef,
              debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "CondBr");
    debug_loc.apply(cx.fcx);
    B(cx).cond_br(if_, then, else_);
}

pub fn Switch(cx: Block, v: ValueRef, else_: BasicBlockRef, num_cases: usize)
              -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    check_not_terminated(cx);
    terminate(cx, "Switch");
    B(cx).switch(v, else_, num_cases)
}

pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(s) == llvm::True { return; }
        llvm::LLVMAddCase(s, on_val, dest);
    }
}
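
// A typical use of the pair above (a sketch; `bcx`, `discr`, and the target
// blocks are hypothetical): build the switch, then attach its cases.
//
//     let llswitch = Switch(bcx, discr, else_bb, 2);
//     AddCase(llswitch, C_i32(bcx.ccx(), 0), zero_bb);
//     AddCase(llswitch, C_i32(bcx.ccx(), 1), one_bb);
//
// If `bcx` was unreachable, Switch returns undef; AddCase detects that via
// LLVMIsUndef and does nothing, so callers need no special casing.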

pub fn IndirectBr(cx: Block,
                  addr: ValueRef,
                  num_dests: usize,
                  debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "IndirectBr");
    debug_loc.apply(cx.fcx);
    B(cx).indirect_br(addr, num_dests);
}

pub fn Invoke(cx: Block,
              fn_: ValueRef,
              args: &[ValueRef],
              then: BasicBlockRef,
              catch: BasicBlockRef,
              attributes: Option<AttrBuilder>,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return C_null(Type::i8(cx.ccx()));
    }
    check_not_terminated(cx);
    terminate(cx, "Invoke");
    debug!("Invoke({} with arguments ({}))",
           cx.val_to_string(fn_),
           args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().connect(", "));
    debug_loc.apply(cx.fcx);
    B(cx).invoke(fn_, args, then, catch, attributes)
}

pub fn Unreachable(cx: Block) {
    if cx.unreachable.get() {
        return;
    }
    cx.unreachable.set(true);
    if !cx.terminated.get() {
        B(cx).unreachable();
    }
}

/// Returns an undef value with the same type as `val`.
pub fn _Undef(val: ValueRef) -> ValueRef {
    unsafe {
        return llvm::LLVMGetUndef(val_ty(val).to_ref());
    }
}

/* Arithmetic */
pub fn Add(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).add(lhs, rhs)
}

pub fn NSWAdd(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswadd(lhs, rhs)
}

pub fn NUWAdd(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwadd(lhs, rhs)
}

pub fn FAdd(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fadd(lhs, rhs)
}

pub fn Sub(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).sub(lhs, rhs)
}

pub fn NSWSub(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswsub(lhs, rhs)
}

pub fn NUWSub(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwsub(lhs, rhs)
}

pub fn FSub(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fsub(lhs, rhs)
}

pub fn Mul(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).mul(lhs, rhs)
}

pub fn NSWMul(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswmul(lhs, rhs)
}

pub fn NUWMul(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwmul(lhs, rhs)
}

pub fn FMul(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fmul(lhs, rhs)
}

pub fn UDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).udiv(lhs, rhs)
}

pub fn SDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).sdiv(lhs, rhs)
}

pub fn ExactSDiv(cx: Block,
                 lhs: ValueRef,
                 rhs: ValueRef,
                 debug_loc: DebugLoc)
                 -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).exactsdiv(lhs, rhs)
}

pub fn FDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fdiv(lhs, rhs)
}

pub fn URem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).urem(lhs, rhs)
}

pub fn SRem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).srem(lhs, rhs)
}

pub fn FRem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).frem(lhs, rhs)
}

pub fn Shl(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).shl(lhs, rhs)
}

pub fn LShr(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).lshr(lhs, rhs)
}

pub fn AShr(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).ashr(lhs, rhs)
}

pub fn And(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).and(lhs, rhs)
}

pub fn Or(cx: Block,
          lhs: ValueRef,
          rhs: ValueRef,
          debug_loc: DebugLoc)
          -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).or(lhs, rhs)
}

pub fn Xor(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).xor(lhs, rhs)
}

pub fn BinOp(cx: Block,
             op: Opcode,
             lhs: ValueRef,
             rhs: ValueRef,
             debug_loc: DebugLoc)
             -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).binop(op, lhs, rhs)
}

pub fn Neg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).neg(v)
}

pub fn NSWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswneg(v)
}

pub fn NUWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwneg(v)
}

pub fn FNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fneg(v)
}

pub fn Not(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).not(v)
}

/* Memory */
pub fn Malloc(cx: Block, ty: Type, debug_loc: DebugLoc) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).malloc(ty)
    }
}

pub fn ArrayMalloc(cx: Block,
                   ty: Type,
                   val: ValueRef,
                   debug_loc: DebugLoc) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).array_malloc(ty, val)
    }
}

pub fn Alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        AllocaFcx(cx.fcx, ty, name)
    }
}

pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
    let b = fcx.ccx.builder();
    b.position_before(fcx.alloca_insert_pt.get().unwrap());
    DebugLoc::None.apply(fcx);
    b.alloca(ty, name)
}

pub fn ArrayAlloca(cx: Block, ty: Type, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        let b = cx.fcx.ccx.builder();
        b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
        DebugLoc::None.apply(cx.fcx);
        b.array_alloca(ty, val)
    }
}

pub fn Free(cx: Block, pointer_val: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).free(pointer_val)
}

pub fn Load(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            let ty = val_ty(pointer_val);
            let eltty = if ty.kind() == llvm::Array {
                ty.element_type()
            } else {
                ccx.int_type()
            };
            return llvm::LLVMGetUndef(eltty.to_ref());
        }
        B(cx).load(pointer_val)
    }
}

pub fn VolatileLoad(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).volatile_load(pointer_val)
    }
}

pub fn AtomicLoad(cx: Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(ccx.int_type().to_ref());
        }
        B(cx).atomic_load(pointer_val, order)
    }
}

pub fn LoadRangeAssert(cx: Block, pointer_val: ValueRef, lo: u64,
                       hi: u64, signed: llvm::Bool) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(pointer_val);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_range_assert(pointer_val, lo, hi, signed)
    }
}

pub fn LoadNonNull(cx: Block, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(ptr);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_nonnull(ptr)
    }
}

pub fn Store(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return C_nil(cx.ccx()); }
    B(cx).store(val, ptr)
}

pub fn VolatileStore(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return C_nil(cx.ccx()); }
    B(cx).volatile_store(val, ptr)
}

pub fn AtomicStore(cx: Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_store(val, ptr, order)
}

pub fn GEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gep(pointer, indices)
    }
}

// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32()
#[inline]
pub fn GEPi(cx: Block, base: ValueRef, ixs: &[usize]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gepi(base, ixs)
    }
}
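
// For instance, taking a pointer to field 1 of the struct behind a
// (hypothetical) `llptr`:
//
//     let field = GEPi(bcx, llptr, &[0, 1]);
//
// is shorthand for
//
//     let field = GEP(bcx, llptr, &[C_i32(bcx.ccx(), 0), C_i32(bcx.ccx(), 1)]);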

pub fn InBoundsGEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).inbounds_gep(pointer, indices)
    }
}

pub fn StructGEP(cx: Block, pointer: ValueRef, idx: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).struct_gep(pointer, idx)
    }
}

pub fn GlobalString(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string(_str)
    }
}

pub fn GlobalStringPtr(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string_ptr(_str)
    }
}

/* Casts */
pub fn Trunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc(val, dest_ty)
    }
}

pub fn ZExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext(val, dest_ty)
    }
}

pub fn SExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext(val, dest_ty)
    }
}

pub fn FPToUI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptoui(val, dest_ty)
    }
}

pub fn FPToSI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptosi(val, dest_ty)
    }
}

pub fn UIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).uitofp(val, dest_ty)
    }
}

pub fn SIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sitofp(val, dest_ty)
    }
}

pub fn FPTrunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptrunc(val, dest_ty)
    }
}

pub fn FPExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpext(val, dest_ty)
    }
}

pub fn PtrToInt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).ptrtoint(val, dest_ty)
    }
}

pub fn IntToPtr(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).inttoptr(val, dest_ty)
    }
}

pub fn BitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).bitcast(val, dest_ty)
    }
}

pub fn ZExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext_or_bitcast(val, dest_ty)
    }
}

pub fn SExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext_or_bitcast(val, dest_ty)
    }
}

pub fn TruncOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc_or_bitcast(val, dest_ty)
    }
}

pub fn Cast(cx: Block, op: Opcode, val: ValueRef, dest_ty: Type,
            _: *const u8)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).cast(op, val, dest_ty)
    }
}

pub fn PointerCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).pointercast(val, dest_ty)
    }
}

pub fn IntCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).intcast(val, dest_ty)
    }
}

pub fn FPCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpcast(val, dest_ty)
    }
}

/* Comparisons */
pub fn ICmp(cx: Block,
            op: IntPredicate,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).icmp(op, lhs, rhs)
    }
}

pub fn FCmp(cx: Block,
            op: RealPredicate,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).fcmp(op, lhs, rhs)
    }
}

/* Miscellaneous instructions */
pub fn EmptyPhi(cx: Block, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).empty_phi(ty)
    }
}

pub fn Phi(cx: Block, ty: Type, vals: &[ValueRef],
           bbs: &[BasicBlockRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).phi(ty, vals, bbs)
    }
}

pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(phi) == llvm::True { return; }
        llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
    }
}
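
// Typical phi construction when the incoming edges are added as they are
// built (a sketch; the blocks and values are hypothetical):
//
//     let phi = EmptyPhi(join_bcx, Type::i1(join_bcx.ccx()));
//     AddIncomingToPhi(phi, then_val, then_llbb);
//     AddIncomingToPhi(phi, else_val, else_llbb);
//
// As with AddCase, an undef phi (produced in an unreachable block) is
// detected via LLVMIsUndef and the incoming edge is silently dropped.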

/// Returns an undef value of `fn_`'s return type (or of the default integer
/// type if `fn_` is not a function); used as the result of a call made from
/// an unreachable block.
pub fn _UndefReturn(cx: Block, fn_: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(fn_);
        let retty = if ty.kind() == llvm::Function {
            ty.return_type()
        } else {
            ccx.int_type()
        };
        B(cx).count_insn("ret_undef");
        llvm::LLVMGetUndef(retty.to_ref())
    }
}

pub fn add_span_comment(cx: Block, sp: Span, text: &str) {
    B(cx).add_span_comment(sp, text)
}

pub fn add_comment(cx: Block, text: &str) {
    B(cx).add_comment(text)
}

pub fn InlineAsmCall(cx: Block, asm: *const c_char, cons: *const c_char,
                     inputs: &[ValueRef], output: Type,
                     volatile: bool, alignstack: bool,
                     dia: AsmDialect) -> ValueRef {
    B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}

pub fn Call(cx: Block,
            fn_: ValueRef,
            args: &[ValueRef],
            attributes: Option<AttrBuilder>,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _UndefReturn(cx, fn_);
    }
    debug_loc.apply(cx.fcx);
    B(cx).call(fn_, args, attributes)
}

pub fn CallWithConv(cx: Block,
                    fn_: ValueRef,
                    args: &[ValueRef],
                    conv: CallConv,
                    attributes: Option<AttrBuilder>,
                    debug_loc: DebugLoc)
                    -> ValueRef {
    if cx.unreachable.get() {
        return _UndefReturn(cx, fn_);
    }
    debug_loc.apply(cx.fcx);
    B(cx).call_with_conv(fn_, args, conv, attributes)
}

pub fn AtomicFence(cx: Block, order: AtomicOrdering, scope: SynchronizationScope) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_fence(order, scope)
}

pub fn Select(cx: Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(then); }
    B(cx).select(if_, then, else_)
}

pub fn VAArg(cx: Block, list: ValueRef, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).va_arg(list, ty)
    }
}

pub fn ExtractElement(cx: Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_element(vec_val, index)
    }
}

pub fn InsertElement(cx: Block, vec_val: ValueRef, elt_val: ValueRef,
                     index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_element(vec_val, elt_val, index)
    }
}

pub fn ShuffleVector(cx: Block, v1: ValueRef, v2: ValueRef,
                     mask: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).shuffle_vector(v1, v2, mask)
    }
}

pub fn VectorSplat(cx: Block, num_elts: usize, elt_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).vector_splat(num_elts, elt_val)
    }
}

pub fn ExtractValue(cx: Block, agg_val: ValueRef, index: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_value(agg_val, index)
    }
}

pub fn InsertValue(cx: Block, agg_val: ValueRef, elt_val: ValueRef, index: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_value(agg_val, elt_val, index)
    }
}

pub fn IsNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_null(val)
    }
}

pub fn IsNotNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_not_null(val)
    }
}

pub fn PtrDiff(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
        B(cx).ptrdiff(lhs, rhs)
    }
}

pub fn Trap(cx: Block) {
    if cx.unreachable.get() { return; }
    B(cx).trap();
}

pub fn LandingPad(cx: Block, ty: Type, pers_fn: ValueRef,
                  num_clauses: usize) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).landing_pad(ty, pers_fn, num_clauses)
}

pub fn SetCleanup(cx: Block, landing_pad: ValueRef) {
    B(cx).set_cleanup(landing_pad)
}

pub fn Resume(cx: Block, exn: ValueRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "Resume");
    B(cx).resume(exn)
}

/* Atomic Operations */
pub fn AtomicCmpXchg(cx: Block, dst: ValueRef,
                     cmp: ValueRef, src: ValueRef,
                     order: AtomicOrdering,
                     failure_order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order)
}

pub fn AtomicRMW(cx: Block, op: AtomicBinOp,
                 dst: ValueRef, src: ValueRef,
                 order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_rmw(op, dst, src, order)
}