// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case)]

use llvm;
use llvm::{AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use common::*;
use syntax::codemap::Span;

use builder::Builder;
use type_::Type;
use value::Value;
use debuginfo::DebugLoc;

use libc::{c_uint, c_char};

// Marks the block as terminated; any further attempt to add an
// instruction to it is a bug (see check_not_terminated below).
pub fn terminate(cx: Block, _: &str) {
    debug!("terminate({})", cx.to_str());
    cx.terminated.set(true);
}

pub fn check_not_terminated(cx: Block) {
    if cx.terminated.get() {
        bug!("already terminated!");
    }
}

// Returns a fresh builder positioned at the end of the block's
// underlying LLVM basic block.
pub fn B<'blk, 'tcx>(cx: Block<'blk, 'tcx>) -> Builder<'blk, 'tcx> {
    let b = cx.fcx.ccx.builder();
    b.position_at_end(cx.llbb);
    b
}

// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements to the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (panic/break/return statements, calls to diverging functions, etc), and
// further instructions to the block should simply be ignored.

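// To make that contract concrete, a commented sketch (illustrative only;
// `bcx` stands for some hypothetical block under construction):
//
//     RetVoid(bcx, DebugLoc::None); // emits `ret void` and sets `terminated`;
//                                   // adding a second terminator now is a bug
//     Unreachable(bcx);             // sets `unreachable`; from here on the
//                                   // wrappers below return undef values and
//                                   // emit nothing instead of erroring
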
pub fn RetVoid(cx: Block, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "RetVoid");
    debug_loc.apply(cx.fcx);
    B(cx).ret_void();
}

pub fn Ret(cx: Block, v: ValueRef, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "Ret");
    debug_loc.apply(cx.fcx);
    B(cx).ret(v);
}

pub fn AggregateRet(cx: Block,
                    ret_vals: &[ValueRef],
                    debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "AggregateRet");
    debug_loc.apply(cx.fcx);
    B(cx).aggregate_ret(ret_vals);
}

pub fn Br(cx: Block, dest: BasicBlockRef, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "Br");
    debug_loc.apply(cx.fcx);
    B(cx).br(dest);
}

pub fn CondBr(cx: Block,
              if_: ValueRef,
              then: BasicBlockRef,
              else_: BasicBlockRef,
              debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "CondBr");
    debug_loc.apply(cx.fcx);
    B(cx).cond_br(if_, then, else_);
}

pub fn Switch(cx: Block, v: ValueRef, else_: BasicBlockRef, num_cases: usize)
              -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    check_not_terminated(cx);
    terminate(cx, "Switch");
    B(cx).switch(v, else_, num_cases)
}

pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(s) == llvm::True { return; }
        llvm::LLVMAddCase(s, on_val, dest);
    }
}

pub fn IndirectBr(cx: Block,
                  addr: ValueRef,
                  num_dests: usize,
                  debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "IndirectBr");
    debug_loc.apply(cx.fcx);
    B(cx).indirect_br(addr, num_dests);
}

pub fn Invoke(cx: Block,
              fn_: ValueRef,
              args: &[ValueRef],
              then: BasicBlockRef,
              catch: BasicBlockRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return C_null(Type::i8(cx.ccx()));
    }
    check_not_terminated(cx);
    terminate(cx, "Invoke");
    debug!("Invoke({:?} with arguments ({}))",
           Value(fn_),
           args.iter().map(|a| {
               format!("{:?}", Value(*a))
           }).collect::<Vec<String>>().join(", "));
    debug_loc.apply(cx.fcx);
    let bundle = cx.lpad().and_then(|b| b.bundle());
    B(cx).invoke(fn_, args, then, catch, bundle)
}

pub fn Unreachable(cx: Block) {
    if cx.unreachable.get() {
        return;
    }
    cx.unreachable.set(true);
    if !cx.terminated.get() {
        B(cx).unreachable();
    }
}

// Returns an undef value with the same LLVM type as `val`.
pub fn _Undef(val: ValueRef) -> ValueRef {
    unsafe {
        return llvm::LLVMGetUndef(val_ty(val).to_ref());
    }
}

/* Arithmetic */
pub fn Add(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).add(lhs, rhs)
}

pub fn NSWAdd(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswadd(lhs, rhs)
}

pub fn NUWAdd(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwadd(lhs, rhs)
}

pub fn FAdd(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fadd(lhs, rhs)
}

pub fn FAddFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fadd_fast(lhs, rhs)
}

pub fn Sub(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).sub(lhs, rhs)
}

pub fn NSWSub(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswsub(lhs, rhs)
}

pub fn NUWSub(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwsub(lhs, rhs)
}

pub fn FSub(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fsub(lhs, rhs)
}

pub fn FSubFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fsub_fast(lhs, rhs)
}

pub fn Mul(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).mul(lhs, rhs)
}

pub fn NSWMul(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswmul(lhs, rhs)
}

pub fn NUWMul(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwmul(lhs, rhs)
}

pub fn FMul(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fmul(lhs, rhs)
}

pub fn FMulFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fmul_fast(lhs, rhs)
}

pub fn UDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).udiv(lhs, rhs)
}

pub fn SDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).sdiv(lhs, rhs)
}

pub fn ExactSDiv(cx: Block,
                 lhs: ValueRef,
                 rhs: ValueRef,
                 debug_loc: DebugLoc)
                 -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).exactsdiv(lhs, rhs)
}

pub fn FDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fdiv(lhs, rhs)
}

pub fn FDivFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fdiv_fast(lhs, rhs)
}

pub fn URem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).urem(lhs, rhs)
}

pub fn SRem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).srem(lhs, rhs)
}

pub fn FRem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).frem(lhs, rhs)
}

pub fn FRemFast(cx: Block,
                lhs: ValueRef,
                rhs: ValueRef,
                debug_loc: DebugLoc)
                -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).frem_fast(lhs, rhs)
}

pub fn Shl(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).shl(lhs, rhs)
}

pub fn LShr(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).lshr(lhs, rhs)
}

pub fn AShr(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).ashr(lhs, rhs)
}

pub fn And(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).and(lhs, rhs)
}

pub fn Or(cx: Block,
          lhs: ValueRef,
          rhs: ValueRef,
          debug_loc: DebugLoc)
          -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).or(lhs, rhs)
}

pub fn Xor(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).xor(lhs, rhs)
}

pub fn BinOp(cx: Block,
             op: Opcode,
             lhs: ValueRef,
             rhs: ValueRef,
             debug_loc: DebugLoc)
             -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).binop(op, lhs, rhs)
}

pub fn Neg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).neg(v)
}

pub fn NSWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswneg(v)
}

pub fn NUWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwneg(v)
}

pub fn FNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fneg(v)
}

pub fn Not(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).not(v)
}

pub fn Alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        AllocaFcx(cx.fcx, ty, name)
    }
}

// Allocas are emitted at the function's dedicated alloca insertion point
// (near the entry block), not at the builder's current position.
pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
    let b = fcx.ccx.builder();
    b.position_before(fcx.alloca_insert_pt.get().unwrap());
    DebugLoc::None.apply(fcx);
    b.alloca(ty, name)
}

pub fn Free(cx: Block, pointer_val: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).free(pointer_val)
}

pub fn Load(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            let ty = val_ty(pointer_val);
            let eltty = if ty.kind() == llvm::Array {
                ty.element_type()
            } else {
                ccx.int_type()
            };
            return llvm::LLVMGetUndef(eltty.to_ref());
        }
        B(cx).load(pointer_val)
    }
}

pub fn VolatileLoad(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).volatile_load(pointer_val)
    }
}

pub fn AtomicLoad(cx: Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(ccx.int_type().to_ref());
        }
        B(cx).atomic_load(pointer_val, order)
    }
}

pub fn LoadRangeAssert(cx: Block, pointer_val: ValueRef, lo: u64,
                       hi: u64, signed: llvm::Bool) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(pointer_val);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_range_assert(pointer_val, lo, hi, signed)
    }
}

pub fn LoadNonNull(cx: Block, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(ptr);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_nonnull(ptr)
    }
}

pub fn Store(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return C_nil(cx.ccx()); }
    B(cx).store(val, ptr)
}

pub fn VolatileStore(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return C_nil(cx.ccx()); }
    B(cx).volatile_store(val, ptr)
}

pub fn AtomicStore(cx: Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_store(val, ptr, order)
}

pub fn GEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gep(pointer, indices)
    }
}

// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32() (see the sketch after this function).
#[inline]
pub fn GEPi(cx: Block, base: ValueRef, ixs: &[usize]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gepi(base, ixs)
    }
}

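// To make the wrapping concrete, a hedged sketch (the `bcx`, `ccx`, and
// `base` values here are hypothetical, not from this file); both calls
// compute the same address of field 1 of the aggregate behind `base`:
//
//     let p1 = GEP(bcx, base, &[C_i32(ccx, 0), C_i32(ccx, 1)]);
//     let p2 = GEPi(bcx, base, &[0, 1]); // identical, minus the boilerplate
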
pub fn InBoundsGEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).inbounds_gep(pointer, indices)
    }
}

pub fn StructGEP(cx: Block, pointer: ValueRef, idx: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).struct_gep(pointer, idx)
    }
}

pub fn GlobalString(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string(_str)
    }
}

pub fn GlobalStringPtr(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string_ptr(_str)
    }
}

/* Casts */
pub fn Trunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc(val, dest_ty)
    }
}

pub fn ZExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext(val, dest_ty)
    }
}

pub fn SExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext(val, dest_ty)
    }
}

pub fn FPToUI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptoui(val, dest_ty)
    }
}

pub fn FPToSI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptosi(val, dest_ty)
    }
}

pub fn UIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).uitofp(val, dest_ty)
    }
}

pub fn SIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sitofp(val, dest_ty)
    }
}

pub fn FPTrunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptrunc(val, dest_ty)
    }
}

pub fn FPExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpext(val, dest_ty)
    }
}

pub fn PtrToInt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).ptrtoint(val, dest_ty)
    }
}

pub fn IntToPtr(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).inttoptr(val, dest_ty)
    }
}

pub fn BitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).bitcast(val, dest_ty)
    }
}

pub fn ZExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext_or_bitcast(val, dest_ty)
    }
}

pub fn SExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext_or_bitcast(val, dest_ty)
    }
}

pub fn TruncOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc_or_bitcast(val, dest_ty)
    }
}

pub fn Cast(cx: Block, op: Opcode, val: ValueRef, dest_ty: Type,
            _: *const u8)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).cast(op, val, dest_ty)
    }
}

pub fn PointerCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).pointercast(val, dest_ty)
    }
}

pub fn IntCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).intcast(val, dest_ty)
    }
}

pub fn FPCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpcast(val, dest_ty)
    }
}

/* Comparisons */
pub fn ICmp(cx: Block,
            op: IntPredicate,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).icmp(op, lhs, rhs)
    }
}

pub fn FCmp(cx: Block,
            op: RealPredicate,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).fcmp(op, lhs, rhs)
    }
}

/* Miscellaneous instructions */
pub fn EmptyPhi(cx: Block, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).empty_phi(ty)
    }
}

pub fn Phi(cx: Block, ty: Type, vals: &[ValueRef],
           bbs: &[BasicBlockRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).phi(ty, vals, bbs)
    }
}

pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(phi) == llvm::True { return; }
        llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
    }
}

// Returns an undef value of `fn_`'s return type (or of the int type when
// `fn_` is not a function), used as the result of calls in unreachable code.
pub fn _UndefReturn(cx: Block, fn_: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(fn_);
        let retty = if ty.kind() == llvm::Function {
            ty.return_type()
        } else {
            ccx.int_type()
        };
        B(cx).count_insn("ret_undef");
        llvm::LLVMGetUndef(retty.to_ref())
    }
}

pub fn add_span_comment(cx: Block, sp: Span, text: &str) {
    B(cx).add_span_comment(sp, text)
}

pub fn add_comment(cx: Block, text: &str) {
    B(cx).add_comment(text)
}

pub fn InlineAsmCall(cx: Block, asm: *const c_char, cons: *const c_char,
                     inputs: &[ValueRef], output: Type,
                     volatile: bool, alignstack: bool,
                     dia: AsmDialect) -> ValueRef {
    B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}

pub fn Call(cx: Block,
            fn_: ValueRef,
            args: &[ValueRef],
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _UndefReturn(cx, fn_);
    }
    debug_loc.apply(cx.fcx);
    let bundle = cx.lpad.get().and_then(|b| b.bundle());
    B(cx).call(fn_, args, bundle)
}

pub fn AtomicFence(cx: Block, order: AtomicOrdering, scope: SynchronizationScope) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_fence(order, scope)
}

pub fn Select(cx: Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(then); }
    B(cx).select(if_, then, else_)
}

pub fn VAArg(cx: Block, list: ValueRef, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).va_arg(list, ty)
    }
}

pub fn ExtractElement(cx: Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_element(vec_val, index)
    }
}

pub fn InsertElement(cx: Block, vec_val: ValueRef, elt_val: ValueRef,
                     index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_element(vec_val, elt_val, index)
    }
}

pub fn ShuffleVector(cx: Block, v1: ValueRef, v2: ValueRef,
                     mask: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).shuffle_vector(v1, v2, mask)
    }
}

pub fn VectorSplat(cx: Block, num_elts: usize, elt_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).vector_splat(num_elts, elt_val)
    }
}

pub fn ExtractValue(cx: Block, agg_val: ValueRef, index: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_value(agg_val, index)
    }
}

pub fn InsertValue(cx: Block, agg_val: ValueRef, elt_val: ValueRef, index: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_value(agg_val, elt_val, index)
    }
}

pub fn IsNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_null(val)
    }
}

pub fn IsNotNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_not_null(val)
    }
}

pub fn PtrDiff(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
        B(cx).ptrdiff(lhs, rhs)
    }
}

pub fn Trap(cx: Block) {
    if cx.unreachable.get() { return; }
    B(cx).trap();
}

pub fn LandingPad(cx: Block, ty: Type, pers_fn: ValueRef,
                  num_clauses: usize) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).landing_pad(ty, pers_fn, num_clauses, cx.fcx.llfn)
}

pub fn AddClause(cx: Block, landing_pad: ValueRef, clause: ValueRef) {
    B(cx).add_clause(landing_pad, clause)
}

pub fn SetCleanup(cx: Block, landing_pad: ValueRef) {
    B(cx).set_cleanup(landing_pad)
}

pub fn SetPersonalityFn(cx: Block, f: ValueRef) {
    B(cx).set_personality_fn(f)
}

pub fn Resume(cx: Block, exn: ValueRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "Resume");
    B(cx).resume(exn)
}

// Atomic Operations
pub fn AtomicCmpXchg(cx: Block, dst: ValueRef,
                     cmp: ValueRef, src: ValueRef,
                     order: AtomicOrdering,
                     failure_order: AtomicOrdering,
                     weak: llvm::Bool) -> ValueRef {
    B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order, weak)
}

pub fn AtomicRMW(cx: Block, op: AtomicBinOp,
                 dst: ValueRef, src: ValueRef,
                 order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_rmw(op, dst, src, order)
}

pub fn CleanupPad(cx: Block,
                  parent: Option<ValueRef>,
                  args: &[ValueRef]) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).cleanup_pad(parent, args)
}

pub fn CleanupRet(cx: Block,
                  cleanup: ValueRef,
                  unwind: Option<BasicBlockRef>) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CleanupRet");
    B(cx).cleanup_ret(cleanup, unwind)
}

pub fn CatchPad(cx: Block,
                parent: ValueRef,
                args: &[ValueRef]) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).catch_pad(parent, args)
}

pub fn CatchRet(cx: Block, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CatchRet");
    B(cx).catch_ret(pad, unwind)
}

pub fn CatchSwitch(cx: Block,
                   parent: Option<ValueRef>,
                   unwind: Option<BasicBlockRef>,
                   num_handlers: usize) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CatchSwitch");
    B(cx).catch_switch(parent, unwind, num_handlers)
}

pub fn AddHandler(cx: Block, catch_switch: ValueRef, handler: BasicBlockRef) {
    B(cx).add_handler(catch_switch, handler)
}