]> git.proxmox.com Git - rustc.git/blame - src/librustc_trans/trans/build.rs
Imported Upstream version 1.2.0+dfsg1
[rustc.git] / src / librustc_trans / trans / build.rs
CommitLineData
1a4d82fc
JJ
1// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
11#![allow(dead_code)] // FFI wrappers
12#![allow(non_snake_case)]
13
14use llvm;
d9579d0f 15use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
1a4d82fc
JJ
16use llvm::{Opcode, IntPredicate, RealPredicate};
17use llvm::{ValueRef, BasicBlockRef};
18use trans::common::*;
19use syntax::codemap::Span;
20
21use trans::builder::Builder;
22use trans::type_::Type;
85aaf69f 23use trans::debuginfo::DebugLoc;
1a4d82fc
JJ
24
25use libc::{c_uint, c_char};
26
27pub fn terminate(cx: Block, _: &str) {
28 debug!("terminate({})", cx.to_str());
29 cx.terminated.set(true);
30}
31
32pub fn check_not_terminated(cx: Block) {
33 if cx.terminated.get() {
34 panic!("already terminated!");
35 }
36}
37
38pub fn B<'blk, 'tcx>(cx: Block<'blk, 'tcx>) -> Builder<'blk, 'tcx> {
39 let b = cx.fcx.ccx.builder();
40 b.position_at_end(cx.llbb);
41 b
42}
43
// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements in the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (panic/break/return statements, call to diverging functions, etc), and
// further instructions to the block should simply be ignored.
85aaf69f
SL
52pub fn RetVoid(cx: Block, debug_loc: DebugLoc) {
53 if cx.unreachable.get() {
54 return;
55 }
1a4d82fc
JJ
56 check_not_terminated(cx);
57 terminate(cx, "RetVoid");
85aaf69f 58 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
59 B(cx).ret_void();
60}
61
85aaf69f
SL
62pub fn Ret(cx: Block, v: ValueRef, debug_loc: DebugLoc) {
63 if cx.unreachable.get() {
64 return;
65 }
1a4d82fc
JJ
66 check_not_terminated(cx);
67 terminate(cx, "Ret");
85aaf69f 68 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
69 B(cx).ret(v);
70}
71
85aaf69f
SL
72pub fn AggregateRet(cx: Block,
73 ret_vals: &[ValueRef],
74 debug_loc: DebugLoc) {
75 if cx.unreachable.get() {
76 return;
77 }
1a4d82fc
JJ
78 check_not_terminated(cx);
79 terminate(cx, "AggregateRet");
85aaf69f 80 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
81 B(cx).aggregate_ret(ret_vals);
82}
83
85aaf69f
SL
84pub fn Br(cx: Block, dest: BasicBlockRef, debug_loc: DebugLoc) {
85 if cx.unreachable.get() {
86 return;
87 }
1a4d82fc
JJ
88 check_not_terminated(cx);
89 terminate(cx, "Br");
85aaf69f 90 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
91 B(cx).br(dest);
92}
93
94pub fn CondBr(cx: Block,
95 if_: ValueRef,
96 then: BasicBlockRef,
85aaf69f
SL
97 else_: BasicBlockRef,
98 debug_loc: DebugLoc) {
99 if cx.unreachable.get() {
100 return;
101 }
1a4d82fc
JJ
102 check_not_terminated(cx);
103 terminate(cx, "CondBr");
85aaf69f 104 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
105 B(cx).cond_br(if_, then, else_);
106}
107
c34b1796 108pub fn Switch(cx: Block, v: ValueRef, else_: BasicBlockRef, num_cases: usize)
1a4d82fc
JJ
109 -> ValueRef {
110 if cx.unreachable.get() { return _Undef(v); }
111 check_not_terminated(cx);
112 terminate(cx, "Switch");
113 B(cx).switch(v, else_, num_cases)
114}
115
116pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
117 unsafe {
118 if llvm::LLVMIsUndef(s) == llvm::True { return; }
119 llvm::LLVMAddCase(s, on_val, dest);
120 }
121}
122
85aaf69f
SL
123pub fn IndirectBr(cx: Block,
124 addr: ValueRef,
c34b1796 125 num_dests: usize,
85aaf69f
SL
126 debug_loc: DebugLoc) {
127 if cx.unreachable.get() {
128 return;
129 }
1a4d82fc
JJ
130 check_not_terminated(cx);
131 terminate(cx, "IndirectBr");
85aaf69f 132 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
133 B(cx).indirect_br(addr, num_dests);
134}
135
136pub fn Invoke(cx: Block,
137 fn_: ValueRef,
138 args: &[ValueRef],
139 then: BasicBlockRef,
140 catch: BasicBlockRef,
85aaf69f
SL
141 attributes: Option<AttrBuilder>,
142 debug_loc: DebugLoc)
1a4d82fc
JJ
143 -> ValueRef {
144 if cx.unreachable.get() {
145 return C_null(Type::i8(cx.ccx()));
146 }
147 check_not_terminated(cx);
148 terminate(cx, "Invoke");
149 debug!("Invoke({} with arguments ({}))",
150 cx.val_to_string(fn_),
151 args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().connect(", "));
85aaf69f 152 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
153 B(cx).invoke(fn_, args, then, catch, attributes)
154}
155
156pub fn Unreachable(cx: Block) {
157 if cx.unreachable.get() {
158 return
159 }
160 cx.unreachable.set(true);
161 if !cx.terminated.get() {
162 B(cx).unreachable();
163 }
164}
165
166pub fn _Undef(val: ValueRef) -> ValueRef {
167 unsafe {
168 return llvm::LLVMGetUndef(val_ty(val).to_ref());
169 }
170}
171
172/* Arithmetic */
85aaf69f
SL
173pub fn Add(cx: Block,
174 lhs: ValueRef,
175 rhs: ValueRef,
176 debug_loc: DebugLoc)
177 -> ValueRef {
178 if cx.unreachable.get() {
179 return _Undef(lhs);
180 }
181 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
182 B(cx).add(lhs, rhs)
183}
184
85aaf69f
SL
185pub fn NSWAdd(cx: Block,
186 lhs: ValueRef,
187 rhs: ValueRef,
188 debug_loc: DebugLoc)
189 -> ValueRef {
190 if cx.unreachable.get() {
191 return _Undef(lhs);
192 }
193 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
194 B(cx).nswadd(lhs, rhs)
195}
196
85aaf69f
SL
197pub fn NUWAdd(cx: Block,
198 lhs: ValueRef,
199 rhs: ValueRef,
200 debug_loc: DebugLoc)
201 -> ValueRef {
202 if cx.unreachable.get() {
203 return _Undef(lhs);
204 }
205 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
206 B(cx).nuwadd(lhs, rhs)
207}
208
85aaf69f
SL
209pub fn FAdd(cx: Block,
210 lhs: ValueRef,
211 rhs: ValueRef,
212 debug_loc: DebugLoc)
213 -> ValueRef {
214 if cx.unreachable.get() {
215 return _Undef(lhs);
216 }
217 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
218 B(cx).fadd(lhs, rhs)
219}
220
85aaf69f
SL
221pub fn Sub(cx: Block,
222 lhs: ValueRef,
223 rhs: ValueRef,
224 debug_loc: DebugLoc)
225 -> ValueRef {
226 if cx.unreachable.get() {
227 return _Undef(lhs);
228 }
229 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
230 B(cx).sub(lhs, rhs)
231}
232
85aaf69f
SL
233pub fn NSWSub(cx: Block,
234 lhs: ValueRef,
235 rhs: ValueRef,
236 debug_loc: DebugLoc)
237 -> ValueRef {
238 if cx.unreachable.get() {
239 return _Undef(lhs);
240 }
241 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
242 B(cx).nswsub(lhs, rhs)
243}
244
85aaf69f
SL
245pub fn NUWSub(cx: Block,
246 lhs: ValueRef,
247 rhs: ValueRef,
248 debug_loc: DebugLoc)
249 -> ValueRef {
250 if cx.unreachable.get() {
251 return _Undef(lhs);
252 }
253 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
254 B(cx).nuwsub(lhs, rhs)
255}
256
85aaf69f
SL
257pub fn FSub(cx: Block,
258 lhs: ValueRef,
259 rhs: ValueRef,
260 debug_loc: DebugLoc)
261 -> ValueRef {
262 if cx.unreachable.get() {
263 return _Undef(lhs);
264 }
265 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
266 B(cx).fsub(lhs, rhs)
267}
268
85aaf69f
SL
269pub fn Mul(cx: Block,
270 lhs: ValueRef,
271 rhs: ValueRef,
272 debug_loc: DebugLoc)
273 -> ValueRef {
274 if cx.unreachable.get() {
275 return _Undef(lhs);
276 }
277 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
278 B(cx).mul(lhs, rhs)
279}
280
85aaf69f
SL
281pub fn NSWMul(cx: Block,
282 lhs: ValueRef,
283 rhs: ValueRef,
284 debug_loc: DebugLoc)
285 -> ValueRef {
286 if cx.unreachable.get() {
287 return _Undef(lhs);
288 }
289 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
290 B(cx).nswmul(lhs, rhs)
291}
292
85aaf69f
SL
293pub fn NUWMul(cx: Block,
294 lhs: ValueRef,
295 rhs: ValueRef,
296 debug_loc: DebugLoc)
297 -> ValueRef {
298 if cx.unreachable.get() {
299 return _Undef(lhs);
300 }
301 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
302 B(cx).nuwmul(lhs, rhs)
303}
304
85aaf69f
SL
305pub fn FMul(cx: Block,
306 lhs: ValueRef,
307 rhs: ValueRef,
308 debug_loc: DebugLoc)
309 -> ValueRef {
310 if cx.unreachable.get() {
311 return _Undef(lhs);
312 }
313 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
314 B(cx).fmul(lhs, rhs)
315}
316
85aaf69f
SL
317pub fn UDiv(cx: Block,
318 lhs: ValueRef,
319 rhs: ValueRef,
320 debug_loc: DebugLoc)
321 -> ValueRef {
322 if cx.unreachable.get() {
323 return _Undef(lhs);
324 }
325 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
326 B(cx).udiv(lhs, rhs)
327}
328
85aaf69f
SL
329pub fn SDiv(cx: Block,
330 lhs: ValueRef,
331 rhs: ValueRef,
332 debug_loc: DebugLoc)
333 -> ValueRef {
334 if cx.unreachable.get() {
335 return _Undef(lhs);
336 }
337 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
338 B(cx).sdiv(lhs, rhs)
339}
340
85aaf69f
SL
341pub fn ExactSDiv(cx: Block,
342 lhs: ValueRef,
343 rhs: ValueRef,
344 debug_loc: DebugLoc)
345 -> ValueRef {
346 if cx.unreachable.get() {
347 return _Undef(lhs);
348 }
349 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
350 B(cx).exactsdiv(lhs, rhs)
351}
352
85aaf69f
SL
353pub fn FDiv(cx: Block,
354 lhs: ValueRef,
355 rhs: ValueRef,
356 debug_loc: DebugLoc)
357 -> ValueRef {
358 if cx.unreachable.get() {
359 return _Undef(lhs);
360 }
361 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
362 B(cx).fdiv(lhs, rhs)
363}
364
85aaf69f
SL
365pub fn URem(cx: Block,
366 lhs: ValueRef,
367 rhs: ValueRef,
368 debug_loc: DebugLoc)
369 -> ValueRef {
370 if cx.unreachable.get() {
371 return _Undef(lhs);
372 }
373 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
374 B(cx).urem(lhs, rhs)
375}
376
85aaf69f
SL
377pub fn SRem(cx: Block,
378 lhs: ValueRef,
379 rhs: ValueRef,
380 debug_loc: DebugLoc)
381 -> ValueRef {
382 if cx.unreachable.get() {
383 return _Undef(lhs);
384 }
385 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
386 B(cx).srem(lhs, rhs)
387}
388
85aaf69f
SL
389pub fn FRem(cx: Block,
390 lhs: ValueRef,
391 rhs: ValueRef,
392 debug_loc: DebugLoc)
393 -> ValueRef {
394 if cx.unreachable.get() {
395 return _Undef(lhs);
396 }
397 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
398 B(cx).frem(lhs, rhs)
399}
400
85aaf69f
SL
401pub fn Shl(cx: Block,
402 lhs: ValueRef,
403 rhs: ValueRef,
404 debug_loc: DebugLoc)
405 -> ValueRef {
406 if cx.unreachable.get() {
407 return _Undef(lhs);
408 }
409 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
410 B(cx).shl(lhs, rhs)
411}
412
85aaf69f
SL
413pub fn LShr(cx: Block,
414 lhs: ValueRef,
415 rhs: ValueRef,
416 debug_loc: DebugLoc)
417 -> ValueRef {
418 if cx.unreachable.get() {
419 return _Undef(lhs);
420 }
421 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
422 B(cx).lshr(lhs, rhs)
423}
424
85aaf69f
SL
425pub fn AShr(cx: Block,
426 lhs: ValueRef,
427 rhs: ValueRef,
428 debug_loc: DebugLoc)
429 -> ValueRef {
430 if cx.unreachable.get() {
431 return _Undef(lhs);
432 }
433 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
434 B(cx).ashr(lhs, rhs)
435}
436
85aaf69f
SL
437pub fn And(cx: Block,
438 lhs: ValueRef,
439 rhs: ValueRef,
440 debug_loc: DebugLoc)
441 -> ValueRef {
442 if cx.unreachable.get() {
443 return _Undef(lhs);
444 }
445 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
446 B(cx).and(lhs, rhs)
447}
448
85aaf69f
SL
449pub fn Or(cx: Block,
450 lhs: ValueRef,
451 rhs: ValueRef,
452 debug_loc: DebugLoc)
453 -> ValueRef {
454 if cx.unreachable.get() {
455 return _Undef(lhs);
456 }
457 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
458 B(cx).or(lhs, rhs)
459}
460
85aaf69f
SL
461pub fn Xor(cx: Block,
462 lhs: ValueRef,
463 rhs: ValueRef,
464 debug_loc: DebugLoc)
465 -> ValueRef {
466 if cx.unreachable.get() {
467 return _Undef(lhs);
468 }
469 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
470 B(cx).xor(lhs, rhs)
471}
472
85aaf69f
SL
473pub fn BinOp(cx: Block,
474 op: Opcode,
475 lhs: ValueRef,
476 rhs: ValueRef,
477 debug_loc: DebugLoc)
1a4d82fc 478 -> ValueRef {
85aaf69f
SL
479 if cx.unreachable.get() {
480 return _Undef(lhs);
481 }
482 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
483 B(cx).binop(op, lhs, rhs)
484}
485
85aaf69f
SL
486pub fn Neg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
487 if cx.unreachable.get() {
488 return _Undef(v);
489 }
490 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
491 B(cx).neg(v)
492}
493
85aaf69f
SL
494pub fn NSWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
495 if cx.unreachable.get() {
496 return _Undef(v);
497 }
498 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
499 B(cx).nswneg(v)
500}
501
85aaf69f
SL
502pub fn NUWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
503 if cx.unreachable.get() {
504 return _Undef(v);
505 }
506 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
507 B(cx).nuwneg(v)
508}
85aaf69f
SL
509pub fn FNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
510 if cx.unreachable.get() {
511 return _Undef(v);
512 }
513 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
514 B(cx).fneg(v)
515}
516
85aaf69f
SL
517pub fn Not(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
518 if cx.unreachable.get() {
519 return _Undef(v);
520 }
521 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
522 B(cx).not(v)
523}
524
1a4d82fc
JJ
525pub fn Alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
526 unsafe {
527 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
528 AllocaFcx(cx.fcx, ty, name)
529 }
530}
531
532pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
533 let b = fcx.ccx.builder();
534 b.position_before(fcx.alloca_insert_pt.get().unwrap());
85aaf69f 535 DebugLoc::None.apply(fcx);
1a4d82fc
JJ
536 b.alloca(ty, name)
537}
538
1a4d82fc
JJ
539pub fn Free(cx: Block, pointer_val: ValueRef) {
540 if cx.unreachable.get() { return; }
541 B(cx).free(pointer_val)
542}
543
544pub fn Load(cx: Block, pointer_val: ValueRef) -> ValueRef {
545 unsafe {
546 let ccx = cx.fcx.ccx;
547 if cx.unreachable.get() {
548 let ty = val_ty(pointer_val);
549 let eltty = if ty.kind() == llvm::Array {
550 ty.element_type()
551 } else {
552 ccx.int_type()
553 };
554 return llvm::LLVMGetUndef(eltty.to_ref());
555 }
556 B(cx).load(pointer_val)
557 }
558}
559
560pub fn VolatileLoad(cx: Block, pointer_val: ValueRef) -> ValueRef {
561 unsafe {
562 if cx.unreachable.get() {
563 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
564 }
565 B(cx).volatile_load(pointer_val)
566 }
567}
568
569pub fn AtomicLoad(cx: Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
570 unsafe {
571 let ccx = cx.fcx.ccx;
572 if cx.unreachable.get() {
573 return llvm::LLVMGetUndef(ccx.int_type().to_ref());
574 }
575 B(cx).atomic_load(pointer_val, order)
576 }
577}
578
579
580pub fn LoadRangeAssert(cx: Block, pointer_val: ValueRef, lo: u64,
581 hi: u64, signed: llvm::Bool) -> ValueRef {
582 if cx.unreachable.get() {
583 let ccx = cx.fcx.ccx;
584 let ty = val_ty(pointer_val);
585 let eltty = if ty.kind() == llvm::Array {
586 ty.element_type()
587 } else {
588 ccx.int_type()
589 };
590 unsafe {
591 llvm::LLVMGetUndef(eltty.to_ref())
592 }
593 } else {
594 B(cx).load_range_assert(pointer_val, lo, hi, signed)
595 }
596}
597
85aaf69f
SL
598pub fn LoadNonNull(cx: Block, ptr: ValueRef) -> ValueRef {
599 if cx.unreachable.get() {
600 let ccx = cx.fcx.ccx;
601 let ty = val_ty(ptr);
602 let eltty = if ty.kind() == llvm::Array {
603 ty.element_type()
604 } else {
605 ccx.int_type()
606 };
607 unsafe {
608 llvm::LLVMGetUndef(eltty.to_ref())
609 }
610 } else {
611 B(cx).load_nonnull(ptr)
612 }
613}
614
d9579d0f
AL
615pub fn Store(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
616 if cx.unreachable.get() { return C_nil(cx.ccx()); }
1a4d82fc
JJ
617 B(cx).store(val, ptr)
618}
619
d9579d0f
AL
620pub fn VolatileStore(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
621 if cx.unreachable.get() { return C_nil(cx.ccx()); }
1a4d82fc
JJ
622 B(cx).volatile_store(val, ptr)
623}
624
625pub fn AtomicStore(cx: Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
626 if cx.unreachable.get() { return; }
627 B(cx).atomic_store(val, ptr, order)
628}
629
630pub fn GEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
631 unsafe {
632 if cx.unreachable.get() {
633 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
634 }
635 B(cx).gep(pointer, indices)
636 }
637}
638
639// Simple wrapper around GEP that takes an array of ints and wraps them
640// in C_i32()
641#[inline]
c34b1796 642pub fn GEPi(cx: Block, base: ValueRef, ixs: &[usize]) -> ValueRef {
1a4d82fc
JJ
643 unsafe {
644 if cx.unreachable.get() {
645 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
646 }
647 B(cx).gepi(base, ixs)
648 }
649}
650
651pub fn InBoundsGEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
652 unsafe {
653 if cx.unreachable.get() {
654 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
655 }
656 B(cx).inbounds_gep(pointer, indices)
657 }
658}
659
c34b1796 660pub fn StructGEP(cx: Block, pointer: ValueRef, idx: usize) -> ValueRef {
1a4d82fc
JJ
661 unsafe {
662 if cx.unreachable.get() {
663 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
664 }
665 B(cx).struct_gep(pointer, idx)
666 }
667}
668
669pub fn GlobalString(cx: Block, _str: *const c_char) -> ValueRef {
670 unsafe {
671 if cx.unreachable.get() {
672 return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
673 }
674 B(cx).global_string(_str)
675 }
676}
677
678pub fn GlobalStringPtr(cx: Block, _str: *const c_char) -> ValueRef {
679 unsafe {
680 if cx.unreachable.get() {
681 return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
682 }
683 B(cx).global_string_ptr(_str)
684 }
685}
686
687/* Casts */
688pub fn Trunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
689 unsafe {
690 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
691 B(cx).trunc(val, dest_ty)
692 }
693}
694
695pub fn ZExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
696 unsafe {
697 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
698 B(cx).zext(val, dest_ty)
699 }
700}
701
702pub fn SExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
703 unsafe {
704 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
705 B(cx).sext(val, dest_ty)
706 }
707}
708
709pub fn FPToUI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
710 unsafe {
711 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
712 B(cx).fptoui(val, dest_ty)
713 }
714}
715
716pub fn FPToSI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
717 unsafe {
718 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
719 B(cx).fptosi(val, dest_ty)
720 }
721}
722
723pub fn UIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
724 unsafe {
725 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
726 B(cx).uitofp(val, dest_ty)
727 }
728}
729
730pub fn SIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
731 unsafe {
732 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
733 B(cx).sitofp(val, dest_ty)
734 }
735}
736
737pub fn FPTrunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
738 unsafe {
739 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
740 B(cx).fptrunc(val, dest_ty)
741 }
742}
743
744pub fn FPExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
745 unsafe {
746 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
747 B(cx).fpext(val, dest_ty)
748 }
749}
750
751pub fn PtrToInt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
752 unsafe {
753 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
754 B(cx).ptrtoint(val, dest_ty)
755 }
756}
757
758pub fn IntToPtr(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
759 unsafe {
760 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
761 B(cx).inttoptr(val, dest_ty)
762 }
763}
764
765pub fn BitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
766 unsafe {
767 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
768 B(cx).bitcast(val, dest_ty)
769 }
770}
771
772pub fn ZExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
773 unsafe {
774 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
775 B(cx).zext_or_bitcast(val, dest_ty)
776 }
777}
778
779pub fn SExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
780 unsafe {
781 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
782 B(cx).sext_or_bitcast(val, dest_ty)
783 }
784}
785
786pub fn TruncOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
787 unsafe {
788 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
789 B(cx).trunc_or_bitcast(val, dest_ty)
790 }
791}
792
793pub fn Cast(cx: Block, op: Opcode, val: ValueRef, dest_ty: Type,
794 _: *const u8)
795 -> ValueRef {
796 unsafe {
797 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
798 B(cx).cast(op, val, dest_ty)
799 }
800}
801
802pub fn PointerCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
803 unsafe {
804 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
805 B(cx).pointercast(val, dest_ty)
806 }
807}
808
809pub fn IntCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
810 unsafe {
811 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
812 B(cx).intcast(val, dest_ty)
813 }
814}
815
816pub fn FPCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
817 unsafe {
818 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
819 B(cx).fpcast(val, dest_ty)
820 }
821}
822
823
824/* Comparisons */
85aaf69f
SL
825pub fn ICmp(cx: Block,
826 op: IntPredicate,
827 lhs: ValueRef,
828 rhs: ValueRef,
829 debug_loc: DebugLoc)
830 -> ValueRef {
1a4d82fc
JJ
831 unsafe {
832 if cx.unreachable.get() {
833 return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
834 }
85aaf69f 835 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
836 B(cx).icmp(op, lhs, rhs)
837 }
838}
839
85aaf69f
SL
840pub fn FCmp(cx: Block,
841 op: RealPredicate,
842 lhs: ValueRef,
843 rhs: ValueRef,
844 debug_loc: DebugLoc)
845 -> ValueRef {
1a4d82fc
JJ
846 unsafe {
847 if cx.unreachable.get() {
848 return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
849 }
85aaf69f 850 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
851 B(cx).fcmp(op, lhs, rhs)
852 }
853}
854
855/* Miscellaneous instructions */
856pub fn EmptyPhi(cx: Block, ty: Type) -> ValueRef {
857 unsafe {
858 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
859 B(cx).empty_phi(ty)
860 }
861}
862
863pub fn Phi(cx: Block, ty: Type, vals: &[ValueRef],
864 bbs: &[BasicBlockRef]) -> ValueRef {
865 unsafe {
866 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
867 B(cx).phi(ty, vals, bbs)
868 }
869}
870
871pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
872 unsafe {
873 if llvm::LLVMIsUndef(phi) == llvm::True { return; }
874 llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
875 }
876}
877
878pub fn _UndefReturn(cx: Block, fn_: ValueRef) -> ValueRef {
879 unsafe {
880 let ccx = cx.fcx.ccx;
881 let ty = val_ty(fn_);
882 let retty = if ty.kind() == llvm::Function {
883 ty.return_type()
884 } else {
885 ccx.int_type()
886 };
887 B(cx).count_insn("ret_undef");
888 llvm::LLVMGetUndef(retty.to_ref())
889 }
890}
891
// Attaches a comment carrying the source span `sp` plus `text` to the
// block via its builder.
pub fn add_span_comment(cx: Block, sp: Span, text: &str) {
    B(cx).add_span_comment(sp, text)
}

// Attaches a plain comment `text` to the block via its builder.
pub fn add_comment(cx: Block, text: &str) {
    B(cx).add_comment(text)
}

// Emits an inline-assembly call through the builder. Note: unlike most
// wrappers in this file, this one does NOT check cx.unreachable — it
// always delegates.
pub fn InlineAsmCall(cx: Block, asm: *const c_char, cons: *const c_char,
                     inputs: &[ValueRef], output: Type,
                     volatile: bool, alignstack: bool,
                     dia: AsmDialect) -> ValueRef {
    B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}
906
85aaf69f
SL
907pub fn Call(cx: Block,
908 fn_: ValueRef,
909 args: &[ValueRef],
910 attributes: Option<AttrBuilder>,
911 debug_loc: DebugLoc)
912 -> ValueRef {
913 if cx.unreachable.get() {
914 return _UndefReturn(cx, fn_);
915 }
916 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
917 B(cx).call(fn_, args, attributes)
918}
919
85aaf69f
SL
920pub fn CallWithConv(cx: Block,
921 fn_: ValueRef,
922 args: &[ValueRef],
923 conv: CallConv,
924 attributes: Option<AttrBuilder>,
925 debug_loc: DebugLoc)
926 -> ValueRef {
927 if cx.unreachable.get() {
928 return _UndefReturn(cx, fn_);
929 }
930 debug_loc.apply(cx.fcx);
1a4d82fc
JJ
931 B(cx).call_with_conv(fn_, args, conv, attributes)
932}
933
d9579d0f 934pub fn AtomicFence(cx: Block, order: AtomicOrdering, scope: SynchronizationScope) {
1a4d82fc 935 if cx.unreachable.get() { return; }
d9579d0f 936 B(cx).atomic_fence(order, scope)
1a4d82fc
JJ
937}
938
939pub fn Select(cx: Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
940 if cx.unreachable.get() { return _Undef(then); }
941 B(cx).select(if_, then, else_)
942}
943
944pub fn VAArg(cx: Block, list: ValueRef, ty: Type) -> ValueRef {
945 unsafe {
946 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
947 B(cx).va_arg(list, ty)
948 }
949}
950
951pub fn ExtractElement(cx: Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
952 unsafe {
953 if cx.unreachable.get() {
954 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
955 }
956 B(cx).extract_element(vec_val, index)
957 }
958}
959
960pub fn InsertElement(cx: Block, vec_val: ValueRef, elt_val: ValueRef,
961 index: ValueRef) -> ValueRef {
962 unsafe {
963 if cx.unreachable.get() {
964 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
965 }
966 B(cx).insert_element(vec_val, elt_val, index)
967 }
968}
969
970pub fn ShuffleVector(cx: Block, v1: ValueRef, v2: ValueRef,
971 mask: ValueRef) -> ValueRef {
972 unsafe {
973 if cx.unreachable.get() {
974 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
975 }
976 B(cx).shuffle_vector(v1, v2, mask)
977 }
978}
979
c34b1796 980pub fn VectorSplat(cx: Block, num_elts: usize, elt_val: ValueRef) -> ValueRef {
1a4d82fc
JJ
981 unsafe {
982 if cx.unreachable.get() {
983 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
984 }
985 B(cx).vector_splat(num_elts, elt_val)
986 }
987}
988
c34b1796 989pub fn ExtractValue(cx: Block, agg_val: ValueRef, index: usize) -> ValueRef {
1a4d82fc
JJ
990 unsafe {
991 if cx.unreachable.get() {
992 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
993 }
994 B(cx).extract_value(agg_val, index)
995 }
996}
997
c34b1796 998pub fn InsertValue(cx: Block, agg_val: ValueRef, elt_val: ValueRef, index: usize) -> ValueRef {
1a4d82fc
JJ
999 unsafe {
1000 if cx.unreachable.get() {
1001 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
1002 }
1003 B(cx).insert_value(agg_val, elt_val, index)
1004 }
1005}
1006
1007pub fn IsNull(cx: Block, val: ValueRef) -> ValueRef {
1008 unsafe {
1009 if cx.unreachable.get() {
1010 return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
1011 }
1012 B(cx).is_null(val)
1013 }
1014}
1015
1016pub fn IsNotNull(cx: Block, val: ValueRef) -> ValueRef {
1017 unsafe {
1018 if cx.unreachable.get() {
1019 return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
1020 }
1021 B(cx).is_not_null(val)
1022 }
1023}
1024
1025pub fn PtrDiff(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
1026 unsafe {
1027 let ccx = cx.fcx.ccx;
1028 if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
1029 B(cx).ptrdiff(lhs, rhs)
1030 }
1031}
1032
1033pub fn Trap(cx: Block) {
1034 if cx.unreachable.get() { return; }
1035 B(cx).trap();
1036}
1037
// Emits a `landingpad` instruction with personality function `pers_fn`.
// Unlike most wrappers here, a diverged block is a hard error (assert)
// rather than silently ignored: landing pads are only generated on live
// unwind edges.
pub fn LandingPad(cx: Block, ty: Type, pers_fn: ValueRef,
                  num_clauses: usize) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).landing_pad(ty, pers_fn, num_clauses)
}

// Marks `landing_pad` as a cleanup pad via the builder.
pub fn SetCleanup(cx: Block, landing_pad: ValueRef) {
    B(cx).set_cleanup(landing_pad)
}

// Terminates the block by resuming unwinding with exception value `exn`.
pub fn Resume(cx: Block, exn: ValueRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "Resume");
    B(cx).resume(exn)
}
1054
1055// Atomic Operations
1056pub fn AtomicCmpXchg(cx: Block, dst: ValueRef,
1057 cmp: ValueRef, src: ValueRef,
1058 order: AtomicOrdering,
1059 failure_order: AtomicOrdering) -> ValueRef {
1060 B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order)
1061}
1062pub fn AtomicRMW(cx: Block, op: AtomicBinOp,
1063 dst: ValueRef, src: ValueRef,
1064 order: AtomicOrdering) -> ValueRef {
1065 B(cx).atomic_rmw(op, dst, src, order)
1066}