// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case)]

use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect, AttrBuilder};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use trans::common::*;
use syntax::codemap::Span;

use trans::builder::Builder;
use trans::type_::Type;

use libc::{c_uint, c_char};

// Mark the block as terminated; adding further statements to it is an error.
pub fn terminate(cx: Block, _: &str) {
    debug!("terminate({})", cx.to_str());
    cx.terminated.set(true);
}

pub fn check_not_terminated(cx: Block) {
    if cx.terminated.get() {
        panic!("already terminated!");
    }
}

// Return the function's builder, positioned at the end of cx's basic block.
pub fn B<'blk, 'tcx>(cx: Block<'blk, 'tcx>) -> Builder<'blk, 'tcx> {
    let b = cx.fcx.ccx.builder();
    b.position_at_end(cx.llbb);
    b
}
// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements to the
// block is an error. On the other hand, if something is unreachable, that
// means the block was terminated in some way that we don't want to check
// for (panic/break/return statements, calls to diverging functions, etc.),
// and further instructions added to the block should simply be ignored.
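
// Illustrative sketch (not part of the original file): every terminator
// wrapper below (RetVoid, Ret, Br, ...) shares the guard pattern that this
// hypothetical ExampleTerminator spells out; non-terminator wrappers apply
// the same unreachable check but return an undef value instead of bailing.
pub fn ExampleTerminator(cx: Block) {
    if cx.unreachable.get() { return; } // block already diverged: ignore
    check_not_terminated(cx);           // emitting two terminators is a bug
    terminate(cx, "ExampleTerminator"); // record that the block now ends here
    B(cx).ret_void();                   // emit the actual LLVM instruction
}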

pub fn RetVoid(cx: Block) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "RetVoid");
    B(cx).ret_void();
}

pub fn Ret(cx: Block, v: ValueRef) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "Ret");
    B(cx).ret(v);
}

pub fn AggregateRet(cx: Block, ret_vals: &[ValueRef]) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "AggregateRet");
    B(cx).aggregate_ret(ret_vals);
}

pub fn Br(cx: Block, dest: BasicBlockRef) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "Br");
    B(cx).br(dest);
}

pub fn CondBr(cx: Block,
              if_: ValueRef,
              then: BasicBlockRef,
              else_: BasicBlockRef) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "CondBr");
    B(cx).cond_br(if_, then, else_);
}

pub fn Switch(cx: Block, v: ValueRef, else_: BasicBlockRef, num_cases: uint)
              -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    check_not_terminated(cx);
    terminate(cx, "Switch");
    B(cx).switch(v, else_, num_cases)
}

pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(s) == llvm::True { return; }
        llvm::LLVMAddCase(s, on_val, dest);
    }
}

pub fn IndirectBr(cx: Block, addr: ValueRef, num_dests: uint) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "IndirectBr");
    B(cx).indirect_br(addr, num_dests);
}

pub fn Invoke(cx: Block,
              fn_: ValueRef,
              args: &[ValueRef],
              then: BasicBlockRef,
              catch: BasicBlockRef,
              attributes: Option<AttrBuilder>)
              -> ValueRef {
    if cx.unreachable.get() {
        return C_null(Type::i8(cx.ccx()));
    }
    check_not_terminated(cx);
    terminate(cx, "Invoke");
    debug!("Invoke({} with arguments ({}))",
           cx.val_to_string(fn_),
           args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().connect(", "));
    B(cx).invoke(fn_, args, then, catch, attributes)
}

pub fn Unreachable(cx: Block) {
    if cx.unreachable.get() {
        return
    }
    cx.unreachable.set(true);
    if !cx.terminated.get() {
        B(cx).unreachable();
    }
}

pub fn _Undef(val: ValueRef) -> ValueRef {
    unsafe {
        return llvm::LLVMGetUndef(val_ty(val).to_ref());
    }
}

/* Arithmetic */
pub fn Add(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).add(lhs, rhs)
}

pub fn NSWAdd(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nswadd(lhs, rhs)
}

pub fn NUWAdd(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nuwadd(lhs, rhs)
}

pub fn FAdd(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).fadd(lhs, rhs)
}

pub fn Sub(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).sub(lhs, rhs)
}

pub fn NSWSub(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nswsub(lhs, rhs)
}

pub fn NUWSub(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nuwsub(lhs, rhs)
}

pub fn FSub(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).fsub(lhs, rhs)
}

pub fn Mul(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).mul(lhs, rhs)
}

pub fn NSWMul(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nswmul(lhs, rhs)
}

pub fn NUWMul(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nuwmul(lhs, rhs)
}

pub fn FMul(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).fmul(lhs, rhs)
}

pub fn UDiv(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).udiv(lhs, rhs)
}

pub fn SDiv(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).sdiv(lhs, rhs)
}

pub fn ExactSDiv(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).exactsdiv(lhs, rhs)
}

pub fn FDiv(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).fdiv(lhs, rhs)
}

pub fn URem(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).urem(lhs, rhs)
}

pub fn SRem(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).srem(lhs, rhs)
}

pub fn FRem(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).frem(lhs, rhs)
}

pub fn Shl(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).shl(lhs, rhs)
}

pub fn LShr(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).lshr(lhs, rhs)
}

pub fn AShr(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).ashr(lhs, rhs)
}

pub fn And(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).and(lhs, rhs)
}

pub fn Or(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).or(lhs, rhs)
}

pub fn Xor(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).xor(lhs, rhs)
}

pub fn BinOp(cx: Block, op: Opcode, lhs: ValueRef, rhs: ValueRef)
             -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).binop(op, lhs, rhs)
}

pub fn Neg(cx: Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).neg(v)
}

pub fn NSWNeg(cx: Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).nswneg(v)
}

pub fn NUWNeg(cx: Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).nuwneg(v)
}

pub fn FNeg(cx: Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).fneg(v)
}

pub fn Not(cx: Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).not(v)
}

/* Memory */
pub fn Malloc(cx: Block, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).malloc(ty)
    }
}

pub fn ArrayMalloc(cx: Block, ty: Type, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).array_malloc(ty, val)
    }
}

pub fn Alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        AllocaFcx(cx.fcx, ty, name)
    }
}

pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
    let b = fcx.ccx.builder();
    b.position_before(fcx.alloca_insert_pt.get().unwrap());
    b.alloca(ty, name)
}

pub fn ArrayAlloca(cx: Block, ty: Type, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        let b = cx.fcx.ccx.builder();
        b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
        b.array_alloca(ty, val)
    }
}

pub fn Free(cx: Block, pointer_val: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).free(pointer_val)
}

pub fn Load(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            let ty = val_ty(pointer_val);
            let eltty = if ty.kind() == llvm::Array {
                ty.element_type()
            } else {
                ccx.int_type()
            };
            return llvm::LLVMGetUndef(eltty.to_ref());
        }
        B(cx).load(pointer_val)
    }
}

pub fn VolatileLoad(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).volatile_load(pointer_val)
    }
}

pub fn AtomicLoad(cx: Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(ccx.int_type().to_ref());
        }
        B(cx).atomic_load(pointer_val, order)
    }
}

pub fn LoadRangeAssert(cx: Block, pointer_val: ValueRef, lo: u64,
                       hi: u64, signed: llvm::Bool) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(pointer_val);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_range_assert(pointer_val, lo, hi, signed)
    }
}

pub fn Store(cx: Block, val: ValueRef, ptr: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).store(val, ptr)
}

pub fn VolatileStore(cx: Block, val: ValueRef, ptr: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).volatile_store(val, ptr)
}

pub fn AtomicStore(cx: Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_store(val, ptr, order)
}

pub fn GEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gep(pointer, indices)
    }
}

// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32()
#[inline]
pub fn GEPi(cx: Block, base: ValueRef, ixs: &[uint]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gepi(base, ixs)
    }
}
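
// Hypothetical usage sketch, not in the original source: the constant GEP
// path [0, ix] computes the address of field `ix` of the struct that `base`
// points at, so this helper is shorthand for GEP with two C_i32 indices.
pub fn example_field_ptr(cx: Block, base: ValueRef, ix: uint) -> ValueRef {
    GEPi(cx, base, &[0, ix])
}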

pub fn InBoundsGEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).inbounds_gep(pointer, indices)
    }
}

pub fn StructGEP(cx: Block, pointer: ValueRef, idx: uint) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).struct_gep(pointer, idx)
    }
}

pub fn GlobalString(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string(_str)
    }
}

pub fn GlobalStringPtr(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string_ptr(_str)
    }
}

/* Casts */
pub fn Trunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc(val, dest_ty)
    }
}

pub fn ZExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext(val, dest_ty)
    }
}

pub fn SExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext(val, dest_ty)
    }
}

pub fn FPToUI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptoui(val, dest_ty)
    }
}

pub fn FPToSI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptosi(val, dest_ty)
    }
}

pub fn UIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).uitofp(val, dest_ty)
    }
}

pub fn SIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sitofp(val, dest_ty)
    }
}

pub fn FPTrunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptrunc(val, dest_ty)
    }
}

pub fn FPExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpext(val, dest_ty)
    }
}

pub fn PtrToInt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).ptrtoint(val, dest_ty)
    }
}

pub fn IntToPtr(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).inttoptr(val, dest_ty)
    }
}

pub fn BitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).bitcast(val, dest_ty)
    }
}

pub fn ZExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext_or_bitcast(val, dest_ty)
    }
}

pub fn SExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext_or_bitcast(val, dest_ty)
    }
}

pub fn TruncOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc_or_bitcast(val, dest_ty)
    }
}

pub fn Cast(cx: Block, op: Opcode, val: ValueRef, dest_ty: Type,
            _: *const u8)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).cast(op, val, dest_ty)
    }
}

pub fn PointerCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).pointercast(val, dest_ty)
    }
}

pub fn IntCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).intcast(val, dest_ty)
    }
}

pub fn FPCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpcast(val, dest_ty)
    }
}

/* Comparisons */
pub fn ICmp(cx: Block, op: IntPredicate, lhs: ValueRef, rhs: ValueRef)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).icmp(op, lhs, rhs)
    }
}

pub fn FCmp(cx: Block, op: RealPredicate, lhs: ValueRef, rhs: ValueRef)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).fcmp(op, lhs, rhs)
    }
}

/* Miscellaneous instructions */
pub fn EmptyPhi(cx: Block, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).empty_phi(ty)
    }
}

pub fn Phi(cx: Block, ty: Type, vals: &[ValueRef],
           bbs: &[BasicBlockRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).phi(ty, vals, bbs)
    }
}

pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(phi) == llvm::True { return; }
        llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
    }
}

// Produce an undef value of fn_'s return type (falling back to the int
// type); used as the result of calls emitted into unreachable blocks.
pub fn _UndefReturn(cx: Block, fn_: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(fn_);
        let retty = if ty.kind() == llvm::Function {
            ty.return_type()
        } else {
            ccx.int_type()
        };
        B(cx).count_insn("ret_undef");
        llvm::LLVMGetUndef(retty.to_ref())
    }
}

pub fn add_span_comment(cx: Block, sp: Span, text: &str) {
    B(cx).add_span_comment(sp, text)
}

pub fn add_comment(cx: Block, text: &str) {
    B(cx).add_comment(text)
}

pub fn InlineAsmCall(cx: Block, asm: *const c_char, cons: *const c_char,
                     inputs: &[ValueRef], output: Type,
                     volatile: bool, alignstack: bool,
                     dia: AsmDialect) -> ValueRef {
    B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}

pub fn Call(cx: Block, fn_: ValueRef, args: &[ValueRef],
            attributes: Option<AttrBuilder>) -> ValueRef {
    if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
    B(cx).call(fn_, args, attributes)
}

pub fn CallWithConv(cx: Block, fn_: ValueRef, args: &[ValueRef], conv: CallConv,
                    attributes: Option<AttrBuilder>) -> ValueRef {
    if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
    B(cx).call_with_conv(fn_, args, conv, attributes)
}

pub fn AtomicFence(cx: Block, order: AtomicOrdering) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_fence(order)
}

pub fn Select(cx: Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(then); }
    B(cx).select(if_, then, else_)
}

pub fn VAArg(cx: Block, list: ValueRef, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).va_arg(list, ty)
    }
}

pub fn ExtractElement(cx: Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_element(vec_val, index)
    }
}

pub fn InsertElement(cx: Block, vec_val: ValueRef, elt_val: ValueRef,
                     index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_element(vec_val, elt_val, index)
    }
}

pub fn ShuffleVector(cx: Block, v1: ValueRef, v2: ValueRef,
                     mask: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).shuffle_vector(v1, v2, mask)
    }
}

pub fn VectorSplat(cx: Block, num_elts: uint, elt_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).vector_splat(num_elts, elt_val)
    }
}

pub fn ExtractValue(cx: Block, agg_val: ValueRef, index: uint) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_value(agg_val, index)
    }
}

pub fn InsertValue(cx: Block, agg_val: ValueRef, elt_val: ValueRef, index: uint) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_value(agg_val, elt_val, index)
    }
}

pub fn IsNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_null(val)
    }
}

pub fn IsNotNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_not_null(val)
    }
}

pub fn PtrDiff(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
        B(cx).ptrdiff(lhs, rhs)
    }
}

pub fn Trap(cx: Block) {
    if cx.unreachable.get() { return; }
    B(cx).trap();
}

pub fn LandingPad(cx: Block, ty: Type, pers_fn: ValueRef,
                  num_clauses: uint) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).landing_pad(ty, pers_fn, num_clauses)
}

pub fn SetCleanup(cx: Block, landing_pad: ValueRef) {
    B(cx).set_cleanup(landing_pad)
}

pub fn Resume(cx: Block, exn: ValueRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "Resume");
    B(cx).resume(exn)
}

// Atomic Operations
pub fn AtomicCmpXchg(cx: Block, dst: ValueRef,
                     cmp: ValueRef, src: ValueRef,
                     order: AtomicOrdering,
                     failure_order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order)
}

pub fn AtomicRMW(cx: Block, op: AtomicBinOp,
                 dst: ValueRef, src: ValueRef,
                 order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_rmw(op, dst, src, order)
}