]> git.proxmox.com Git - rustc.git/blob - src/librustc_trans/trans/builder.rs
Imported Upstream version 1.8.0+dfsg1
[rustc.git] / src / librustc_trans / trans / builder.rs
1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 #![allow(dead_code)] // FFI wrappers
12
13 use llvm;
14 use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
15 use llvm::{Opcode, IntPredicate, RealPredicate, False, OperandBundleDef};
16 use llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef};
17 use trans::base;
18 use trans::common::*;
19 use trans::machine::llalign_of_pref;
20 use trans::type_::Type;
21 use util::nodemap::FnvHashMap;
22 use libc::{c_uint, c_char};
23
24 use std::ffi::CString;
25 use std::ptr;
26 use syntax::codemap::Span;
27
/// Thin wrapper over an LLVM IR builder (`BuilderRef`) plus the crate
/// context it was obtained from; all instruction-emission helpers in
/// this module hang off this type.
pub struct Builder<'a, 'tcx: 'a> {
    // Raw LLVM builder handle; owned by the crate context (see `new`),
    // not by this struct.
    pub llbuilder: BuilderRef,
    // Crate-wide translation context, used for stats, type names, etc.
    pub ccx: &'a CrateContext<'a, 'tcx>,
}
32
// Returns a pointer to a shared empty C string. Building a fresh
// `CString` for every anonymous LLVM value would be wasteful, so all
// unnamed values share this one static NUL byte instead.
pub fn noname() -> *const c_char {
    static NUL_BYTE: c_char = 0;
    &NUL_BYTE as *const c_char
}
39
40 impl<'a, 'tcx> Builder<'a, 'tcx> {
41 pub fn new(ccx: &'a CrateContext<'a, 'tcx>) -> Builder<'a, 'tcx> {
42 Builder {
43 llbuilder: ccx.raw_builder(),
44 ccx: ccx,
45 }
46 }
47
    /// Records that one LLVM instruction of the given `category` was
    /// emitted. Bumps the global counter when `trans_stats()` is on and,
    /// when the session asks for per-instruction counts, also records a
    /// per-category histogram keyed by the current instruction-context
    /// path (with cycles collapsed) plus `category`.
    pub fn count_insn(&self, category: &str) {
        if self.ccx.sess().trans_stats() {
            self.ccx.stats().n_llvm_insns.set(self.ccx
                                              .stats()
                                              .n_llvm_insns
                                              .get() + 1);
        }
        self.ccx.count_llvm_insn();
        if self.ccx.sess().count_llvm_insns() {
            base::with_insn_ctxt(|v| {
                let mut h = self.ccx.stats().llvm_insns.borrow_mut();

                // Build version of path with cycles removed.

                // Pass 1: scan table mapping str -> rightmost pos.
                let mut mm = FnvHashMap();
                let len = v.len();
                let mut i = 0;
                while i < len {
                    mm.insert(v[i], i);
                    i += 1;
                }

                // Pass 2: concat strings for each elt, skipping
                // forwards over any cycles by advancing to rightmost
                // occurrence of each element in path.
                let mut s = String::from(".");
                i = 0;
                while i < len {
                    // Jump to the rightmost occurrence of this element,
                    // skipping any repeated (cyclic) segment in between.
                    i = mm[v[i]];
                    s.push('/');
                    s.push_str(v[i]);
                    i += 1;
                }

                s.push('/');
                s.push_str(category);

                // Increment the histogram bucket, starting at 0 for a
                // previously-unseen path/category key.
                let n = match h.get(&s) {
                    Some(&n) => n,
                    _ => 0
                };
                h.insert(s, n+1);
            })
        }
    }
94
    /// Positions the builder immediately before instruction `insn`.
    pub fn position_before(&self, insn: ValueRef) {
        unsafe {
            llvm::LLVMPositionBuilderBefore(self.llbuilder, insn);
        }
    }

    /// Positions the builder at the end of basic block `llbb`.
    pub fn position_at_end(&self, llbb: BasicBlockRef) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    /// Emits a `ret void` terminator.
    pub fn ret_void(&self) {
        self.count_insn("retvoid");
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    /// Emits a `ret` terminator returning `v`.
    pub fn ret(&self, v: ValueRef) {
        self.count_insn("ret");
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    /// Emits an aggregate `ret` of all values in `ret_vals`.
    // NOTE(review): unlike the other terminators this does not call
    // count_insn, so it is invisible to the stats — presumably an
    // oversight; confirm before relying on the counts.
    pub fn aggregate_ret(&self, ret_vals: &[ValueRef]) {
        unsafe {
            llvm::LLVMBuildAggregateRet(self.llbuilder,
                                        ret_vals.as_ptr(),
                                        ret_vals.len() as c_uint);
        }
    }

    /// Emits an unconditional branch to `dest`.
    pub fn br(&self, dest: BasicBlockRef) {
        self.count_insn("br");
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    /// Emits a conditional branch on `cond` to `then_llbb` / `else_llbb`.
    pub fn cond_br(&self, cond: ValueRef, then_llbb: BasicBlockRef, else_llbb: BasicBlockRef) {
        self.count_insn("condbr");
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    /// Emits a `switch` on `v` with `else_llbb` as the default target and
    /// room reserved for `num_cases` cases; returns the switch instruction.
    pub fn switch(&self, v: ValueRef, else_llbb: BasicBlockRef, num_cases: usize) -> ValueRef {
        unsafe {
            llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
        }
    }

    /// Emits an `indirectbr` through the address `addr`.
    pub fn indirect_br(&self, addr: ValueRef, num_dests: usize) {
        self.count_insn("indirectbr");
        unsafe {
            llvm::LLVMBuildIndirectBr(self.llbuilder, addr, num_dests as c_uint);
        }
    }
155
    /// Emits an `invoke` of `llfn` with `args`: control resumes at `then`
    /// on normal return and at `catch` on unwind. An optional operand
    /// `bundle` and call-site `attributes` are attached to the result.
    pub fn invoke(&self,
                  llfn: ValueRef,
                  args: &[ValueRef],
                  then: BasicBlockRef,
                  catch: BasicBlockRef,
                  bundle: Option<&OperandBundleDef>,
                  attributes: Option<AttrBuilder>)
                  -> ValueRef {
        self.count_insn("invoke");

        debug!("Invoke {} with args ({})",
               self.ccx.tn().val_to_string(llfn),
               args.iter()
                   .map(|&v| self.ccx.tn().val_to_string(v))
                   .collect::<Vec<String>>()
                   .join(", "));

        // Null raw pointer when no operand bundle was supplied.
        let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(0 as *mut _);

        unsafe {
            let v = llvm::LLVMRustBuildInvoke(self.llbuilder,
                                              llfn,
                                              args.as_ptr(),
                                              args.len() as c_uint,
                                              then,
                                              catch,
                                              bundle,
                                              noname());
            if let Some(a) = attributes {
                a.apply_callsite(v);
            }
            v
        }
    }
190
    /// Emits an `unreachable` terminator.
    pub fn unreachable(&self) {
        self.count_insn("unreachable");
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    /* Arithmetic */

    /// Emits an integer `add`.
    pub fn add(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("add");
        unsafe {
            llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an `add` with no-signed-wrap semantics.
    pub fn nswadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nswadd");
        unsafe {
            llvm::LLVMBuildNSWAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an `add` with no-unsigned-wrap semantics.
    pub fn nuwadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwadd");
        unsafe {
            llvm::LLVMBuildNUWAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a floating-point `fadd`.
    pub fn fadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fadd");
        unsafe {
            llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an integer `sub`.
    pub fn sub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("sub");
        unsafe {
            llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
        }
    }
233
234 pub fn nswsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
235 self.count_insn("nwsub");
236 unsafe {
237 llvm::LLVMBuildNSWSub(self.llbuilder, lhs, rhs, noname())
238 }
239 }
240
    /// Emits a `sub` with no-unsigned-wrap semantics.
    pub fn nuwsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwsub");
        unsafe {
            llvm::LLVMBuildNUWSub(self.llbuilder, lhs, rhs, noname())
        }
    }
247
248 pub fn fsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
249 self.count_insn("sub");
250 unsafe {
251 llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
252 }
253 }
254
    /// Emits an integer `mul`.
    pub fn mul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("mul");
        unsafe {
            llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a `mul` with no-signed-wrap semantics.
    pub fn nswmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nswmul");
        unsafe {
            llvm::LLVMBuildNSWMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a `mul` with no-unsigned-wrap semantics.
    pub fn nuwmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwmul");
        unsafe {
            llvm::LLVMBuildNUWMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a floating-point `fmul`.
    pub fn fmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fmul");
        unsafe {
            llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an unsigned integer division.
    pub fn udiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("udiv");
        unsafe {
            llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a signed integer division.
    pub fn sdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("sdiv");
        unsafe {
            llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an exact signed division (undefined if it would round).
    pub fn exactsdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("exactsdiv");
        unsafe {
            llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a floating-point `fdiv`.
    pub fn fdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fdiv");
        unsafe {
            llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an unsigned remainder.
    pub fn urem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("urem");
        unsafe {
            llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a signed remainder.
    pub fn srem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("srem");
        unsafe {
            llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a floating-point remainder.
    pub fn frem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("frem");
        unsafe {
            llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a left shift.
    pub fn shl(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("shl");
        unsafe {
            llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a logical (zero-filling) right shift.
    pub fn lshr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("lshr");
        unsafe {
            llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an arithmetic (sign-extending) right shift.
    pub fn ashr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("ashr");
        unsafe {
            llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a bitwise `and`.
    pub fn and(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("and");
        unsafe {
            llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a bitwise `or`.
    pub fn or(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("or");
        unsafe {
            llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a bitwise `xor`.
    pub fn xor(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("xor");
        unsafe {
            llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an arbitrary binary operation selected by `op`.
    pub fn binop(&self, op: Opcode, lhs: ValueRef, rhs: ValueRef)
                 -> ValueRef {
        self.count_insn("binop");
        unsafe {
            llvm::LLVMBuildBinOp(self.llbuilder, op, lhs, rhs, noname())
        }
    }

    /// Emits an integer negation.
    pub fn neg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("neg");
        unsafe {
            llvm::LLVMBuildNeg(self.llbuilder, v, noname())
        }
    }

    /// Emits a negation with no-signed-wrap semantics.
    pub fn nswneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("nswneg");
        unsafe {
            llvm::LLVMBuildNSWNeg(self.llbuilder, v, noname())
        }
    }

    /// Emits a negation with no-unsigned-wrap semantics.
    pub fn nuwneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("nuwneg");
        unsafe {
            llvm::LLVMBuildNUWNeg(self.llbuilder, v, noname())
        }
    }
    /// Emits a floating-point negation.
    pub fn fneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("fneg");
        unsafe {
            llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
        }
    }

    /// Emits a bitwise `not`.
    pub fn not(&self, v: ValueRef) -> ValueRef {
        self.count_insn("not");
        unsafe {
            llvm::LLVMBuildNot(self.llbuilder, v, noname())
        }
    }
415
416 pub fn alloca(&self, ty: Type, name: &str) -> ValueRef {
417 self.count_insn("alloca");
418 unsafe {
419 if name.is_empty() {
420 llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname())
421 } else {
422 let name = CString::new(name).unwrap();
423 llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(),
424 name.as_ptr())
425 }
426 }
427 }
428
    /// Emits a `free` of the heap pointer `ptr`.
    pub fn free(&self, ptr: ValueRef) {
        self.count_insn("free");
        unsafe {
            llvm::LLVMBuildFree(self.llbuilder, ptr);
        }
    }

    /// Emits a plain `load` from `ptr`.
    pub fn load(&self, ptr: ValueRef) -> ValueRef {
        self.count_insn("load");
        unsafe {
            llvm::LLVMBuildLoad(self.llbuilder, ptr, noname())
        }
    }

    /// Emits a `load` from `ptr` marked volatile.
    pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef {
        self.count_insn("load.volatile");
        unsafe {
            let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetVolatile(insn, llvm::True);
            insn
        }
    }

    /// Emits an atomic load from `ptr` with the given memory `order`;
    /// alignment is taken from the pointee type's preferred alignment.
    pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering) -> ValueRef {
        self.count_insn("load.atomic");
        unsafe {
            let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
            let align = llalign_of_pref(self.ccx, ty.element_type());
            llvm::LLVMBuildAtomicLoad(self.llbuilder, ptr, noname(), order,
                                      align as c_uint)
        }
    }


    /// Emits a `load` from `ptr` and attaches `!range` metadata asserting
    /// the loaded value lies in `[lo, hi)` (interpreted per `signed`).
    pub fn load_range_assert(&self, ptr: ValueRef, lo: u64,
                             hi: u64, signed: llvm::Bool) -> ValueRef {
        let value = self.load(ptr);

        unsafe {
            let t = llvm::LLVMGetElementType(llvm::LLVMTypeOf(ptr));
            let min = llvm::LLVMConstInt(t, lo, signed);
            let max = llvm::LLVMConstInt(t, hi, signed);

            let v = [min, max];

            llvm::LLVMSetMetadata(value, llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.ccx.llcx(),
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }

        value
    }

    /// Emits a `load` from `ptr` and attaches (empty) `!nonnull` metadata
    /// asserting the loaded pointer is never null.
    pub fn load_nonnull(&self, ptr: ValueRef) -> ValueRef {
        let value = self.load(ptr);
        unsafe {
            llvm::LLVMSetMetadata(value, llvm::MD_nonnull as c_uint,
                                  llvm::LLVMMDNodeInContext(self.ccx.llcx(), ptr::null(), 0));
        }

        value
    }

    /// Emits a `store` of `val` to `ptr`; returns the store instruction.
    pub fn store(&self, val: ValueRef, ptr: ValueRef) -> ValueRef {
        debug!("Store {} -> {}",
               self.ccx.tn().val_to_string(val),
               self.ccx.tn().val_to_string(ptr));
        assert!(!self.llbuilder.is_null());
        self.count_insn("store");
        unsafe {
            llvm::LLVMBuildStore(self.llbuilder, val, ptr)
        }
    }

    /// Emits a `store` of `val` to `ptr` marked volatile.
    pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) -> ValueRef {
        debug!("Store {} -> {}",
               self.ccx.tn().val_to_string(val),
               self.ccx.tn().val_to_string(ptr));
        assert!(!self.llbuilder.is_null());
        self.count_insn("store.volatile");
        unsafe {
            let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            llvm::LLVMSetVolatile(insn, llvm::True);
            insn
        }
    }

    /// Emits an atomic store of `val` to `ptr` with the given `order`;
    /// alignment is the pointee type's preferred alignment.
    pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
        debug!("Store {} -> {}",
               self.ccx.tn().val_to_string(val),
               self.ccx.tn().val_to_string(ptr));
        self.count_insn("store.atomic");
        unsafe {
            let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
            let align = llalign_of_pref(self.ccx, ty.element_type());
            llvm::LLVMBuildAtomicStore(self.llbuilder, val, ptr, order, align as c_uint);
        }
    }
528
    /// Emits a `getelementptr` from `ptr` with the given index values.
    pub fn gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
        self.count_insn("gep");
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, noname())
        }
    }
536
537 // Simple wrapper around GEP that takes an array of ints and wraps them
538 // in C_i32()
539 #[inline]
540 pub fn gepi(&self, base: ValueRef, ixs: &[usize]) -> ValueRef {
541 // Small vector optimization. This should catch 100% of the cases that
542 // we care about.
543 if ixs.len() < 16 {
544 let mut small_vec = [ C_i32(self.ccx, 0); 16 ];
545 for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs) {
546 *small_vec_e = C_i32(self.ccx, ix as i32);
547 }
548 self.inbounds_gep(base, &small_vec[..ixs.len()])
549 } else {
550 let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
551 self.count_insn("gepi");
552 self.inbounds_gep(base, &v[..])
553 }
554 }
555
    /// Emits an inbounds `getelementptr` from `ptr` with the given indices.
    pub fn inbounds_gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
        self.count_insn("inboundsgep");
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
        }
    }

    /// Emits a struct-field GEP selecting field `idx` of `ptr`'s pointee.
    pub fn struct_gep(&self, ptr: ValueRef, idx: usize) -> ValueRef {
        self.count_insn("structgep");
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
        }
    }

    /// Emits a global string constant from the NUL-terminated `_str`.
    pub fn global_string(&self, _str: *const c_char) -> ValueRef {
        self.count_insn("globalstring");
        unsafe {
            llvm::LLVMBuildGlobalString(self.llbuilder, _str, noname())
        }
    }

    /// Emits a global string constant and returns a pointer to its data.
    pub fn global_string_ptr(&self, _str: *const c_char) -> ValueRef {
        self.count_insn("globalstringptr");
        unsafe {
            llvm::LLVMBuildGlobalStringPtr(self.llbuilder, _str, noname())
        }
    }
584
    /* Casts */
    /// Emits an integer truncation of `val` to `dest_ty`.
    pub fn trunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("trunc");
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a zero extension of `val` to `dest_ty`.
    pub fn zext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("zext");
        unsafe {
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a sign extension of `val` to `dest_ty`.
    pub fn sext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sext");
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a float-to-unsigned-int conversion.
    pub fn fptoui(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptoui");
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a float-to-signed-int conversion.
    pub fn fptosi(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptosi");
        unsafe {
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty.to_ref(),noname())
        }
    }

    /// Emits an unsigned-int-to-float conversion.
    pub fn uitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("uitofp");
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a signed-int-to-float conversion.
    pub fn sitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sitofp");
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a floating-point truncation (e.g. f64 -> f32).
    pub fn fptrunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptrunc");
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a floating-point extension (e.g. f32 -> f64).
    pub fn fpext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fpext");
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a pointer-to-integer cast.
    pub fn ptrtoint(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("ptrtoint");
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits an integer-to-pointer cast.
    pub fn inttoptr(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("inttoptr");
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a `bitcast` of `val` to `dest_ty`.
    pub fn bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("bitcast");
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a zext or a bitcast, whichever fits the type pair.
    pub fn zext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("zextorbitcast");
        unsafe {
            llvm::LLVMBuildZExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a sext or a bitcast, whichever fits the type pair.
    pub fn sext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sextorbitcast");
        unsafe {
            llvm::LLVMBuildSExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a trunc or a bitcast, whichever fits the type pair.
    pub fn trunc_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("truncorbitcast");
        unsafe {
            llvm::LLVMBuildTruncOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits an arbitrary cast selected by opcode `op`.
    pub fn cast(&self, op: Opcode, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("cast");
        unsafe {
            llvm::LLVMBuildCast(self.llbuilder, op, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a pointer cast of `val` to pointer type `dest_ty`.
    pub fn pointercast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("pointercast");
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits an integer cast (trunc or extend as needed).
    pub fn intcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("intcast");
        unsafe {
            llvm::LLVMBuildIntCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /// Emits a floating-point cast (fptrunc or fpext as needed).
    pub fn fpcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fpcast");
        unsafe {
            llvm::LLVMBuildFPCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }


    /* Comparisons */
    /// Emits an integer comparison with predicate `op`.
    pub fn icmp(&self, op: IntPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("icmp");
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    /// Emits a floating-point comparison with predicate `op`.
    pub fn fcmp(&self, op: RealPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fcmp");
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }
734
    /* Miscellaneous instructions */
    /// Emits a `phi` of type `ty` with no incoming edges yet.
    pub fn empty_phi(&self, ty: Type) -> ValueRef {
        self.count_insn("emptyphi");
        unsafe {
            llvm::LLVMBuildPhi(self.llbuilder, ty.to_ref(), noname())
        }
    }

    /// Emits a `phi` of type `ty` with incoming value `vals[i]` from
    /// basic block `bbs[i]`. The two slices must be the same length.
    pub fn phi(&self, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
        assert_eq!(vals.len(), bbs.len());
        let phi = self.empty_phi(ty);
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }

    /// When asm comments are enabled, emits `text` plus the span's source
    /// location as an inline-asm comment (via `add_comment`).
    pub fn add_span_comment(&self, sp: Span, text: &str) {
        if self.ccx.sess().asm_comments() {
            let s = format!("{} ({})",
                            text,
                            self.ccx.sess().codemap().span_to_string(sp));
            debug!("{}", &s[..]);
            self.add_comment(&s[..]);
        }
    }

    /// When asm comments are enabled, embeds `text` in the output as a
    /// side-effect-free inline-asm "# ..." comment.
    pub fn add_comment(&self, text: &str) {
        if self.ccx.sess().asm_comments() {
            // '$' is special in LLVM asm strings, so strip it; prefix each
            // line of the text with "# " to make it an asm comment.
            let sanitized = text.replace("$", "");
            let comment_text = format!("{} {}", "#",
                                       sanitized.replace("\n", "\n\t# "));
            self.count_insn("inlineasm");
            let comment_text = CString::new(comment_text).unwrap();
            let asm = unsafe {
                llvm::LLVMConstInlineAsm(Type::func(&[], &Type::void(self.ccx)).to_ref(),
                                         comment_text.as_ptr(), noname(), False,
                                         False)
            };
            self.call(asm, &[], None, None);
        }
    }
780
    /// Builds an inline-asm value from the template `asm` and constraint
    /// string `cons`, then calls it with `inputs`; `output` is the asm's
    /// result type and `dia` selects the assembler dialect.
    pub fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
                           inputs: &[ValueRef], output: Type,
                           volatile: bool, alignstack: bool,
                           dia: AsmDialect) -> ValueRef {
        self.count_insn("inlineasm");

        // Convert the bools to LLVM's Bool representation.
        let volatile = if volatile { llvm::True }
                       else { llvm::False };
        let alignstack = if alignstack { llvm::True }
                         else { llvm::False };

        // The asm's function type is derived from the input value types.
        let argtys = inputs.iter().map(|v| {
            debug!("Asm Input Type: {}", self.ccx.tn().val_to_string(*v));
            val_ty(*v)
        }).collect::<Vec<_>>();

        debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output));
        let fty = Type::func(&argtys[..], &output);
        unsafe {
            let v = llvm::LLVMInlineAsm(
                fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
            self.call(v, inputs, None, None)
        }
    }
805
    /// Emits a `call` of `llfn` with `args`, attaching an optional operand
    /// `bundle` and call-site `attributes`. Before emitting, checks each
    /// argument's LLVM type against the callee's declared parameter types
    /// and reports an internal compiler error on mismatch.
    pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
                bundle: Option<&OperandBundleDef>,
                attributes: Option<AttrBuilder>) -> ValueRef {
        self.count_insn("call");

        debug!("Call {} with args ({})",
               self.ccx.tn().val_to_string(llfn),
               args.iter()
                   .map(|&v| self.ccx.tn().val_to_string(v))
                   .collect::<Vec<String>>()
                   .join(", "));

        let mut fn_ty = val_ty(llfn);
        // Strip off pointers
        while fn_ty.kind() == llvm::TypeKind::Pointer {
            fn_ty = fn_ty.element_type();
        }

        assert!(fn_ty.kind() == llvm::TypeKind::Function,
                "builder::call not passed a function");

        let param_tys = fn_ty.func_params();

        // Sanity-check argument types against the callee's signature so a
        // mismatch fails loudly here instead of inside LLVM.
        let iter = param_tys.into_iter()
            .zip(args.iter().map(|&v| val_ty(v)));
        for (i, (expected_ty, actual_ty)) in iter.enumerate() {
            if expected_ty != actual_ty {
                self.ccx.sess().bug(
                    &format!(
                        "Type mismatch in function call of {}. Expected {} for param {}, got {}",
                        self.ccx.tn().val_to_string(llfn),
                        self.ccx.tn().type_to_string(expected_ty),
                        i,
                        self.ccx.tn().type_to_string(actual_ty)));

            }
        }

        // Null raw pointer when no operand bundle was supplied.
        let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(0 as *mut _);

        unsafe {
            let v = llvm::LLVMRustBuildCall(self.llbuilder, llfn, args.as_ptr(),
                                            args.len() as c_uint, bundle,
                                            noname());
            if let Some(a) = attributes {
                a.apply_callsite(v);
            }
            v
        }
    }
856
    /// Like `call`, but also sets the calling convention on the emitted
    /// call instruction.
    pub fn call_with_conv(&self, llfn: ValueRef, args: &[ValueRef],
                          conv: CallConv,
                          bundle: Option<&OperandBundleDef>,
                          attributes: Option<AttrBuilder>) -> ValueRef {
        self.count_insn("callwithconv");
        let v = self.call(llfn, args, bundle, attributes);
        llvm::SetInstructionCallConv(v, conv);
        v
    }

    /// Emits a `select` of `then_val` / `else_val` on `cond`.
    pub fn select(&self, cond: ValueRef, then_val: ValueRef, else_val: ValueRef) -> ValueRef {
        self.count_insn("select");
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
        }
    }

    /// Emits a `va_arg` reading a value of type `ty` from va-list `list`.
    pub fn va_arg(&self, list: ValueRef, ty: Type) -> ValueRef {
        self.count_insn("vaarg");
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty.to_ref(), noname())
        }
    }

    /// Emits an `extractelement` of lane `idx` from vector `vec`.
    pub fn extract_element(&self, vec: ValueRef, idx: ValueRef) -> ValueRef {
        self.count_insn("extractelement");
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
        }
    }

    /// Emits an `insertelement` of `elt` into lane `idx` of `vec`.
    pub fn insert_element(&self, vec: ValueRef, elt: ValueRef, idx: ValueRef) -> ValueRef {
        self.count_insn("insertelement");
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
        }
    }

    /// Emits a `shufflevector` of `v1` and `v2` under `mask`.
    pub fn shuffle_vector(&self, v1: ValueRef, v2: ValueRef, mask: ValueRef) -> ValueRef {
        self.count_insn("shufflevector");
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
        }
    }

    /// Builds a vector of `num_elts` copies of `elt`: inserts `elt` into
    /// lane 0 of an undef vector, then shuffles with an all-zero mask so
    /// every lane reads lane 0.
    pub fn vector_splat(&self, num_elts: usize, elt: ValueRef) -> ValueRef {
        unsafe {
            let elt_ty = val_ty(elt);
            let undef = llvm::LLVMGetUndef(Type::vector(&elt_ty, num_elts as u64).to_ref());
            let vec = self.insert_element(undef, elt, C_i32(self.ccx, 0));
            let vec_i32_ty = Type::vector(&Type::i32(self.ccx), num_elts as u64);
            self.shuffle_vector(vec, undef, C_null(vec_i32_ty))
        }
    }
911
    /// Emits an `extractvalue` of field `idx` from aggregate `agg_val`.
    pub fn extract_value(&self, agg_val: ValueRef, idx: usize) -> ValueRef {
        self.count_insn("extractvalue");
        unsafe {
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
        }
    }

    /// Emits an `insertvalue` of `elt` into field `idx` of `agg_val`.
    pub fn insert_value(&self, agg_val: ValueRef, elt: ValueRef,
                        idx: usize) -> ValueRef {
        self.count_insn("insertvalue");
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
                                       noname())
        }
    }

    /// Emits an is-null test of `val`.
    pub fn is_null(&self, val: ValueRef) -> ValueRef {
        self.count_insn("isnull");
        unsafe {
            llvm::LLVMBuildIsNull(self.llbuilder, val, noname())
        }
    }

    /// Emits an is-not-null test of `val`.
    pub fn is_not_null(&self, val: ValueRef) -> ValueRef {
        self.count_insn("isnotnull");
        unsafe {
            llvm::LLVMBuildIsNotNull(self.llbuilder, val, noname())
        }
    }

    /// Emits a pointer difference (`lhs` - `rhs`, in elements).
    pub fn ptrdiff(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("ptrdiff");
        unsafe {
            llvm::LLVMBuildPtrDiff(self.llbuilder, lhs, rhs, noname())
        }
    }
948
949 pub fn trap(&self) {
950 unsafe {
951 let bb: BasicBlockRef = llvm::LLVMGetInsertBlock(self.llbuilder);
952 let fn_: ValueRef = llvm::LLVMGetBasicBlockParent(bb);
953 let m: ModuleRef = llvm::LLVMGetGlobalParent(fn_);
954 let p = "llvm.trap\0".as_ptr();
955 let t: ValueRef = llvm::LLVMGetNamedFunction(m, p as *const _);
956 assert!((t as isize != 0));
957 let args: &[ValueRef] = &[];
958 self.count_insn("trap");
959 llvm::LLVMRustBuildCall(self.llbuilder, t,
960 args.as_ptr(), args.len() as c_uint,
961 0 as *mut _,
962 noname());
963 }
964 }
965
    /// Emits a `landingpad` of type `ty` with personality `pers_fn` and
    /// room for `num_clauses` clauses; `llfn` is passed through to the
    /// Rust-side builder shim.
    pub fn landing_pad(&self, ty: Type, pers_fn: ValueRef,
                       num_clauses: usize,
                       llfn: ValueRef) -> ValueRef {
        self.count_insn("landingpad");
        unsafe {
            llvm::LLVMRustBuildLandingPad(self.llbuilder, ty.to_ref(), pers_fn,
                                          num_clauses as c_uint, noname(), llfn)
        }
    }

    /// Adds a catch/filter `clause` to an existing `landingpad`.
    pub fn add_clause(&self, landing_pad: ValueRef, clause: ValueRef) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    /// Marks a `landingpad` as a cleanup pad.
    pub fn set_cleanup(&self, landing_pad: ValueRef) {
        self.count_insn("setcleanup");
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }

    /// Emits a `resume` re-raising the in-flight exception value `exn`.
    pub fn resume(&self, exn: ValueRef) -> ValueRef {
        self.count_insn("resume");
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }
995
    /// Emits a `cleanuppad` with the given optional `parent` pad and
    /// `args`. Panics if the linked LLVM lacks cleanuppad support.
    pub fn cleanup_pad(&self,
                       parent: Option<ValueRef>,
                       args: &[ValueRef]) -> ValueRef {
        self.count_insn("cleanuppad");
        // Null pointer stands in for "no parent pad".
        let parent = parent.unwrap_or(0 as *mut _);
        let name = CString::new("cleanuppad").unwrap();
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(self.llbuilder,
                                          parent,
                                          args.len() as c_uint,
                                          args.as_ptr(),
                                          name.as_ptr())
        };
        assert!(!ret.is_null(), "LLVM does not have support for cleanuppad");
        return ret
    }

    /// Emits a `cleanupret` from `cleanup`, optionally unwinding to
    /// `unwind` (null means unwind to caller).
    pub fn cleanup_ret(&self, cleanup: ValueRef,
                       unwind: Option<BasicBlockRef>) -> ValueRef {
        self.count_insn("cleanupret");
        let unwind = unwind.unwrap_or(0 as *mut _);
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
        };
        assert!(!ret.is_null(), "LLVM does not have support for cleanupret");
        return ret
    }

    /// Emits a `catchpad` under `parent` with the given `args`. Panics if
    /// the linked LLVM lacks catchpad support.
    pub fn catch_pad(&self,
                     parent: ValueRef,
                     args: &[ValueRef]) -> ValueRef {
        self.count_insn("catchpad");
        let name = CString::new("catchpad").unwrap();
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
                                        args.len() as c_uint, args.as_ptr(),
                                        name.as_ptr())
        };
        assert!(!ret.is_null(), "LLVM does not have support for catchpad");
        return ret
    }

    /// Emits a `catchret` from `pad` continuing at `unwind`.
    pub fn catch_ret(&self, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
        self.count_insn("catchret");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
        };
        assert!(!ret.is_null(), "LLVM does not have support for catchret");
        return ret
    }

    /// Emits a `catchswitch` with optional `parent` pad and `unwind`
    /// destination (null for "none"), reserving `num_handlers` handler
    /// slots; handlers are added via `add_handler`.
    pub fn catch_switch(&self,
                        parent: Option<ValueRef>,
                        unwind: Option<BasicBlockRef>,
                        num_handlers: usize) -> ValueRef {
        self.count_insn("catchswitch");
        let parent = parent.unwrap_or(0 as *mut _);
        let unwind = unwind.unwrap_or(0 as *mut _);
        let name = CString::new("catchswitch").unwrap();
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
                                           num_handlers as c_uint,
                                           name.as_ptr())
        };
        assert!(!ret.is_null(), "LLVM does not have support for catchswitch");
        return ret
    }
1063
    /// Adds `handler` as a handler block of an existing `catchswitch`.
    pub fn add_handler(&self, catch_switch: ValueRef, handler: BasicBlockRef) {
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }

    /// Sets the personality function on the builder's current function.
    pub fn set_personality_fn(&self, personality: ValueRef) {
        unsafe {
            llvm::LLVMRustSetPersonalityFn(self.llbuilder, personality);
        }
    }

    // Atomic Operations

    /// Emits a `cmpxchg` on `dst` comparing against `cmp` and storing
    /// `src`, with separate success/failure orderings; `weak` selects the
    /// weak form that may fail spuriously.
    pub fn atomic_cmpxchg(&self, dst: ValueRef,
                          cmp: ValueRef, src: ValueRef,
                          order: AtomicOrdering,
                          failure_order: AtomicOrdering,
                          weak: llvm::Bool) -> ValueRef {
        unsafe {
            llvm::LLVMBuildAtomicCmpXchg(self.llbuilder, dst, cmp, src,
                                         order, failure_order, weak)
        }
    }
    /// Emits an `atomicrmw` applying `op` to `dst` with operand `src`.
    /// The final `False` argument requests the non-single-threaded form.
    pub fn atomic_rmw(&self, op: AtomicBinOp,
                      dst: ValueRef, src: ValueRef,
                      order: AtomicOrdering) -> ValueRef {
        unsafe {
            llvm::LLVMBuildAtomicRMW(self.llbuilder, op, dst, src, order, False)
        }
    }

    /// Emits a `fence` with the given ordering and synchronization scope.
    pub fn atomic_fence(&self, order: AtomicOrdering, scope: SynchronizationScope) {
        unsafe {
            llvm::LLVMBuildAtomicFence(self.llbuilder, order, scope);
        }
    }
1100 }