// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case)]

use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use trans::common::*;
use syntax::codemap::Span;

use trans::builder::Builder;
use trans::type_::Type;
use trans::debuginfo::DebugLoc;

use libc::{c_uint, c_char};

pub fn terminate(cx: Block, _: &str) {
    debug!("terminate({})", cx.to_str());
    cx.terminated.set(true);
}

pub fn check_not_terminated(cx: Block) {
    if cx.terminated.get() {
        panic!("already terminated!");
    }
}

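// Creates a fresh LLVM builder positioned at the end of `cx`'s basic block;
// the wrappers below construct one of these for each instruction they emit.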
pub fn B<'blk, 'tcx>(cx: Block<'blk, 'tcx>) -> Builder<'blk, 'tcx> {
    let b = cx.fcx.ccx.builder();
    b.position_at_end(cx.llbb);
    b
}

// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements in the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (panic/break/return statements, calls to diverging functions, etc), and
// further instructions added to the block should simply be ignored.
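//
// A minimal sketch (not a real wrapper in this file) of the shape that the
// wrappers below follow, assuming the same Block, DebugLoc, and Builder types
// used throughout this module:
//
//     pub fn ExampleOp(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
//         if cx.unreachable.get() {
//             return _Undef(v);          // unreachable: emit nothing, hand back undef
//         }
//         check_not_terminated(cx);      // terminators only (Ret, Br, Switch, ...)
//         terminate(cx, "ExampleOp");    // likewise terminators only
//         debug_loc.apply(cx.fcx);       // attach source-location metadata
//         B(cx).neg(v)                   // delegate to the underlying Builder
//     }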

pub fn RetVoid(cx: Block, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "RetVoid");
    debug_loc.apply(cx.fcx);
    B(cx).ret_void();
}

pub fn Ret(cx: Block, v: ValueRef, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "Ret");
    debug_loc.apply(cx.fcx);
    B(cx).ret(v);
}

pub fn AggregateRet(cx: Block,
                    ret_vals: &[ValueRef],
                    debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "AggregateRet");
    debug_loc.apply(cx.fcx);
    B(cx).aggregate_ret(ret_vals);
}

pub fn Br(cx: Block, dest: BasicBlockRef, debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "Br");
    debug_loc.apply(cx.fcx);
    B(cx).br(dest);
}

pub fn CondBr(cx: Block,
              if_: ValueRef,
              then: BasicBlockRef,
              else_: BasicBlockRef,
              debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "CondBr");
    debug_loc.apply(cx.fcx);
    B(cx).cond_br(if_, then, else_);
}

pub fn Switch(cx: Block, v: ValueRef, else_: BasicBlockRef, num_cases: usize)
              -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    check_not_terminated(cx);
    terminate(cx, "Switch");
    B(cx).switch(v, else_, num_cases)
}

pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(s) == llvm::True { return; }
        llvm::LLVMAddCase(s, on_val, dest);
    }
}

pub fn IndirectBr(cx: Block,
                  addr: ValueRef,
                  num_dests: usize,
                  debug_loc: DebugLoc) {
    if cx.unreachable.get() {
        return;
    }
    check_not_terminated(cx);
    terminate(cx, "IndirectBr");
    debug_loc.apply(cx.fcx);
    B(cx).indirect_br(addr, num_dests);
}

pub fn Invoke(cx: Block,
              fn_: ValueRef,
              args: &[ValueRef],
              then: BasicBlockRef,
              catch: BasicBlockRef,
              attributes: Option<AttrBuilder>,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return C_null(Type::i8(cx.ccx()));
    }
    check_not_terminated(cx);
    terminate(cx, "Invoke");
    debug!("Invoke({} with arguments ({}))",
           cx.val_to_string(fn_),
           args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().join(", "));
    debug_loc.apply(cx.fcx);
    let bundle = cx.lpad().and_then(|b| b.bundle());
    B(cx).invoke(fn_, args, then, catch, bundle, attributes)
}

pub fn Unreachable(cx: Block) {
    if cx.unreachable.get() {
        return
    }
    cx.unreachable.set(true);
    if !cx.terminated.get() {
        B(cx).unreachable();
    }
}

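// When a block has been marked unreachable, the value-producing wrappers below
// return an `undef` value of a plausible type instead of emitting an
// instruction, so callers can keep threading values through without
// generating any code.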
pub fn _Undef(val: ValueRef) -> ValueRef {
    unsafe {
        return llvm::LLVMGetUndef(val_ty(val).to_ref());
    }
}

/* Arithmetic */
pub fn Add(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).add(lhs, rhs)
}

pub fn NSWAdd(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswadd(lhs, rhs)
}

pub fn NUWAdd(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwadd(lhs, rhs)
}

pub fn FAdd(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fadd(lhs, rhs)
}

pub fn Sub(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).sub(lhs, rhs)
}

pub fn NSWSub(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswsub(lhs, rhs)
}

pub fn NUWSub(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwsub(lhs, rhs)
}

pub fn FSub(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fsub(lhs, rhs)
}

pub fn Mul(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).mul(lhs, rhs)
}

pub fn NSWMul(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswmul(lhs, rhs)
}

pub fn NUWMul(cx: Block,
              lhs: ValueRef,
              rhs: ValueRef,
              debug_loc: DebugLoc)
              -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwmul(lhs, rhs)
}

pub fn FMul(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fmul(lhs, rhs)
}

pub fn UDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).udiv(lhs, rhs)
}

pub fn SDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).sdiv(lhs, rhs)
}

pub fn ExactSDiv(cx: Block,
                 lhs: ValueRef,
                 rhs: ValueRef,
                 debug_loc: DebugLoc)
                 -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).exactsdiv(lhs, rhs)
}

pub fn FDiv(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fdiv(lhs, rhs)
}

pub fn URem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).urem(lhs, rhs)
}

pub fn SRem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).srem(lhs, rhs)
}

pub fn FRem(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).frem(lhs, rhs)
}

pub fn Shl(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).shl(lhs, rhs)
}

pub fn LShr(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).lshr(lhs, rhs)
}

pub fn AShr(cx: Block,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).ashr(lhs, rhs)
}

pub fn And(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).and(lhs, rhs)
}

pub fn Or(cx: Block,
          lhs: ValueRef,
          rhs: ValueRef,
          debug_loc: DebugLoc)
          -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).or(lhs, rhs)
}

pub fn Xor(cx: Block,
           lhs: ValueRef,
           rhs: ValueRef,
           debug_loc: DebugLoc)
           -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).xor(lhs, rhs)
}

pub fn BinOp(cx: Block,
             op: Opcode,
             lhs: ValueRef,
             rhs: ValueRef,
             debug_loc: DebugLoc)
             -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(lhs);
    }
    debug_loc.apply(cx.fcx);
    B(cx).binop(op, lhs, rhs)
}

pub fn Neg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).neg(v)
}

pub fn NSWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nswneg(v)
}

pub fn NUWNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).nuwneg(v)
}

pub fn FNeg(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).fneg(v)
}

pub fn Not(cx: Block, v: ValueRef, debug_loc: DebugLoc) -> ValueRef {
    if cx.unreachable.get() {
        return _Undef(v);
    }
    debug_loc.apply(cx.fcx);
    B(cx).not(v)
}

pub fn Alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        AllocaFcx(cx.fcx, ty, name)
    }
}

pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
    let b = fcx.ccx.builder();
    b.position_before(fcx.alloca_insert_pt.get().unwrap());
    DebugLoc::None.apply(fcx);
    b.alloca(ty, name)
}

pub fn Free(cx: Block, pointer_val: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).free(pointer_val)
}

pub fn Load(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            let ty = val_ty(pointer_val);
            let eltty = if ty.kind() == llvm::Array {
                ty.element_type()
            } else {
                ccx.int_type()
            };
            return llvm::LLVMGetUndef(eltty.to_ref());
        }
        B(cx).load(pointer_val)
    }
}

pub fn VolatileLoad(cx: Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).volatile_load(pointer_val)
    }
}

pub fn AtomicLoad(cx: Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(ccx.int_type().to_ref());
        }
        B(cx).atomic_load(pointer_val, order)
    }
}

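// Emits a load that the underlying builder annotates with range metadata,
// asserting (as an optimizer hint, not a checked assertion) that the loaded
// value lies in the half-open range [lo, hi) with the given signedness.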
pub fn LoadRangeAssert(cx: Block, pointer_val: ValueRef, lo: u64,
                       hi: u64, signed: llvm::Bool) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(pointer_val);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_range_assert(pointer_val, lo, hi, signed)
    }
}

pub fn LoadNonNull(cx: Block, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(ptr);
        let eltty = if ty.kind() == llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type()
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_nonnull(ptr)
    }
}

pub fn Store(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return C_nil(cx.ccx()); }
    B(cx).store(val, ptr)
}

pub fn VolatileStore(cx: Block, val: ValueRef, ptr: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return C_nil(cx.ccx()); }
    B(cx).volatile_store(val, ptr)
}

pub fn AtomicStore(cx: Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_store(val, ptr, order)
}

pub fn GEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gep(pointer, indices)
    }
}

// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32().
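// For example (illustrative only), `GEPi(cx, llval, &[0, 2])` computes the
// address of field 2 of the struct that `llval` points at, with both indices
// emitted as i32 constants.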
#[inline]
pub fn GEPi(cx: Block, base: ValueRef, ixs: &[usize]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gepi(base, ixs)
    }
}

pub fn InBoundsGEP(cx: Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).inbounds_gep(pointer, indices)
    }
}

pub fn StructGEP(cx: Block, pointer: ValueRef, idx: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).struct_gep(pointer, idx)
    }
}

pub fn GlobalString(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string(_str)
    }
}

pub fn GlobalStringPtr(cx: Block, _str: *const c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string_ptr(_str)
    }
}

/* Casts */
pub fn Trunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc(val, dest_ty)
    }
}

pub fn ZExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext(val, dest_ty)
    }
}

pub fn SExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext(val, dest_ty)
    }
}

pub fn FPToUI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptoui(val, dest_ty)
    }
}

pub fn FPToSI(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptosi(val, dest_ty)
    }
}

pub fn UIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).uitofp(val, dest_ty)
    }
}

pub fn SIToFP(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sitofp(val, dest_ty)
    }
}

pub fn FPTrunc(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptrunc(val, dest_ty)
    }
}

pub fn FPExt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpext(val, dest_ty)
    }
}

pub fn PtrToInt(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).ptrtoint(val, dest_ty)
    }
}

pub fn IntToPtr(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).inttoptr(val, dest_ty)
    }
}

pub fn BitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).bitcast(val, dest_ty)
    }
}

pub fn ZExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext_or_bitcast(val, dest_ty)
    }
}

pub fn SExtOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext_or_bitcast(val, dest_ty)
    }
}

pub fn TruncOrBitCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc_or_bitcast(val, dest_ty)
    }
}

pub fn Cast(cx: Block, op: Opcode, val: ValueRef, dest_ty: Type,
            _: *const u8)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).cast(op, val, dest_ty)
    }
}

pub fn PointerCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).pointercast(val, dest_ty)
    }
}

pub fn IntCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).intcast(val, dest_ty)
    }
}

pub fn FPCast(cx: Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpcast(val, dest_ty)
    }
}

/* Comparisons */
pub fn ICmp(cx: Block,
            op: IntPredicate,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).icmp(op, lhs, rhs)
    }
}

pub fn FCmp(cx: Block,
            op: RealPredicate,
            lhs: ValueRef,
            rhs: ValueRef,
            debug_loc: DebugLoc)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        debug_loc.apply(cx.fcx);
        B(cx).fcmp(op, lhs, rhs)
    }
}

/* Miscellaneous instructions */
pub fn EmptyPhi(cx: Block, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).empty_phi(ty)
    }
}

pub fn Phi(cx: Block, ty: Type, vals: &[ValueRef],
           bbs: &[BasicBlockRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).phi(ty, vals, bbs)
    }
}

pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(phi) == llvm::True { return; }
        llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
    }
}

pub fn _UndefReturn(cx: Block, fn_: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(fn_);
        let retty = if ty.kind() == llvm::Function {
            ty.return_type()
        } else {
            ccx.int_type()
        };
        B(cx).count_insn("ret_undef");
        llvm::LLVMGetUndef(retty.to_ref())
    }
}

pub fn add_span_comment(cx: Block, sp: Span, text: &str) {
    B(cx).add_span_comment(sp, text)
}

pub fn add_comment(cx: Block, text: &str) {
    B(cx).add_comment(text)
}

pub fn InlineAsmCall(cx: Block, asm: *const c_char, cons: *const c_char,
                     inputs: &[ValueRef], output: Type,
                     volatile: bool, alignstack: bool,
                     dia: AsmDialect) -> ValueRef {
    B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}

pub fn Call(cx: Block,
            fn_: ValueRef,
            args: &[ValueRef],
            attributes: Option<AttrBuilder>,
            debug_loc: DebugLoc)
            -> ValueRef {
    if cx.unreachable.get() {
        return _UndefReturn(cx, fn_);
    }
    debug_loc.apply(cx.fcx);
    let bundle = cx.lpad.get().and_then(|b| b.bundle());
    B(cx).call(fn_, args, bundle, attributes)
}

pub fn CallWithConv(cx: Block,
                    fn_: ValueRef,
                    args: &[ValueRef],
                    conv: CallConv,
                    attributes: Option<AttrBuilder>,
                    debug_loc: DebugLoc)
                    -> ValueRef {
    if cx.unreachable.get() {
        return _UndefReturn(cx, fn_);
    }
    debug_loc.apply(cx.fcx);
    let bundle = cx.lpad.get().and_then(|b| b.bundle());
    B(cx).call_with_conv(fn_, args, conv, bundle, attributes)
}

pub fn AtomicFence(cx: Block, order: AtomicOrdering, scope: SynchronizationScope) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_fence(order, scope)
}

pub fn Select(cx: Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(then); }
    B(cx).select(if_, then, else_)
}

pub fn VAArg(cx: Block, list: ValueRef, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).va_arg(list, ty)
    }
}

pub fn ExtractElement(cx: Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_element(vec_val, index)
    }
}

pub fn InsertElement(cx: Block, vec_val: ValueRef, elt_val: ValueRef,
                     index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_element(vec_val, elt_val, index)
    }
}

pub fn ShuffleVector(cx: Block, v1: ValueRef, v2: ValueRef,
                     mask: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).shuffle_vector(v1, v2, mask)
    }
}

pub fn VectorSplat(cx: Block, num_elts: usize, elt_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).vector_splat(num_elts, elt_val)
    }
}

pub fn ExtractValue(cx: Block, agg_val: ValueRef, index: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_value(agg_val, index)
    }
}

pub fn InsertValue(cx: Block, agg_val: ValueRef, elt_val: ValueRef, index: usize) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_value(agg_val, elt_val, index)
    }
}

pub fn IsNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_null(val)
    }
}

pub fn IsNotNull(cx: Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_not_null(val)
    }
}

pub fn PtrDiff(cx: Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type().to_ref()); }
        B(cx).ptrdiff(lhs, rhs)
    }
}

pub fn Trap(cx: Block) {
    if cx.unreachable.get() { return; }
    B(cx).trap();
}

pub fn LandingPad(cx: Block, ty: Type, pers_fn: ValueRef,
                  num_clauses: usize) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).landing_pad(ty, pers_fn, num_clauses, cx.fcx.llfn)
}

pub fn AddClause(cx: Block, landing_pad: ValueRef, clause: ValueRef) {
    B(cx).add_clause(landing_pad, clause)
}

pub fn SetCleanup(cx: Block, landing_pad: ValueRef) {
    B(cx).set_cleanup(landing_pad)
}

pub fn SetPersonalityFn(cx: Block, f: ValueRef) {
    B(cx).set_personality_fn(f)
}

pub fn Resume(cx: Block, exn: ValueRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "Resume");
    B(cx).resume(exn)
}

// Atomic Operations
pub fn AtomicCmpXchg(cx: Block, dst: ValueRef,
                     cmp: ValueRef, src: ValueRef,
                     order: AtomicOrdering,
                     failure_order: AtomicOrdering,
                     weak: llvm::Bool) -> ValueRef {
    B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order, weak)
}

pub fn AtomicRMW(cx: Block, op: AtomicBinOp,
                 dst: ValueRef, src: ValueRef,
                 order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_rmw(op, dst, src, order)
}

pub fn CleanupPad(cx: Block,
                  parent: Option<ValueRef>,
                  args: &[ValueRef]) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).cleanup_pad(parent, args)
}

pub fn CleanupRet(cx: Block,
                  cleanup: ValueRef,
                  unwind: Option<BasicBlockRef>) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CleanupRet");
    B(cx).cleanup_ret(cleanup, unwind)
}

pub fn CatchPad(cx: Block,
                parent: ValueRef,
                args: &[ValueRef]) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).catch_pad(parent, args)
}

pub fn CatchRet(cx: Block, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CatchRet");
    B(cx).catch_ret(pad, unwind)
}

pub fn CatchSwitch(cx: Block,
                   parent: Option<ValueRef>,
                   unwind: Option<BasicBlockRef>,
                   num_handlers: usize) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "CatchSwitch");
    B(cx).catch_switch(parent, unwind, num_handlers)
}

pub fn AddHandler(cx: Block, catch_switch: ValueRef, handler: BasicBlockRef) {
    B(cx).add_handler(catch_switch, handler)
}