src/librustc_trans/mir/block.rs (rustc.git, upstream version 1.12.0+dfsg1)
1// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
11use llvm::{self, ValueRef};
12use rustc_const_eval::ErrKind;
13use rustc::middle::lang_items;
14use rustc::ty;
15use rustc::mir::repr as mir;
16use abi::{Abi, FnType, ArgType};
17use adt;
18use base;
19use build;
20use callee::{Callee, CalleeData, Fn, Intrinsic, NamedTupleConstructor, Virtual};
21use common::{self, Block, BlockAndBuilder, LandingPad};
22use common::{C_bool, C_str_slice, C_struct, C_u32, C_undef};
23use consts;
24use debuginfo::DebugLoc;
25use Disr;
26use expr;
27use machine::{llalign_of_min, llbitsize_of_real};
28use meth;
29use type_of;
30use glue;
31use type_::Type;
32
33use rustc_data_structures::fnv::FnvHashMap;
34use syntax::parse::token;
35
36use super::{MirContext, LocalRef};
37use super::analyze::CleanupKind;
38use super::constant::Const;
39use super::lvalue::{LvalueRef, load_fat_ptr};
40use super::operand::OperandRef;
41use super::operand::OperandValue::*;
42
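// Translation of MIR basic blocks into LLVM IR. `trans_block` lowers each
// block's statements and then its terminator (goto, if, switch, drop, assert,
// call, return, resume, unreachable), taking care of cleanup/funclet edges for
// unwinding on both the GNU landing-pad and MSVC SEH schemes.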
43impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
44 pub fn trans_block(&mut self, bb: mir::BasicBlock) {
45 let mut bcx = self.bcx(bb);
46 let mir = self.mir.clone();
47 let data = &mir[bb];
48
49 debug!("trans_block({:?}={:?})", bb, data);
50
51 // Create the cleanup bundle, if needed.
52 let cleanup_pad = bcx.lpad().and_then(|lp| lp.cleanuppad());
53 let cleanup_bundle = bcx.lpad().and_then(|l| l.bundle());
54
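        // Branch to the target block, using `cleanupret` instead of a plain
        // branch when we are currently inside an MSVC-style cleanup funclet.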
55 let funclet_br = |this: &Self, bcx: BlockAndBuilder, bb: mir::BasicBlock| {
56 let lltarget = this.blocks[bb].llbb;
57 if let Some(cp) = cleanup_pad {
58 match this.cleanup_kinds[bb] {
59 CleanupKind::Funclet => {
60 // micro-optimization: generate a `ret` rather than a jump
61 // to a return block
62 bcx.cleanup_ret(cp, Some(lltarget));
63 }
64 CleanupKind::Internal { .. } => bcx.br(lltarget),
65 CleanupKind::NotCleanup => bug!("jump from cleanup bb to bb {:?}", bb)
66 }
67 } else {
68 bcx.br(lltarget);
69 }
70        };
71
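        // Resolve the LLVM basic block to jump to for `target`, inserting an
        // intermediate block where a direct branch is not allowed: a
        // `cleanupret` trampoline for MSVC cross-funclet jumps, or a GNU
        // landing pad when branching from normal code into a cleanup block.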
72 let llblock = |this: &mut Self, target: mir::BasicBlock| {
73 let lltarget = this.blocks[target].llbb;
74
75 if let Some(cp) = cleanup_pad {
76 match this.cleanup_kinds[target] {
77 CleanupKind::Funclet => {
78 // MSVC cross-funclet jump - need a trampoline
79
80 debug!("llblock: creating cleanup trampoline for {:?}", target);
81 let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
82 let trampoline = this.fcx.new_block(name, None).build();
83 trampoline.set_personality_fn(this.fcx.eh_personality());
84 trampoline.cleanup_ret(cp, Some(lltarget));
85 trampoline.llbb()
86 }
87 CleanupKind::Internal { .. } => lltarget,
88 CleanupKind::NotCleanup =>
89 bug!("jump from cleanup bb {:?} to bb {:?}", bb, target)
90 }
91 } else {
92 if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
93 (this.cleanup_kinds[bb], this.cleanup_kinds[target])
94 {
95 // jump *into* cleanup - need a landing pad if GNU
96 this.landing_pad_to(target).llbb
97 } else {
98 lltarget
99 }
100 }
101 };
102
103 for statement in &data.statements {
104 bcx = self.trans_statement(bcx, statement);
105 }
106
107 let terminator = data.terminator();
108 debug!("trans_block: terminator: {:?}", terminator);
109
110 let span = terminator.source_info.span;
111 let debug_loc = self.debug_loc(terminator.source_info);
112 debug_loc.apply_to_bcx(&bcx);
113 debug_loc.apply(bcx.fcx());
114 match terminator.kind {
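            // `Resume` re-raises the exception currently being unwound:
            // `cleanupret` without a target under MSVC SEH, otherwise reload
            // the saved landing-pad value and resume unwinding with it.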
115 mir::TerminatorKind::Resume => {
116 if let Some(cleanup_pad) = cleanup_pad {
117 bcx.cleanup_ret(cleanup_pad, None);
118 } else {
119 let ps = self.get_personality_slot(&bcx);
120 let lp = bcx.load(ps);
121 bcx.with_block(|bcx| {
122 base::call_lifetime_end(bcx, ps);
123 base::trans_unwind_resume(bcx, lp);
124 });
125 }
126 }
127
128 mir::TerminatorKind::Goto { target } => {
129                funclet_br(self, bcx, target);
130 }
131
132 mir::TerminatorKind::If { ref cond, targets: (true_bb, false_bb) } => {
133 let cond = self.trans_operand(&bcx, cond);
134
135 let lltrue = llblock(self, true_bb);
136 let llfalse = llblock(self, false_bb);
137 bcx.cond_br(cond.immediate(), lltrue, llfalse);
138 }
139
140 mir::TerminatorKind::Switch { ref discr, ref adt_def, ref targets } => {
141 let discr_lvalue = self.trans_lvalue(&bcx, discr);
142 let ty = discr_lvalue.ty.to_ty(bcx.tcx());
143 let repr = adt::represent_type(bcx.ccx(), ty);
144 let discr = bcx.with_block(|bcx|
145 adt::trans_get_discr(bcx, &repr, discr_lvalue.llval, None, true)
146 );
147
148 let mut bb_hist = FnvHashMap();
149 for target in targets {
150 *bb_hist.entry(target).or_insert(0) += 1;
151 }
152 let (default_bb, default_blk) = match bb_hist.iter().max_by_key(|&(_, c)| c) {
153                    // If a single target basic block is predominant, promote that to be the
154 // default case for the switch instruction to reduce the size of the generated
155 // code. This is especially helpful in cases like an if-let on a huge enum.
156 // Note: This optimization is only valid for exhaustive matches.
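                    // Illustrative example (not from this crate): for
                    //
                    //     if let Huge::V2(x) = val { ... }
                    //
                    // every variant except `Huge::V2` targets the same "else"
                    // block, so that block wins the histogram above and becomes
                    // the switch default, leaving a single explicit case.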
157 Some((&&bb, &c)) if c > targets.len() / 2 => {
158                        (Some(bb), llblock(self, bb))
159 }
160 // We're generating an exhaustive switch, so the else branch
161 // can't be hit. Branching to an unreachable instruction
162 // lets LLVM know this
163                    _ => (None, self.unreachable_block().llbb)
164                };
165                let switch = bcx.switch(discr, default_blk, targets.len());
166                assert_eq!(adt_def.variants.len(), targets.len());
167 for (adt_variant, &target) in adt_def.variants.iter().zip(targets) {
168 if default_bb != Some(target) {
169                        let llbb = llblock(self, target);
170 let llval = bcx.with_block(|bcx| adt::trans_case(
171 bcx, &repr, Disr::from(adt_variant.disr_val)));
172 build::AddCase(switch, llval, llbb)
173 }
174 }
175 }
176
177 mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
178 let (otherwise, targets) = targets.split_last().unwrap();
179 let discr = bcx.load(self.trans_lvalue(&bcx, discr).llval);
180 let discr = bcx.with_block(|bcx| base::to_immediate(bcx, discr, switch_ty));
181                let switch = bcx.switch(discr, llblock(self, *otherwise), values.len());
182                for (value, target) in values.iter().zip(targets) {
183                    let val = Const::from_constval(bcx.ccx(), value.clone(), switch_ty);
184                    let llbb = llblock(self, *target);
185                    build::AddCase(switch, val.llval, llbb)
186 }
187 }
188
189 mir::TerminatorKind::Return => {
190 let ret = bcx.fcx().fn_ty.ret;
191 if ret.is_ignore() || ret.is_indirect() {
192 bcx.ret_void();
193 return;
194 }
195
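                // If the ABI returns the value through a cast type, make sure
                // the value lives in memory (spilling immediates/pairs to a
                // scratch alloca if needed), then reload it through a pointer
                // cast to the ABI type with a safe minimum alignment.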
196 let llval = if let Some(cast_ty) = ret.cast {
197 let index = mir.local_index(&mir::Lvalue::ReturnPointer).unwrap();
198 let op = match self.locals[index] {
199 LocalRef::Operand(Some(op)) => op,
200 LocalRef::Operand(None) => bug!("use of return before def"),
201 LocalRef::Lvalue(tr_lvalue) => {
202 OperandRef {
203 val: Ref(tr_lvalue.llval),
204 ty: tr_lvalue.ty.to_ty(bcx.tcx())
205 }
206 }
207 };
208 let llslot = match op.val {
209 Immediate(_) | Pair(..) => {
210 let llscratch = build::AllocaFcx(bcx.fcx(), ret.original_ty, "ret");
211 self.store_operand(&bcx, llscratch, op);
212 llscratch
213 }
214 Ref(llval) => llval
215 };
216 let load = bcx.load(bcx.pointercast(llslot, cast_ty.ptr_to()));
217 let llalign = llalign_of_min(bcx.ccx(), ret.ty);
218 unsafe {
219 llvm::LLVMSetAlignment(load, llalign);
220 }
221 load
222 } else {
223 let op = self.trans_consume(&bcx, &mir::Lvalue::ReturnPointer);
224 op.pack_if_pair(&bcx).immediate()
225 };
226 bcx.ret(llval);
227 }
228
229 mir::TerminatorKind::Unreachable => {
230 bcx.unreachable();
231 }
232
233            mir::TerminatorKind::Drop { ref location, target, unwind } => {
234                let ty = location.ty(&mir, bcx.tcx()).to_ty(bcx.tcx());
235 let ty = bcx.monomorphize(&ty);
236
237                // Double-check whether this type actually needs a drop.
238                if !glue::type_needs_drop(bcx.tcx(), ty) {
239                    funclet_br(self, bcx, target);
240 return;
241 }
242
243 let lvalue = self.trans_lvalue(&bcx, location);
244                let drop_fn = glue::get_drop_glue(bcx.ccx(), ty);
245                let drop_ty = glue::get_drop_glue_type(bcx.tcx(), ty);
246 let is_sized = common::type_is_sized(bcx.tcx(), ty);
247 let llvalue = if is_sized {
248 if drop_ty != ty {
249 bcx.pointercast(lvalue.llval, type_of::type_of(bcx.ccx(), drop_ty).ptr_to())
250 } else {
251 lvalue.llval
252 }
253                } else {
254 // FIXME(#36457) Currently drop glue takes sized
255 // values as a `*(data, meta)`, but elsewhere in
256 // MIR we pass `(data, meta)` as two separate
257 // arguments. It would be better to fix drop glue,
258 // but I am shooting for a quick fix to #35546
259 // here that can be cleanly backported to beta, so
260 // I want to avoid touching all of trans.
261 bcx.with_block(|bcx| {
262 let scratch = base::alloc_ty(bcx, ty, "drop");
263 base::call_lifetime_start(bcx, scratch);
264 build::Store(bcx, lvalue.llval, expr::get_dataptr(bcx, scratch));
265 build::Store(bcx, lvalue.llextra, expr::get_meta(bcx, scratch));
266 scratch
267 })
268 };
269 if let Some(unwind) = unwind {
270 bcx.invoke(drop_fn,
271 &[llvalue],
272 self.blocks[target].llbb,
273 llblock(self, unwind),
274 cleanup_bundle);
275 } else {
276 bcx.call(drop_fn, &[llvalue], cleanup_bundle);
277 funclet_br(self, bcx, target);
278 }
279 }
280
281 mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
282 let cond = self.trans_operand(&bcx, cond).immediate();
283 let mut const_cond = common::const_to_opt_uint(cond).map(|c| c == 1);
284
285 // This case can currently arise only from functions marked
286 // with #[rustc_inherit_overflow_checks] and inlined from
287 // another crate (mostly core::num generic/#[inline] fns),
288 // while the current crate doesn't use overflow checks.
289 // NOTE: Unlike binops, negation doesn't have its own
290 // checked operation, just a comparison with the minimum
291 // value, so we have to check for the assert message.
292 if !bcx.ccx().check_overflow() {
293 use rustc_const_math::ConstMathErr::Overflow;
294 use rustc_const_math::Op::Neg;
295
296 if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
297 const_cond = Some(expected);
298 }
299 }
300
301                // Don't translate the panic block if success is known.
302 if const_cond == Some(expected) {
303 funclet_br(self, bcx, target);
304 return;
305 }
306
307 // Pass the condition through llvm.expect for branch hinting.
308 let expect = bcx.ccx().get_intrinsic(&"llvm.expect.i1");
309 let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx(), expected)], None);
310
311 // Create the failure block and the conditional branch to it.
312 let lltarget = llblock(self, target);
313 let panic_block = self.fcx.new_block("panic", None);
314 if expected {
315 bcx.cond_br(cond, lltarget, panic_block.llbb);
316 } else {
317 bcx.cond_br(cond, panic_block.llbb, lltarget);
318 }
319
320 // After this point, bcx is the block for the call to panic.
321 bcx = panic_block.build();
322                debug_loc.apply_to_bcx(&bcx);
323
324 // Get the location information.
325 let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
326 let filename = token::intern_and_get_ident(&loc.file.name);
327 let filename = C_str_slice(bcx.ccx(), filename);
328 let line = C_u32(bcx.ccx(), loc.line as u32);
329
330 // Put together the arguments to the panic entry point.
331 let (lang_item, args, const_err) = match *msg {
332 mir::AssertMessage::BoundsCheck { ref len, ref index } => {
333 let len = self.trans_operand(&mut bcx, len).immediate();
334 let index = self.trans_operand(&mut bcx, index).immediate();
335
336 let const_err = common::const_to_opt_uint(len).and_then(|len| {
337 common::const_to_opt_uint(index).map(|index| {
338 ErrKind::IndexOutOfBounds {
339 len: len,
340 index: index
341 }
342 })
343 });
344
345 let file_line = C_struct(bcx.ccx(), &[filename, line], false);
346 let align = llalign_of_min(bcx.ccx(), common::val_ty(file_line));
347 let file_line = consts::addr_of(bcx.ccx(),
348 file_line,
349 align,
350 "panic_bounds_check_loc");
351 (lang_items::PanicBoundsCheckFnLangItem,
352 vec![file_line, index, len],
353 const_err)
354 }
355 mir::AssertMessage::Math(ref err) => {
356 let msg_str = token::intern_and_get_ident(err.description());
357 let msg_str = C_str_slice(bcx.ccx(), msg_str);
358 let msg_file_line = C_struct(bcx.ccx(),
359 &[msg_str, filename, line],
360 false);
361 let align = llalign_of_min(bcx.ccx(), common::val_ty(msg_file_line));
362 let msg_file_line = consts::addr_of(bcx.ccx(),
363 msg_file_line,
364 align,
365 "panic_loc");
366 (lang_items::PanicFnLangItem,
367 vec![msg_file_line],
368 Some(ErrKind::Math(err.clone())))
369 }
370 };
371
372 // If we know we always panic, and the error message
373 // is also constant, then we can produce a warning.
374 if const_cond == Some(!expected) {
375 if let Some(err) = const_err {
376 let _ = consts::const_err(bcx.ccx(), span,
377 Err::<(), _>(err),
378 consts::TrueConst::No);
379 }
380 }
381
382 // Obtain the panic entry point.
383 let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
384 let callee = Callee::def(bcx.ccx(), def_id,
385 bcx.ccx().empty_substs_for_def_id(def_id));
386 let llfn = callee.reify(bcx.ccx()).val;
387
388 // Translate the actual panic invoke/call.
389 if let Some(unwind) = cleanup {
390 bcx.invoke(llfn,
391 &args,
392 self.unreachable_block().llbb,
393 llblock(self, unwind),
394 cleanup_bundle);
395                } else {
396 bcx.call(llfn, &args, cleanup_bundle);
397 bcx.unreachable();
398 }
399 }
400
401 mir::TerminatorKind::DropAndReplace { .. } => {
402 bug!("undesugared DropAndReplace in trans: {:?}", data);
403 }
404
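            // Function calls: resolve the callee (fn item, fn pointer, or
            // intrinsic), special-case `move_val_init` and `transmute`,
            // compute the ABI-level `FnType`, prepare the return destination,
            // lower the arguments (untupling the trailing tuple for
            // "rust-call" functions), and finally emit either an `invoke`
            // (when there is a cleanup target) or a plain `call`.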
405 mir::TerminatorKind::Call { ref func, ref args, ref destination, ref cleanup } => {
406 // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
407 let callee = self.trans_operand(&bcx, func);
408
409 let (mut callee, abi, sig) = match callee.ty.sty {
410 ty::TyFnDef(def_id, substs, f) => {
411 (Callee::def(bcx.ccx(), def_id, substs), f.abi, &f.sig)
412 }
413 ty::TyFnPtr(f) => {
414 (Callee {
415 data: Fn(callee.immediate()),
416 ty: callee.ty
417 }, f.abi, &f.sig)
418 }
419 _ => bug!("{} is not callable", callee.ty)
420 };
421
422 let sig = bcx.tcx().erase_late_bound_regions(sig);
423
424                // Handle, ourselves, the intrinsics that old trans wants Expr's for.
425 let intrinsic = match (&callee.ty.sty, &callee.data) {
426 (&ty::TyFnDef(def_id, _, _), &Intrinsic) => {
427 Some(bcx.tcx().item_name(def_id).as_str())
428 }
429 _ => None
430 };
431 let intrinsic = intrinsic.as_ref().map(|s| &s[..]);
432
433 if intrinsic == Some("move_val_init") {
434 let &(_, target) = destination.as_ref().unwrap();
435 // The first argument is a thin destination pointer.
436 let llptr = self.trans_operand(&bcx, &args[0]).immediate();
437 let val = self.trans_operand(&bcx, &args[1]);
438 self.store_operand(&bcx, llptr, val);
439                    funclet_br(self, bcx, target);
440 return;
441 }
442
443 if intrinsic == Some("transmute") {
444 let &(ref dest, target) = destination.as_ref().unwrap();
445 self.with_lvalue_ref(&bcx, dest, |this, dest| {
446 this.trans_transmute(&bcx, &args[0], dest);
447 });
448
449                    funclet_br(self, bcx, target);
450 return;
451 }
452
453 let extra_args = &args[sig.inputs.len()..];
454 let extra_args = extra_args.iter().map(|op_arg| {
455                    let op_ty = op_arg.ty(&self.mir, bcx.tcx());
456                    bcx.monomorphize(&op_ty)
457 }).collect::<Vec<_>>();
458 let fn_ty = callee.direct_fn_type(bcx.ccx(), &extra_args);
459
460 // The arguments we'll be passing. Plus one to account for outptr, if used.
461 let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
462 let mut llargs = Vec::with_capacity(arg_count);
463
464 // Prepare the return value destination
465 let ret_dest = if let Some((ref dest, _)) = *destination {
466 let is_intrinsic = if let Intrinsic = callee.data {
467 true
468 } else {
469 false
470 };
471 self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs, is_intrinsic)
472 } else {
473 ReturnDest::Nothing
474 };
475
476 // Split the rust-call tupled arguments off.
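                // For example (illustrative): a closure invoked through
                // `Fn::call(&f, (a, b))` reaches here with `args` ending in
                // the tuple operand `(a, b)`, which is flattened into separate
                // LLVM arguments by `trans_arguments_untupled` below.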
477 let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
478 let (tup, args) = args.split_last().unwrap();
479 (args, Some(tup))
480 } else {
481 (&args[..], None)
482 };
483
484 let is_shuffle = intrinsic.map_or(false, |name| {
485 name.starts_with("simd_shuffle")
486 });
487 let mut idx = 0;
488 for arg in first_args {
489 // The indices passed to simd_shuffle* in the
490 // third argument must be constant. This is
491 // checked by const-qualification, which also
492 // promotes any complex rvalues to constants.
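                // For example (illustrative):
                //
                //     simd_shuffle4(a, b, [0u32, 1, 4, 5])
                //
                // The index array must reach trans as an `Operand::Constant`;
                // a non-promoted value would hit the `span_bug!` below.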
493 if is_shuffle && idx == 2 {
494 match *arg {
495 mir::Operand::Consume(_) => {
496                                span_bug!(span, "shuffle indices must be constant");
497 }
498 mir::Operand::Constant(ref constant) => {
499 let val = self.trans_constant(&bcx, constant);
500 llargs.push(val.llval);
501 idx += 1;
502 continue;
503 }
504 }
505 }
506
507 let op = self.trans_operand(&bcx, arg);
508 self.trans_argument(&bcx, op, &mut llargs, &fn_ty,
509 &mut idx, &mut callee.data);
510 }
511 if let Some(tup) = untuple {
512 self.trans_arguments_untupled(&bcx, tup, &mut llargs, &fn_ty,
513 &mut idx, &mut callee.data)
514 }
515
516 let fn_ptr = match callee.data {
517 NamedTupleConstructor(_) => {
518 // FIXME translate this like mir::Rvalue::Aggregate.
519 callee.reify(bcx.ccx()).val
520 }
521 Intrinsic => {
522 use callee::ArgVals;
523 use expr::{Ignore, SaveIn};
524 use intrinsic::trans_intrinsic_call;
525
526 let (dest, llargs) = match ret_dest {
527 _ if fn_ty.ret.is_indirect() => {
528 (SaveIn(llargs[0]), &llargs[1..])
529 }
530 ReturnDest::Nothing => (Ignore, &llargs[..]),
531 ReturnDest::IndirectOperand(dst, _) |
532 ReturnDest::Store(dst) => (SaveIn(dst), &llargs[..]),
533 ReturnDest::DirectOperand(_) =>
534 bug!("Cannot use direct operand with an intrinsic call")
535 };
536
537 bcx.with_block(|bcx| {
538 trans_intrinsic_call(bcx, callee.ty, &fn_ty,
539 ArgVals(llargs), dest,
540                                                  debug_loc);
541 });
542
543 if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
544 // Make a fake operand for store_return
545 let op = OperandRef {
546                                val: Ref(dst),
547                                ty: sig.output,
548 };
549 self.store_return(&bcx, ret_dest, fn_ty.ret, op);
550 }
551
552 if let Some((_, target)) = *destination {
553                            funclet_br(self, bcx, target);
554 } else {
555 // trans_intrinsic_call already used Unreachable.
556 // bcx.unreachable();
557 }
558
559 return;
560 }
561 Fn(f) => f,
562 Virtual(_) => bug!("Virtual fn ptr not extracted")
563 };
564
565 // Many different ways to call a function handled here
566                if let &Some(cleanup) = cleanup {
567                    let ret_bcx = if let Some((_, target)) = *destination {
568                        self.blocks[target]
569 } else {
570 self.unreachable_block()
571 };
572 let invokeret = bcx.invoke(fn_ptr,
573 &llargs,
574 ret_bcx.llbb,
575 llblock(self, cleanup),
576 cleanup_bundle);
577 fn_ty.apply_attrs_callsite(invokeret);
578
579 if destination.is_some() {
580 let ret_bcx = ret_bcx.build();
581 ret_bcx.at_start(|ret_bcx| {
582                            debug_loc.apply_to_bcx(ret_bcx);
583                            let op = OperandRef {
584                                val: Immediate(invokeret),
585                                ty: sig.output,
586 };
587 self.store_return(&ret_bcx, ret_dest, fn_ty.ret, op);
588 });
589 }
590 } else {
591                    let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
592 fn_ty.apply_attrs_callsite(llret);
593 if let Some((_, target)) = *destination {
594 let op = OperandRef {
595                            val: Immediate(llret),
596                            ty: sig.output,
597 };
598 self.store_return(&bcx, ret_dest, fn_ty.ret, op);
599                        funclet_br(self, bcx, target);
600                    } else {
601 bcx.unreachable();
602 }
603 }
604 }
605 }
606 }
607
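    // Lowers a single argument operand into `llargs` according to `fn_ty`:
    // fat pointers are split into their data and metadata halves (extracting
    // the concrete method from the vtable for virtual calls), padding is
    // filled with `undef`, ignored arguments are skipped, and values are
    // spilled or reloaded (with bool-to-i1 truncation and pointer casts) to
    // match the ABI's by-value or by-ref expectations.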
608 fn trans_argument(&mut self,
609 bcx: &BlockAndBuilder<'bcx, 'tcx>,
610                      op: OperandRef<'tcx>,
611 llargs: &mut Vec<ValueRef>,
612 fn_ty: &FnType,
613 next_idx: &mut usize,
614 callee: &mut CalleeData) {
615 if let Pair(a, b) = op.val {
616 // Treat the values in a fat pointer separately.
617 if common::type_is_fat_ptr(bcx.tcx(), op.ty) {
618 let (ptr, meta) = (a, b);
619 if *next_idx == 0 {
620 if let Virtual(idx) = *callee {
621 let llfn = bcx.with_block(|bcx| {
622 meth::get_virtual_method(bcx, meta, idx)
623 });
624 let llty = fn_ty.llvm_type(bcx.ccx()).ptr_to();
625 *callee = Fn(bcx.pointercast(llfn, llty));
626 }
627                }
628
629 let imm_op = |x| OperandRef {
630 val: Immediate(x),
631 // We won't be checking the type again.
632 ty: bcx.tcx().types.err
633 };
634 self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, callee);
635 self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, callee);
636 return;
637            }
638 }
639
640 let arg = &fn_ty.args[*next_idx];
641 *next_idx += 1;
642
643 // Fill padding with undef value, where applicable.
644 if let Some(ty) = arg.pad {
645 llargs.push(C_undef(ty));
646 }
647
648 if arg.is_ignore() {
649 return;
650 }
651
652 // Force by-ref if we have to load through a cast pointer.
653 let (mut llval, by_ref) = match op.val {
654 Immediate(_) | Pair(..) => {
655 if arg.is_indirect() || arg.cast.is_some() {
656 let llscratch = build::AllocaFcx(bcx.fcx(), arg.original_ty, "arg");
657 self.store_operand(bcx, llscratch, op);
658 (llscratch, true)
659 } else {
660 (op.pack_if_pair(bcx).immediate(), false)
661 }
662            }
663            Ref(llval) => (llval, true)
664 };
665
666 if by_ref && !arg.is_indirect() {
667 // Have to load the argument, maybe while casting it.
668 if arg.original_ty == Type::i1(bcx.ccx()) {
669 // We store bools as i8 so we need to truncate to i1.
670 llval = bcx.load_range_assert(llval, 0, 2, llvm::False);
671 llval = bcx.trunc(llval, arg.original_ty);
672 } else if let Some(ty) = arg.cast {
673 llval = bcx.load(bcx.pointercast(llval, ty.ptr_to()));
674 let llalign = llalign_of_min(bcx.ccx(), arg.ty);
675 unsafe {
676 llvm::LLVMSetAlignment(llval, llalign);
677 }
678 } else {
679 llval = bcx.load(llval);
680 }
681 }
682
683 llargs.push(llval);
684 }
685
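    // Flattens the tupled trailing argument of a "rust-call" function into
    // individual arguments, handling tuples that live in memory (`Ref`) as
    // well as immediate and pair representations.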
686 fn trans_arguments_untupled(&mut self,
687 bcx: &BlockAndBuilder<'bcx, 'tcx>,
688 operand: &mir::Operand<'tcx>,
689 llargs: &mut Vec<ValueRef>,
690 fn_ty: &FnType,
691 next_idx: &mut usize,
692 callee: &mut CalleeData) {
693        let tuple = self.trans_operand(bcx, operand);
694
695        let arg_types = match tuple.ty.sty {
696            ty::TyTuple(ref tys) => tys,
697 _ => span_bug!(self.mir.span,
698 "bad final argument to \"rust-call\" fn {:?}", tuple.ty)
699 };
700
701 // Handle both by-ref and immediate tuples.
702 match tuple.val {
703 Ref(llval) => {
704 let base_repr = adt::represent_type(bcx.ccx(), tuple.ty);
705 let base = adt::MaybeSizedValue::sized(llval);
706 for (n, &ty) in arg_types.iter().enumerate() {
707 let ptr = adt::trans_field_ptr_builder(bcx, &base_repr, base, Disr(0), n);
708 let val = if common::type_is_fat_ptr(bcx.tcx(), ty) {
709 let (lldata, llextra) = load_fat_ptr(bcx, ptr);
710                        Pair(lldata, llextra)
711 } else {
712 // trans_argument will load this if it needs to
713 Ref(ptr)
714 };
715 let op = OperandRef {
716 val: val,
717 ty: ty
718 };
719 self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
720 }
721
722 }
723 Immediate(llval) => {
724 for (n, &ty) in arg_types.iter().enumerate() {
725 let mut elem = bcx.extract_value(llval, n);
726 // Truncate bools to i1, if needed
727 if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) {
728 elem = bcx.trunc(elem, Type::i1(bcx.ccx()));
729 }
730 // If the tuple is immediate, the elements are as well
731 let op = OperandRef {
732 val: Immediate(elem),
733 ty: ty
734 };
735 self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
736 }
737 }
738 Pair(a, b) => {
739 let elems = [a, b];
740 for (n, &ty) in arg_types.iter().enumerate() {
741 let mut elem = elems[n];
742 // Truncate bools to i1, if needed
743 if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) {
744 elem = bcx.trunc(elem, Type::i1(bcx.ccx()));
745 }
746 // Pair is always made up of immediates
747 let op = OperandRef {
748 val: Immediate(elem),
749 ty: ty
750 };
751 self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
752 }
753 }
754        }
755
756 }
757
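    // Returns (allocating on first use) the stack slot that landing pads use
    // to stash the `{ i8*, i32 }` value produced by the personality function,
    // so that `Resume` can reload it later.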
758 fn get_personality_slot(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>) -> ValueRef {
759 let ccx = bcx.ccx();
760 if let Some(slot) = self.llpersonalityslot {
761 slot
762 } else {
763 let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
764 bcx.with_block(|bcx| {
765 let slot = base::alloca(bcx, llretty, "personalityslot");
766 self.llpersonalityslot = Some(slot);
767 base::call_lifetime_start(bcx, slot);
768 slot
769 })
770 }
771 }
772
773    /// Return the landingpad wrapper around the given basic block
774 ///
775 /// No-op in MSVC SEH scheme.
776    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> Block<'bcx, 'tcx>
777    {
778 if let Some(block) = self.landing_pads[target_bb] {
779 return block;
780 }
781
782 if base::wants_msvc_seh(self.fcx.ccx.sess()) {
783 return self.blocks[target_bb];
784        }
785
786 let target = self.bcx(target_bb);
787
788 let block = self.fcx.new_block("cleanup", None);
789 self.landing_pads[target_bb] = Some(block);
790
791 let bcx = block.build();
792 let ccx = bcx.ccx();
793 let llpersonality = self.fcx.eh_personality();
794 let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
795 let llretval = bcx.landing_pad(llretty, llpersonality, 1, self.fcx.llfn);
796 bcx.set_cleanup(llretval);
797 let slot = self.get_personality_slot(&bcx);
798 bcx.store(llretval, slot);
799 bcx.br(target.llbb());
800 block
801 }
802
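    // Sets up the exception-handling state for a block before its statements
    // are translated: no landing pad for normal blocks, a GNU-style landing
    // pad when not targeting MSVC SEH, and a `cleanuppad` funclet (shared with
    // its parent funclet for `Internal` cleanup blocks) otherwise.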
803    pub fn init_cpad(&mut self, bb: mir::BasicBlock) {
804        let bcx = self.bcx(bb);
805 let data = &self.mir[bb];
806 debug!("init_cpad({:?})", data);
807
808 match self.cleanup_kinds[bb] {
809 CleanupKind::NotCleanup => {
810 bcx.set_lpad(None)
811 }
812 _ if !base::wants_msvc_seh(bcx.sess()) => {
813 bcx.set_lpad(Some(LandingPad::gnu()))
814 }
815 CleanupKind::Internal { funclet } => {
816 // FIXME: is this needed?
817 bcx.set_personality_fn(self.fcx.eh_personality());
818 bcx.set_lpad_ref(self.bcx(funclet).lpad());
819 }
820 CleanupKind::Funclet => {
821 bcx.set_personality_fn(self.fcx.eh_personality());
822 DebugLoc::None.apply_to_bcx(&bcx);
823 let cleanup_pad = bcx.cleanup_pad(None, &[]);
824 bcx.set_lpad(Some(LandingPad::msvc(cleanup_pad)));
825 }
826        };
827 }
828
829 fn unreachable_block(&mut self) -> Block<'bcx, 'tcx> {
830 self.unreachable_block.unwrap_or_else(|| {
831 let bl = self.fcx.new_block("unreachable", None);
832 bl.build().unreachable();
833 self.unreachable_block = Some(bl);
834 bl
835 })
836 }
837
838 fn bcx(&self, bb: mir::BasicBlock) -> BlockAndBuilder<'bcx, 'tcx> {
839        self.blocks[bb].build()
840 }
841
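    // Decides where a call's return value should go: nowhere if it is ignored,
    // pushed as an out-pointer argument for indirect returns, stored into an
    // lvalue's alloca, or written back into an operand local (possibly via a
    // temporary alloca for intrinsics and indirect returns).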
842 fn make_return_dest(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
843 dest: &mir::Lvalue<'tcx>, fn_ret_ty: &ArgType,
844 llargs: &mut Vec<ValueRef>, is_intrinsic: bool) -> ReturnDest {
845 // If the return is ignored, we can just return a do-nothing ReturnDest
846 if fn_ret_ty.is_ignore() {
847 return ReturnDest::Nothing;
848 }
849        let dest = if let Some(index) = self.mir.local_index(dest) {
850            let ret_ty = self.monomorphized_lvalue_ty(dest);
851 match self.locals[index] {
852 LocalRef::Lvalue(dest) => dest,
853 LocalRef::Operand(None) => {
854 // Handle temporary lvalues, specifically Operand ones, as
855 // they don't have allocas
856 return if fn_ret_ty.is_indirect() {
857                        // Odd, but possible, case: we have an operand temporary,
858 // but the calling convention has an indirect return.
859 let tmp = bcx.with_block(|bcx| {
860 base::alloc_ty(bcx, ret_ty, "tmp_ret")
861 });
862 llargs.push(tmp);
863 ReturnDest::IndirectOperand(tmp, index)
864 } else if is_intrinsic {
865                        // Currently, intrinsics always need a location to store
866                        // the result, so we create a temporary alloca for the
867                        // result.
868 let tmp = bcx.with_block(|bcx| {
869 base::alloc_ty(bcx, ret_ty, "tmp_ret")
870 });
871 ReturnDest::IndirectOperand(tmp, index)
872 } else {
873 ReturnDest::DirectOperand(index)
874 };
875 }
876 LocalRef::Operand(Some(_)) => {
877 bug!("lvalue local already assigned to");
878 }
879 }
880 } else {
881 self.trans_lvalue(bcx, dest)
882 };
883 if fn_ret_ty.is_indirect() {
884 llargs.push(dest.llval);
885 ReturnDest::Nothing
886 } else {
887 ReturnDest::Store(dest.llval)
888 }
889 }
890
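    // Implements the `transmute` intrinsic by casting the destination pointer
    // to the source operand's LLVM type and storing through it; zero-sized fn
    // item values are first reified to a real function pointer when the output
    // type has a nonzero size (see the FIXME #19925 hack below).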
891 fn trans_transmute(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
892 src: &mir::Operand<'tcx>, dst: LvalueRef<'tcx>) {
893 let mut val = self.trans_operand(bcx, src);
894 if let ty::TyFnDef(def_id, substs, _) = val.ty.sty {
895 let llouttype = type_of::type_of(bcx.ccx(), dst.ty.to_ty(bcx.tcx()));
896 let out_type_size = llbitsize_of_real(bcx.ccx(), llouttype);
897 if out_type_size != 0 {
898 // FIXME #19925 Remove this hack after a release cycle.
899 let f = Callee::def(bcx.ccx(), def_id, substs);
900 let datum = f.reify(bcx.ccx());
901 val = OperandRef {
902                    val: Immediate(datum.val),
903 ty: datum.ty
904 };
905 }
906 }
907
908 let llty = type_of::type_of(bcx.ccx(), val.ty);
909 let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
910 self.store_operand(bcx, cast_ptr, val);
911 }
912
913
914    // Stores the return value of a function call into its final location.
915 fn store_return(&mut self,
916 bcx: &BlockAndBuilder<'bcx, 'tcx>,
917 dest: ReturnDest,
918 ret_ty: ArgType,
919 op: OperandRef<'tcx>) {
920 use self::ReturnDest::*;
921
922 match dest {
923 Nothing => (),
924 Store(dst) => ret_ty.store(bcx, op.immediate(), dst),
925            IndirectOperand(tmp, index) => {
926                let op = self.trans_load(bcx, tmp, op.ty);
927                self.locals[index] = LocalRef::Operand(Some(op));
928            }
929 DirectOperand(index) => {
930 // If there is a cast, we have to store and reload.
931 let op = if ret_ty.cast.is_some() {
932 let tmp = bcx.with_block(|bcx| {
933 base::alloc_ty(bcx, op.ty, "tmp_ret")
934 });
935 ret_ty.store(bcx, op.immediate(), tmp);
936 self.trans_load(bcx, tmp, op.ty)
937                } else {
938                    op.unpack_if_pair(bcx)
939                };
940                self.locals[index] = LocalRef::Operand(Some(op));
941 }
942 }
943 }
944}
945
946enum ReturnDest {
947 // Do nothing, the return value is indirect or ignored
948 Nothing,
949 // Store the return value to the pointer
950 Store(ValueRef),
951 // Stores an indirect return value to an operand local lvalue
952 IndirectOperand(ValueRef, mir::Local),
953 // Stores a direct return value to an operand local lvalue
954 DirectOperand(mir::Local)
955}