// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm::{self, ValueRef};
use rustc_const_eval::{ErrKind, ConstEvalErr, note_const_eval_err};
use rustc::middle::lang_items;
use rustc::ty;
use rustc::mir;
use abi::{Abi, FnType, ArgType};
use adt;
use base;
use build;
use callee::{Callee, CalleeData, Fn, Intrinsic, NamedTupleConstructor, Virtual};
use common::{self, Block, BlockAndBuilder, LandingPad};
use common::{C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use debuginfo::DebugLoc;
use Disr;
use machine::{llalign_of_min, llbitsize_of_real};
use meth;
use type_of;
use glue;
use type_::Type;

use rustc_data_structures::fnv::FnvHashMap;
use syntax::parse::token;

use super::{MirContext, LocalRef};
use super::analyze::CleanupKind;
use super::constant::Const;
use super::lvalue::{LvalueRef};
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

use std::cell::Ref as CellRef;
impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
    pub fn trans_block(&mut self, bb: mir::BasicBlock) {
        let mut bcx = self.bcx(bb);
        let data = &CellRef::clone(&self.mir)[bb];

        debug!("trans_block({:?}={:?})", bb, data);
        // Create the cleanup bundle, if needed.
        let cleanup_pad = bcx.lpad().and_then(|lp| lp.cleanuppad());
        let cleanup_bundle = bcx.lpad().and_then(|l| l.bundle());
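
        // NB: `cleanuppad`/`cleanupret` and operand bundles exist only under
        // LLVM's funclet-based EH (as used by MSVC SEH); with GNU-style
        // landing pads both values stay `None`.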
        let funclet_br = |this: &Self, bcx: BlockAndBuilder, bb: mir::BasicBlock| {
            let lltarget = this.blocks[bb].llbb;
            if let Some(cp) = cleanup_pad {
                match this.cleanup_kinds[bb] {
                    CleanupKind::Funclet => {
                        // micro-optimization: generate a `ret` rather than a jump
                        // to a trampoline.
                        bcx.cleanup_ret(cp, Some(lltarget));
                    }
                    CleanupKind::Internal { .. } => bcx.br(lltarget),
                    CleanupKind::NotCleanup => bug!("jump from cleanup bb to bb {:?}", bb)
                }
            } else {
                bcx.br(lltarget);
            }
        };

        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target].llbb;

            if let Some(cp) = cleanup_pad {
                match this.cleanup_kinds[target] {
                    CleanupKind::Funclet => {
                        // MSVC cross-funclet jump - need a trampoline
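                        // A direct `br` from one funclet into another is not
                        // allowed; instead we route the edge through a fresh
                        // block whose only instruction is a `cleanupret` that
                        // unwinds to the real target.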

                        debug!("llblock: creating cleanup trampoline for {:?}", target);
                        let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                        let trampoline = this.fcx.new_block(name).build();
                        trampoline.set_personality_fn(this.fcx.eh_personality());
                        trampoline.cleanup_ret(cp, Some(lltarget));
                        trampoline.llbb()
                    }
                    CleanupKind::Internal { .. } => lltarget,
                    CleanupKind::NotCleanup =>
                        bug!("jump from cleanup bb {:?} to bb {:?}", bb, target)
                }
            } else {
                if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
                    (this.cleanup_kinds[bb], this.cleanup_kinds[target])
                {
                    // jump *into* cleanup - need a landing pad if GNU
                    this.landing_pad_to(target).llbb
                } else {
                    lltarget
                }
            }
        };

        for statement in &data.statements {
            bcx = self.trans_statement(bcx, statement);
        }

        let terminator = data.terminator();
        debug!("trans_block: terminator: {:?}", terminator);

        let span = terminator.source_info.span;
        let debug_loc = self.debug_loc(terminator.source_info);
        debug_loc.apply_to_bcx(&bcx);
        debug_loc.apply(bcx.fcx());
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                if let Some(cleanup_pad) = cleanup_pad {
                    bcx.cleanup_ret(cleanup_pad, None);
                } else {
                    let ps = self.get_personality_slot(&bcx);
                    let lp = bcx.load(ps);
                    bcx.with_block(|bcx| {
                        base::call_lifetime_end(bcx, ps);
                        base::trans_unwind_resume(bcx, lp);
                    });
                }
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bcx, target);
            }

            mir::TerminatorKind::If { ref cond, targets: (true_bb, false_bb) } => {
                let cond = self.trans_operand(&bcx, cond);

                let lltrue = llblock(self, true_bb);
                let llfalse = llblock(self, false_bb);
                bcx.cond_br(cond.immediate(), lltrue, llfalse);
            }

            mir::TerminatorKind::Switch { ref discr, ref adt_def, ref targets } => {
                let discr_lvalue = self.trans_lvalue(&bcx, discr);
                let ty = discr_lvalue.ty.to_ty(bcx.tcx());
                let discr = bcx.with_block(|bcx|
                    adt::trans_get_discr(bcx, ty, discr_lvalue.llval, None, true)
                );

                let mut bb_hist = FnvHashMap();
                for target in targets {
                    *bb_hist.entry(target).or_insert(0) += 1;
                }
                let (default_bb, default_blk) = match bb_hist.iter().max_by_key(|&(_, c)| c) {
                    // If a single target basic block is predominant, promote that to be the
                    // default case for the switch instruction to reduce the size of the generated
                    // code. This is especially helpful in cases like an if-let on a huge enum.
                    // Note: This optimization is only valid for exhaustive matches.
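                    // e.g. for an if-let over a 100-variant enum, 99 targets
                    // share one "else" block; making that block the switch
                    // default leaves a single explicit case.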
                    Some((&&bb, &c)) if c > targets.len() / 2 => {
                        (Some(bb), llblock(self, bb))
                    }
                    // We're generating an exhaustive switch, so the else branch
                    // can't be hit. Branching to an unreachable instruction
                    // lets LLVM know this
                    _ => (None, self.unreachable_block().llbb)
                };
                let switch = bcx.switch(discr, default_blk, targets.len());
                assert_eq!(adt_def.variants.len(), targets.len());
                for (adt_variant, &target) in adt_def.variants.iter().zip(targets) {
                    if default_bb != Some(target) {
                        let llbb = llblock(self, target);
                        let llval = bcx.with_block(|bcx| adt::trans_case(
                            bcx, ty, Disr::from(adt_variant.disr_val)));
                        build::AddCase(switch, llval, llbb)
                    }
                }
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let (otherwise, targets) = targets.split_last().unwrap();
                let discr = bcx.load(self.trans_lvalue(&bcx, discr).llval);
                let discr = bcx.with_block(|bcx| base::to_immediate(bcx, discr, switch_ty));
                let switch = bcx.switch(discr, llblock(self, *otherwise), values.len());
                for (value, target) in values.iter().zip(targets) {
                    let val = Const::from_constval(bcx.ccx(), value.clone(), switch_ty);
                    let llbb = llblock(self, *target);
                    build::AddCase(switch, val.llval, llbb)
                }
            }

            mir::TerminatorKind::Return => {
                let ret = bcx.fcx().fn_ty.ret;
                if ret.is_ignore() || ret.is_indirect() {
                    bcx.ret_void();
                    return;
                }

                let llval = if let Some(cast_ty) = ret.cast {
                    let op = match self.locals[mir::RETURN_POINTER] {
                        LocalRef::Operand(Some(op)) => op,
                        LocalRef::Operand(None) => bug!("use of return before def"),
                        LocalRef::Lvalue(tr_lvalue) => {
                            OperandRef {
                                val: Ref(tr_lvalue.llval),
                                ty: tr_lvalue.ty.to_ty(bcx.tcx())
                            }
                        }
                    };
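                    // The value has to be reinterpreted as the cast type, so
                    // it must live in memory: spill immediates to a scratch
                    // slot, then load back through a pointer to the cast type.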
                    let llslot = match op.val {
                        Immediate(_) | Pair(..) => {
                            let llscratch = build::AllocaFcx(bcx.fcx(), ret.original_ty, "ret");
                            self.store_operand(&bcx, llscratch, op);
                            llscratch
                        }
                        Ref(llval) => llval
                    };
                    let load = bcx.load(bcx.pointercast(llslot, cast_ty.ptr_to()));
                    let llalign = llalign_of_min(bcx.ccx(), ret.ty);
                    unsafe {
                        llvm::LLVMSetAlignment(load, llalign);
                    }
                    load
                } else {
                    let op = self.trans_consume(&bcx, &mir::Lvalue::Local(mir::RETURN_POINTER));
                    op.pack_if_pair(&bcx).immediate()
                };
                bcx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bcx.unreachable();
            }

            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(&self.mir, bcx.tcx()).to_ty(bcx.tcx());
                let ty = bcx.monomorphize(&ty);

                // Double check for necessity to drop
                if !glue::type_needs_drop(bcx.tcx(), ty) {
                    funclet_br(self, bcx, target);
                    return;
                }

                let lvalue = self.trans_lvalue(&bcx, location);
                let drop_fn = glue::get_drop_glue(bcx.ccx(), ty);
                let drop_ty = glue::get_drop_glue_type(bcx.tcx(), ty);
                let is_sized = common::type_is_sized(bcx.tcx(), ty);
                let llvalue = if is_sized {
                    if drop_ty != ty {
                        bcx.pointercast(lvalue.llval, type_of::type_of(bcx.ccx(), drop_ty).ptr_to())
                    } else {
                        lvalue.llval
                    }
                } else {
                    // FIXME(#36457) Currently drop glue takes sized
                    // values as a `*(data, meta)`, but elsewhere in
                    // MIR we pass `(data, meta)` as two separate
                    // arguments. It would be better to fix drop glue,
                    // but I am shooting for a quick fix to #35546
                    // here that can be cleanly backported to beta, so
                    // I want to avoid touching all of trans.
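                    // The scratch slot below is that sized pair: the fat
                    // pointer's data and metadata are stored into it and its
                    // address is what the drop glue receives.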
                    bcx.with_block(|bcx| {
                        let scratch = base::alloc_ty(bcx, ty, "drop");
                        base::call_lifetime_start(bcx, scratch);
                        build::Store(bcx, lvalue.llval, base::get_dataptr(bcx, scratch));
                        build::Store(bcx, lvalue.llextra, base::get_meta(bcx, scratch));
                        scratch
                    })
                };
                if let Some(unwind) = unwind {
                    bcx.invoke(drop_fn,
                               &[llvalue],
                               self.blocks[target].llbb,
                               llblock(self, unwind),
                               cleanup_bundle);
                } else {
                    bcx.call(drop_fn, &[llvalue], cleanup_bundle);
                    funclet_br(self, bcx, target);
                }
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.trans_operand(&bcx, cond).immediate();
                let mut const_cond = common::const_to_opt_uint(cond).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
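                // e.g. a checked `-x` on an i32 is emitted as an assert that
                // `x != i32::MIN`, tagged with `Math(Overflow(Neg))`.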
                if !bcx.ccx().check_overflow() {
                    use rustc_const_math::ConstMathErr::Overflow;
                    use rustc_const_math::Op::Neg;

                    if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't translate the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bcx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bcx.ccx().get_intrinsic(&"llvm.expect.i1");
                let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx(), expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.fcx.new_block("panic");
                if expected {
                    bcx.cond_br(cond, lltarget, panic_block.llbb);
                } else {
                    bcx.cond_br(cond, panic_block.llbb, lltarget);
                }

                // After this point, bcx is the block for the call to panic.
                bcx = panic_block.build();
                debug_loc.apply_to_bcx(&bcx);

                // Get the location information.
                let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
                let filename = token::intern_and_get_ident(&loc.file.name);
                let filename = C_str_slice(bcx.ccx(), filename);
                let line = C_u32(bcx.ccx(), loc.line as u32);

                // Put together the arguments to the panic entry point.
                let (lang_item, args, const_err) = match *msg {
                    mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                        let len = self.trans_operand(&mut bcx, len).immediate();
                        let index = self.trans_operand(&mut bcx, index).immediate();

                        let const_err = common::const_to_opt_uint(len).and_then(|len| {
                            common::const_to_opt_uint(index).map(|index| {
                                ErrKind::IndexOutOfBounds {
                                    len: len,
                                    index: index
                                }
                            })
                        });

                        let file_line = C_struct(bcx.ccx(), &[filename, line], false);
                        let align = llalign_of_min(bcx.ccx(), common::val_ty(file_line));
                        let file_line = consts::addr_of(bcx.ccx(),
                                                        file_line,
                                                        align,
                                                        "panic_bounds_check_loc");
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line, index, len],
                         const_err)
                    }
                    mir::AssertMessage::Math(ref err) => {
                        let msg_str = token::intern_and_get_ident(err.description());
                        let msg_str = C_str_slice(bcx.ccx(), msg_str);
                        let msg_file_line = C_struct(bcx.ccx(),
                                                     &[msg_str, filename, line],
                                                     false);
                        let align = llalign_of_min(bcx.ccx(), common::val_ty(msg_file_line));
                        let msg_file_line = consts::addr_of(bcx.ccx(),
                                                            msg_file_line,
                                                            align,
                                                            "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line],
                         Some(ErrKind::Math(err.clone())))
                    }
                };

                // If we know we always panic, and the error message
                // is also constant, then we can produce a warning.
                if const_cond == Some(!expected) {
                    if let Some(err) = const_err {
                        let err = ConstEvalErr { span: span, kind: err };
                        let mut diag = bcx.tcx().sess.struct_span_warn(
                            span, "this expression will panic at run-time");
                        note_const_eval_err(bcx.tcx(), &err, span, "expression", &mut diag);
                        diag.emit();
                    }
                }

                // Obtain the panic entry point.
                let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
                let callee = Callee::def(bcx.ccx(), def_id,
                    bcx.ccx().empty_substs_for_def_id(def_id));
                let llfn = callee.reify(bcx.ccx());

                // Translate the actual panic invoke/call.
                if let Some(unwind) = cleanup {
                    bcx.invoke(llfn,
                               &args,
                               self.unreachable_block().llbb,
                               llblock(self, unwind),
                               cleanup_bundle);
                } else {
                    bcx.call(llfn, &args, cleanup_bundle);
                    bcx.unreachable();
                }
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in trans: {:?}", data);
            }

            mir::TerminatorKind::Call { ref func, ref args, ref destination, ref cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.trans_operand(&bcx, func);

                let (mut callee, abi, sig) = match callee.ty.sty {
                    ty::TyFnDef(def_id, substs, f) => {
                        (Callee::def(bcx.ccx(), def_id, substs), f.abi, &f.sig)
                    }
                    ty::TyFnPtr(f) => {
                        (Callee {
                            data: Fn(callee.immediate()),
                            ty: callee.ty
                        }, f.abi, &f.sig)
                    }
                    _ => bug!("{} is not callable", callee.ty)
                };

                let sig = bcx.tcx().erase_late_bound_regions_and_normalize(sig);

                // Handle intrinsics old trans wants Expr's for, ourselves.
                let intrinsic = match (&callee.ty.sty, &callee.data) {
                    (&ty::TyFnDef(def_id, ..), &Intrinsic) => {
                        Some(bcx.tcx().item_name(def_id).as_str())
                    }
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("move_val_init") {
                    let &(_, target) = destination.as_ref().unwrap();
                    // The first argument is a thin destination pointer.
                    let llptr = self.trans_operand(&bcx, &args[0]).immediate();
                    let val = self.trans_operand(&bcx, &args[1]);
                    self.store_operand(&bcx, llptr, val);
                    funclet_br(self, bcx, target);
                    return;
                }

                if intrinsic == Some("transmute") {
                    let &(ref dest, target) = destination.as_ref().unwrap();
                    self.with_lvalue_ref(&bcx, dest, |this, dest| {
                        this.trans_transmute(&bcx, &args[0], dest);
                    });

                    funclet_br(self, bcx, target);
                    return;
                }
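
                // `move_val_init` and `transmute` were handled above without
                // emitting a real call; everything below is the generic path.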
                let extra_args = &args[sig.inputs.len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(&self.mir, bcx.tcx());
                    bcx.monomorphize(&op_ty)
                }).collect::<Vec<_>>();
                let fn_ty = callee.direct_fn_type(bcx.ccx(), &extra_args);

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = if let Intrinsic = callee.data {
                        true
                    } else {
                        false
                    };

                    self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs, is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };
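
                // For indirect (sret) returns, make_return_dest has already
                // pushed the out-pointer as the first entry of `llargs`.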

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                let is_shuffle = intrinsic.map_or(false, |name| {
                    name.starts_with("simd_shuffle")
                });
                let mut idx = 0;
                for arg in first_args {
                    // The indices passed to simd_shuffle* in the
                    // third argument must be constant. This is
                    // checked by const-qualification, which also
                    // promotes any complex rvalues to constants.
                    if is_shuffle && idx == 2 {
                        match *arg {
                            mir::Operand::Consume(_) => {
                                span_bug!(span, "shuffle indices must be constant");
                            }
                            mir::Operand::Constant(ref constant) => {
                                let val = self.trans_constant(&bcx, constant);
                                llargs.push(val.llval);
                                idx += 1;
                                continue;
                            }
                        }
                    }

                    let op = self.trans_operand(&bcx, arg);
                    self.trans_argument(&bcx, op, &mut llargs, &fn_ty,
                                        &mut idx, &mut callee.data);
                }
                if let Some(tup) = untuple {
                    self.trans_arguments_untupled(&bcx, tup, &mut llargs, &fn_ty,
                                                  &mut idx, &mut callee.data)
                }
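
                // e.g. for a closure called as `f(a, b)` through the
                // "rust-call" ABI, MIR passes the final argument as the tuple
                // `(a, b)`; the untupling above expands it back into separate
                // `llargs` entries.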

                let fn_ptr = match callee.data {
                    NamedTupleConstructor(_) => {
                        // FIXME translate this like mir::Rvalue::Aggregate.
                        callee.reify(bcx.ccx())
                    }
                    Intrinsic => {
                        use intrinsic::trans_intrinsic_call;

                        let (dest, llargs) = match ret_dest {
                            _ if fn_ty.ret.is_indirect() => {
                                (llargs[0], &llargs[1..])
                            }
                            ReturnDest::Nothing => {
                                (C_undef(fn_ty.ret.original_ty.ptr_to()), &llargs[..])
                            }
                            ReturnDest::IndirectOperand(dst, _) |
                            ReturnDest::Store(dst) => (dst, &llargs[..]),
                            ReturnDest::DirectOperand(_) =>
                                bug!("Cannot use direct operand with an intrinsic call")
                        };

                        bcx.with_block(|bcx| {
                            trans_intrinsic_call(bcx, callee.ty, &fn_ty,
                                                 &llargs, dest, debug_loc);
                        });

                        if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                            // Make a fake operand for store_return
                            let op = OperandRef {
                                val: Ref(dst),
                                ty: sig.output,
                            };
                            self.store_return(&bcx, ret_dest, fn_ty.ret, op);
                        }

                        if let Some((_, target)) = *destination {
                            funclet_br(self, bcx, target);
                        } else {
                            // trans_intrinsic_call already used Unreachable.
                            // bcx.unreachable();
                        }

                        return;
                    }
                    Fn(f) => f,
                    Virtual(_) => bug!("Virtual fn ptr not extracted")
                };

                // Many different ways to call a function handled here
                if let &Some(cleanup) = cleanup {
                    let ret_bcx = if let Some((_, target)) = *destination {
                        self.blocks[target]
                    } else {
                        self.unreachable_block()
                    };
                    let invokeret = bcx.invoke(fn_ptr,
                                               &llargs,
                                               ret_bcx.llbb,
                                               llblock(self, cleanup),
                                               cleanup_bundle);
                    fn_ty.apply_attrs_callsite(invokeret);

                    if destination.is_some() {
                        let ret_bcx = ret_bcx.build();
                        ret_bcx.at_start(|ret_bcx| {
                            debug_loc.apply_to_bcx(ret_bcx);
                            let op = OperandRef {
                                val: Immediate(invokeret),
                                ty: sig.output,
                            };
                            self.store_return(&ret_bcx, ret_dest, fn_ty.ret, op);
                        });
                    }
                } else {
                    let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
                    fn_ty.apply_attrs_callsite(llret);
                    if let Some((_, target)) = *destination {
                        let op = OperandRef {
                            val: Immediate(llret),
                            ty: sig.output,
                        };
                        self.store_return(&bcx, ret_dest, fn_ty.ret, op);
                        funclet_br(self, bcx, target);
                    } else {
                        bcx.unreachable();
                    }
                }
            }
        }
    }
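
    /// Translates a single call argument: fat pointers are split into their
    /// data and metadata halves (extracting the llfn from the vtable for
    /// virtual calls), and immediates that must be passed by-ref or through
    /// a cast are first spilled to a scratch alloca.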
    fn trans_argument(&mut self,
                      bcx: &BlockAndBuilder<'bcx, 'tcx>,
                      op: OperandRef<'tcx>,
                      llargs: &mut Vec<ValueRef>,
                      fn_ty: &FnType,
                      next_idx: &mut usize,
                      callee: &mut CalleeData) {
        if let Pair(a, b) = op.val {
            // Treat the values in a fat pointer separately.
            if common::type_is_fat_ptr(bcx.tcx(), op.ty) {
                let (ptr, meta) = (a, b);
                if *next_idx == 0 {
                    if let Virtual(idx) = *callee {
                        let llfn = bcx.with_block(|bcx| {
                            meth::get_virtual_method(bcx, meta, idx)
                        });
                        let llty = fn_ty.llvm_type(bcx.ccx()).ptr_to();
                        *callee = Fn(bcx.pointercast(llfn, llty));
                    }
                }

                let imm_op = |x| OperandRef {
                    val: Immediate(x),
                    // We won't be checking the type again.
                    ty: bcx.tcx().types.err
                };
                self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, callee);
                self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, callee);
                return;
            }
        }
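
        // Past this point we are passing a single, non-fat-pointer value.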
        let arg = &fn_ty.args[*next_idx];
        *next_idx += 1;

        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty));
        }

        if arg.is_ignore() {
            return;
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                if arg.is_indirect() || arg.cast.is_some() {
                    let llscratch = build::AllocaFcx(bcx.fcx(), arg.original_ty, "arg");
                    self.store_operand(bcx, llscratch, op);
                    (llscratch, true)
                } else {
                    (op.pack_if_pair(bcx).immediate(), false)
                }
            }
            Ref(llval) => (llval, true)
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if arg.original_ty == Type::i1(bcx.ccx()) {
                // We store bools as i8 so we need to truncate to i1.
                llval = bcx.load_range_assert(llval, 0, 2, llvm::False);
                llval = bcx.trunc(llval, arg.original_ty);
            } else if let Some(ty) = arg.cast {
                llval = bcx.load(bcx.pointercast(llval, ty.ptr_to()));
                let llalign = llalign_of_min(bcx.ccx(), arg.ty);
                unsafe {
                    llvm::LLVMSetAlignment(llval, llalign);
                }
            } else {
                llval = bcx.load(llval);
            }
        }

        llargs.push(llval);
    }
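
    /// Expands the tuple passed as the last "rust-call" argument into
    /// individual arguments, handling by-ref, immediate and pair
    /// representations of the tuple.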
    fn trans_arguments_untupled(&mut self,
                                bcx: &BlockAndBuilder<'bcx, 'tcx>,
                                operand: &mir::Operand<'tcx>,
                                llargs: &mut Vec<ValueRef>,
                                fn_ty: &FnType,
                                next_idx: &mut usize,
                                callee: &mut CalleeData) {
        let tuple = self.trans_operand(bcx, operand);

        let arg_types = match tuple.ty.sty {
            ty::TyTuple(ref tys) => tys,
            _ => span_bug!(self.mir.span,
                           "bad final argument to \"rust-call\" fn {:?}", tuple.ty)
        };

        // Handle both by-ref and immediate tuples.
        match tuple.val {
            Ref(llval) => {
                let base = adt::MaybeSizedValue::sized(llval);
                for (n, &ty) in arg_types.iter().enumerate() {
                    let ptr = adt::trans_field_ptr_builder(bcx, tuple.ty, base, Disr(0), n);
                    let val = if common::type_is_fat_ptr(bcx.tcx(), ty) {
                        let (lldata, llextra) = base::load_fat_ptr_builder(bcx, ptr, ty);
                        Pair(lldata, llextra)
                    } else {
                        // trans_argument will load this if it needs to
                        Ref(ptr)
                    };
                    let op = OperandRef {
                        val: val,
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }
            }
            Immediate(llval) => {
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = bcx.extract_value(llval, n);
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx()));
                    }
                    // If the tuple is immediate, the elements are as well
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }
            }
            Pair(a, b) => {
                let elems = [a, b];
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = elems[n];
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx()));
                    }
                    // Pair is always made up of immediates
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }
            }
        }
    }

    fn get_personality_slot(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>) -> ValueRef {
        let ccx = bcx.ccx();
        if let Some(slot) = self.llpersonalityslot {
            slot
        } else {
            let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
            bcx.with_block(|bcx| {
                let slot = base::alloca(bcx, llretty, "personalityslot");
                self.llpersonalityslot = Some(slot);
                base::call_lifetime_start(bcx, slot);
                slot
            })
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> Block<'bcx, 'tcx>
    {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        if base::wants_msvc_seh(self.fcx.ccx.sess()) {
            return self.blocks[target_bb];
        }

        let target = self.bcx(target_bb);

        let block = self.fcx.new_block("cleanup");
        self.landing_pads[target_bb] = Some(block);

        let bcx = block.build();
        let ccx = bcx.ccx();
        let llpersonality = self.fcx.eh_personality();
        let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
        let llretval = bcx.landing_pad(llretty, llpersonality, 1, self.fcx.llfn);
        bcx.set_cleanup(llretval);
        let slot = self.get_personality_slot(&bcx);
        bcx.store(llretval, slot);
        bcx.br(target.llbb());
        block
    }
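
    /// Sets up the landing-pad or cleanup-pad state of `bb`'s builder
    /// according to its cleanup kind, so that calls translated into this
    /// block pick up the right `lpad` and operand bundle.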
    pub fn init_cpad(&mut self, bb: mir::BasicBlock) {
        let bcx = self.bcx(bb);
        let data = &self.mir[bb];
        debug!("init_cpad({:?})", data);

        match self.cleanup_kinds[bb] {
            CleanupKind::NotCleanup => {
                bcx.set_lpad(None)
            }
            _ if !base::wants_msvc_seh(bcx.sess()) => {
                bcx.set_lpad(Some(LandingPad::gnu()))
            }
            CleanupKind::Internal { funclet } => {
                // FIXME: is this needed?
                bcx.set_personality_fn(self.fcx.eh_personality());
                bcx.set_lpad_ref(self.bcx(funclet).lpad());
            }
            CleanupKind::Funclet => {
                bcx.set_personality_fn(self.fcx.eh_personality());
                DebugLoc::None.apply_to_bcx(&bcx);
                let cleanup_pad = bcx.cleanup_pad(None, &[]);
                bcx.set_lpad(Some(LandingPad::msvc(cleanup_pad)));
            }
        };
    }

    fn unreachable_block(&mut self) -> Block<'bcx, 'tcx> {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.fcx.new_block("unreachable");
            bl.build().unreachable();
            self.unreachable_block = Some(bl);
            bl
        })
    }

    fn bcx(&self, bb: mir::BasicBlock) -> BlockAndBuilder<'bcx, 'tcx> {
        self.blocks[bb].build()
    }

    fn make_return_dest(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
                        dest: &mir::Lvalue<'tcx>, fn_ret_ty: &ArgType,
                        llargs: &mut Vec<ValueRef>, is_intrinsic: bool) -> ReturnDest {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret_ty.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Lvalue::Local(index) = *dest {
            let ret_ty = self.monomorphized_lvalue_ty(dest);
            match self.locals[index] {
                LocalRef::Lvalue(dest) => dest,
                LocalRef::Operand(None) => {
                    // Handle temporary lvalues, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret_ty.is_indirect() {
                        // Odd, but possible, case, we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = bcx.with_block(|bcx| {
                            base::alloc_ty(bcx, ret_ty, "tmp_ret")
                        });
                        llargs.push(tmp);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result
                        let tmp = bcx.with_block(|bcx| {
                            base::alloc_ty(bcx, ret_ty, "tmp_ret")
                        });
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("lvalue local already assigned to");
                }
            }
        } else {
            self.trans_lvalue(bcx, dest)
        };
        if fn_ret_ty.is_indirect() {
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest.llval)
        }
    }

    fn trans_transmute(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
                       src: &mir::Operand<'tcx>, dst: LvalueRef<'tcx>) {
        let mut val = self.trans_operand(bcx, src);
        if let ty::TyFnDef(def_id, substs, _) = val.ty.sty {
            let llouttype = type_of::type_of(bcx.ccx(), dst.ty.to_ty(bcx.tcx()));
            let out_type_size = llbitsize_of_real(bcx.ccx(), llouttype);
            if out_type_size != 0 {
                // FIXME #19925 Remove this hack after a release cycle.
                let f = Callee::def(bcx.ccx(), def_id, substs);
                let ty = match f.ty.sty {
                    ty::TyFnDef(.., f) => bcx.tcx().mk_fn_ptr(f),
                    _ => f.ty
                };
                val = OperandRef {
                    val: Immediate(f.reify(bcx.ccx())),
                    ty: ty
                };
            }
        }

        let llty = type_of::type_of(bcx.ccx(), val.ty);
        let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
        self.store_operand(bcx, cast_ptr, val);
    }

    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bcx: &BlockAndBuilder<'bcx, 'tcx>,
                    dest: ReturnDest,
                    ret_ty: ArgType,
                    op: OperandRef<'tcx>) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => ret_ty.store(bcx, op.immediate(), dst),
            IndirectOperand(tmp, index) => {
                let op = self.trans_load(bcx, tmp, op.ty);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if ret_ty.cast.is_some() {
                    let tmp = bcx.with_block(|bcx| {
                        base::alloc_ty(bcx, op.ty, "tmp_ret")
                    });
                    ret_ty.store(bcx, op.immediate(), tmp);
                    self.trans_load(bcx, tmp, op.ty)
                } else {
                    op.unpack_if_pair(bcx)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(ValueRef),
    // Stores an indirect return value to an operand local lvalue
    IndirectOperand(ValueRef, mir::Local),
    // Stores a direct return value to an operand local lvalue
    DirectOperand(mir::Local)
}