// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, ValueRef, BasicBlockRef};
use rustc::middle::lang_items;
use rustc::middle::const_val::{ConstEvalErr, ConstInt, ErrKind};
use rustc::ty::{self, TypeFoldable};
use rustc::ty::layout::{self, LayoutTyper};
use rustc::mir;
use abi::{Abi, FnType, ArgType};
use adt;
use base::{self, Lifetime};
use builder::Builder;
use callee;
use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use machine::llalign_of_min;
use meth;
use monomorphize;
use type_of;
use type_::Type;
use syntax::symbol::Symbol;

use std::cmp;

use super::{MirContext, LocalRef};
use super::constant::Const;
use super::lvalue::{Alignment, LvalueRef};
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

impl<'a, 'tcx> MirContext<'a, 'tcx> {
    pub fn trans_block(&mut self, bb: mir::BasicBlock) {
        let mut bcx = self.get_builder(bb);
        let data = &self.mir[bb];

        debug!("trans_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bcx = self.trans_statement(bcx, statement);
        }

        self.trans_terminator(bcx, bb, data.terminator());
    }
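
    // Lowers a single MIR terminator: sets up the EH funclet/cleanup state it
    // needs and then dispatches on the terminator kind.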
    fn trans_terminator(&mut self,
                        mut bcx: Builder<'a, 'tcx>,
                        bb: mir::BasicBlock,
                        terminator: &mir::Terminator<'tcx>)
    {
        debug!("trans_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let tcx = bcx.tcx();
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());

        let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
        let cleanup_bundle = funclet.map(|l| l.bundle());
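
        // Helper closures shared by the arms below: `lltarget` resolves a MIR
        // target block to an LLVM block and reports whether the edge crosses a
        // funclet boundary, `llblock` additionally creates an MSVC cleanup
        // trampoline block for such edges, and `funclet_br` emits the branch
        // itself (a `cleanupret` when leaving a funclet, a plain `br` otherwise).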
        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                    => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
                }
            }
        };

        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline
                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let trampoline = this.new_block(name);
                trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };

        let funclet_br = |this: &mut Self, bcx: Builder, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // micro-optimization: generate a `ret` rather than a jump
                // to a trampoline.
                bcx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
            } else {
                bcx.br(lltarget);
            }
        };
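
        // Emits the call itself. With a `cleanup` block this becomes an LLVM
        // `invoke` whose unwind edge targets the cleanup block; otherwise it is
        // a plain `call`. Either way the return value is routed to
        // `destination` through `store_return`.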
        let do_call = |this: &mut Self,
                       bcx: Builder<'a, 'tcx>,
                       fn_ty: FnType<'tcx>,
                       fn_ptr: ValueRef,
                       llargs: &[ValueRef],
                       destination: Option<(ReturnDest, ty::Ty<'tcx>, mir::BasicBlock)>,
                       cleanup: Option<mir::BasicBlock>| {
            if let Some(cleanup) = cleanup {
                let ret_bcx = if let Some((_, _, target)) = destination {
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
                let invokeret = bcx.invoke(fn_ptr,
                                           &llargs,
                                           ret_bcx,
                                           llblock(this, cleanup),
                                           cleanup_bundle);
                fn_ty.apply_attrs_callsite(invokeret);

                if let Some((ret_dest, ret_ty, target)) = destination {
                    let ret_bcx = this.get_builder(target);
                    this.set_debug_loc(&ret_bcx, terminator.source_info);
                    let op = OperandRef {
                        val: Immediate(invokeret),
                        ty: ret_ty,
                    };
                    this.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
                }
            } else {
                let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
                fn_ty.apply_attrs_callsite(llret);
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
                }

                if let Some((ret_dest, ret_ty, target)) = destination {
                    let op = OperandRef {
                        val: Immediate(llret),
                        ty: ret_ty,
                    };
                    this.store_return(&bcx, ret_dest, &fn_ty.ret, op);
                    funclet_br(this, bcx, target);
                } else {
                    bcx.unreachable();
                }
            }
        };

        self.set_debug_loc(&bcx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                if let Some(cleanup_pad) = cleanup_pad {
                    bcx.cleanup_ret(cleanup_pad, None);
                } else {
                    let ps = self.get_personality_slot(&bcx);
                    let lp = bcx.load(ps, None);
                    Lifetime::End.call(&bcx, ps);
                    if !bcx.sess().target.target.options.custom_unwind_resume {
                        bcx.resume(lp);
                    } else {
                        let exc_ptr = bcx.extract_value(lp, 0);
                        bcx.call(bcx.ccx.eh_unwind_resume(), &[exc_ptr], cleanup_bundle);
                        bcx.unreachable();
                    }
                }
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bcx, target);
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let discr = self.trans_operand(&bcx, discr);
                if switch_ty == bcx.tcx().types.bool {
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
                    if let [ConstInt::U8(0)] = values[..] {
                        bcx.cond_br(discr.immediate(), llfalse, lltrue);
                    } else {
                        bcx.cond_br(discr.immediate(), lltrue, llfalse);
                    }
                } else {
                    let (otherwise, targets) = targets.split_last().unwrap();
                    let switch = bcx.switch(discr.immediate(),
                                            llblock(self, *otherwise), values.len());
                    for (value, target) in values.iter().zip(targets) {
                        let val = Const::from_constint(bcx.ccx, value);
                        let llbb = llblock(self, *target);
                        bcx.add_case(switch, val.llval, llbb)
                    }
                }
            }
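
            // `Return` reads the special RETURN_POINTER local and emits `ret`,
            // spilling through a scratch slot when the ABI wants the value
            // returned as a different (cast) type.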
            mir::TerminatorKind::Return => {
                let ret = self.fn_ty.ret;
                if ret.is_ignore() || ret.is_indirect() {
                    bcx.ret_void();
                    return;
                }

                let llval = if let Some(cast_ty) = ret.cast {
                    let op = match self.locals[mir::RETURN_POINTER] {
                        LocalRef::Operand(Some(op)) => op,
                        LocalRef::Operand(None) => bug!("use of return before def"),
                        LocalRef::Lvalue(tr_lvalue) => {
                            OperandRef {
                                val: Ref(tr_lvalue.llval, tr_lvalue.alignment),
                                ty: tr_lvalue.ty.to_ty(bcx.tcx())
                            }
                        }
                    };
                    let llslot = match op.val {
                        Immediate(_) | Pair(..) => {
                            let llscratch = bcx.alloca(ret.memory_ty(bcx.ccx), "ret", None);
                            self.store_operand(&bcx, llscratch, None, op);
                            llscratch
                        }
                        Ref(llval, align) => {
                            assert_eq!(align, Alignment::AbiAligned,
                                       "return pointer is unaligned!");
                            llval
                        }
                    };
                    bcx.load(
                        bcx.pointercast(llslot, cast_ty.ptr_to()),
                        Some(ret.layout.align(bcx.ccx).abi() as u32))
                } else {
                    let op = self.trans_consume(&bcx, &mir::Lvalue::Local(mir::RETURN_POINTER));
                    if let Ref(llval, align) = op.val {
                        base::load_ty(&bcx, llval, align, op.ty)
                    } else {
                        op.pack_if_pair(&bcx).immediate()
                    }
                };
                bcx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bcx.unreachable();
            }

            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(self.mir, bcx.tcx()).to_ty(bcx.tcx());
                let ty = self.monomorphize(&ty);
                let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.shared(), ty);

                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
                    funclet_br(self, bcx, target);
                    return;
                }

                let lvalue = self.trans_lvalue(&bcx, location);
                let fn_ty = FnType::of_instance(bcx.ccx, &drop_fn);
                let (drop_fn, need_extra) = match ty.sty {
                    ty::TyDynamic(..) => (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra),
                                          false),
                    _ => (callee::get_fn(bcx.ccx, drop_fn), lvalue.has_extra())
                };
                let args = &[lvalue.llval, lvalue.llextra][..1 + need_extra as usize];
                do_call(self, bcx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, tcx.mk_nil(), target)),
                        unwind);
            }
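
            // `Assert` branches on the (llvm.expect-hinted) condition and, on
            // the failure edge, calls the appropriate panic lang item with the
            // message and source location packed into static data.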
            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.trans_operand(&bcx, cond).immediate();
                let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bcx.ccx.check_overflow() {
                    use rustc_const_math::ConstMathErr::Overflow;
                    use rustc_const_math::Op::Neg;

                    if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't translate the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bcx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bcx.ccx.get_intrinsic(&"llvm.expect.i1");
                let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx, expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
                if expected {
                    bcx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bcx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bcx is the block for the call to panic.
                bcx = panic_block;
                self.set_debug_loc(&bcx, terminator.source_info);

                // Get the location information.
                let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
                let filename = Symbol::intern(&loc.file.name).as_str();
                let filename = C_str_slice(bcx.ccx, filename);
                let line = C_u32(bcx.ccx, loc.line as u32);
                let col = C_u32(bcx.ccx, loc.col.to_usize() as u32 + 1);

                // Put together the arguments to the panic entry point.
                let (lang_item, args, const_err) = match *msg {
                    mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                        let len = self.trans_operand(&mut bcx, len).immediate();
                        let index = self.trans_operand(&mut bcx, index).immediate();

                        let const_err = common::const_to_opt_u128(len, false)
                            .and_then(|len| common::const_to_opt_u128(index, false)
                                .map(|index| ErrKind::IndexOutOfBounds {
                                    len: len as u64,
                                    index: index as u64
                                }));

                        let file_line_col = C_struct(bcx.ccx, &[filename, line, col], false);
                        let align = llalign_of_min(bcx.ccx, common::val_ty(file_line_col));
                        let file_line_col = consts::addr_of(bcx.ccx,
                                                            file_line_col,
                                                            align,
                                                            "panic_bounds_check_loc");
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line_col, index, len],
                         const_err)
                    }
                    mir::AssertMessage::Math(ref err) => {
                        let msg_str = Symbol::intern(err.description()).as_str();
                        let msg_str = C_str_slice(bcx.ccx, msg_str);
                        let msg_file_line_col = C_struct(bcx.ccx,
                                                         &[msg_str, filename, line, col],
                                                         false);
                        let align = llalign_of_min(bcx.ccx, common::val_ty(msg_file_line_col));
                        let msg_file_line_col = consts::addr_of(bcx.ccx,
                                                                msg_file_line_col,
                                                                align,
                                                                "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col],
                         Some(ErrKind::Math(err.clone())))
                    }
                };

                // If we know we always panic, and the error message
                // is also constant, then we can produce a warning.
                if const_cond == Some(!expected) {
                    if let Some(err) = const_err {
                        let err = ConstEvalErr { span: span, kind: err };
                        let mut diag = bcx.tcx().sess.struct_span_warn(
                            span, "this expression will panic at run-time");
                        err.note(bcx.tcx(), span, "expression", &mut diag);
                        diag.emit();
                    }
                }

                // Obtain the panic entry point.
                let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bcx.tcx(), def_id);
                let fn_ty = FnType::of_instance(bcx.ccx, &instance);
                let llfn = callee::get_fn(bcx.ccx, instance);

                // Translate the actual panic invoke/call.
                do_call(self, bcx, fn_ty, llfn, &args, None, cleanup);
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in trans: {:?}", terminator);
            }
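
            // `Call` is the general case: resolve the callee (fn item or fn
            // pointer), compute its FnType, marshal the arguments (untupling
            // "rust-call" ABI tuples and special-casing intrinsics), and emit
            // the call through `do_call`.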
            mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.trans_operand(&bcx, func);

                let (instance, mut llfn) = match callee.ty.sty {
                    ty::TyFnDef(def_id, substs) => {
                        (Some(monomorphize::resolve(bcx.ccx.shared(), def_id, substs)),
                         None)
                    }
                    ty::TyFnPtr(_) => {
                        (None, Some(callee.immediate()))
                    }
                    _ => bug!("{} is not callable", callee.ty)
                };
                let def = instance.map(|i| i.def);
                let sig = callee.ty.fn_sig(bcx.tcx());
                let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
                let abi = sig.abi;

                // Handle intrinsics old trans wants Expr's for, ourselves.
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
                        => Some(bcx.tcx().item_name(def_id).as_str()),
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("transmute") {
                    let &(ref dest, target) = destination.as_ref().unwrap();
                    self.trans_transmute(&bcx, &args[0], dest);
                    funclet_br(self, bcx, target);
                    return;
                }

                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bcx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        FnType::new_vtable(bcx.ccx, sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, bcx, target);
                        return;
                    }
                    _ => FnType::new(bcx.ccx, sig, &extra_args)
                };

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = intrinsic.is_some();
                    self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs,
                                          is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                let is_shuffle = intrinsic.map_or(false, |name| {
                    name.starts_with("simd_shuffle")
                });
                let mut idx = 0;
                for arg in first_args {
                    // The indices passed to simd_shuffle* in the
                    // third argument must be constant. This is
                    // checked by const-qualification, which also
                    // promotes any complex rvalues to constants.
                    if is_shuffle && idx == 2 {
                        match *arg {
                            mir::Operand::Consume(_) => {
                                span_bug!(span, "shuffle indices must be constant");
                            }
                            mir::Operand::Constant(ref constant) => {
                                let val = self.trans_constant(&bcx, constant);
                                llargs.push(val.llval);
                                idx += 1;
                                continue;
                            }
                        }
                    }

                    let op = self.trans_operand(&bcx, arg);
                    self.trans_argument(&bcx, op, &mut llargs, &fn_ty,
                                        &mut idx, &mut llfn, &def);
                }
                if let Some(tup) = untuple {
                    self.trans_arguments_untupled(&bcx, tup, &mut llargs, &fn_ty,
                                                  &mut idx, &mut llfn, &def)
                }

                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
                    use intrinsic::trans_intrinsic_call;

                    let (dest, llargs) = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => {
                            (llargs[0], &llargs[1..])
                        }
                        ReturnDest::Nothing => {
                            (C_undef(fn_ty.ret.memory_ty(bcx.ccx).ptr_to()), &llargs[..])
                        }
                        ReturnDest::IndirectOperand(dst, _) |
                        ReturnDest::Store(dst) => (dst, &llargs[..]),
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };

                    let callee_ty = common::instance_ty(
                        bcx.ccx.shared(), instance.as_ref().unwrap());
                    trans_intrinsic_call(&bcx, callee_ty, &fn_ty, &llargs, dest,
                                         terminator.source_info.span);

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                        // Make a fake operand for store_return
                        let op = OperandRef {
                            val: Ref(dst, Alignment::AbiAligned),
                            ty: sig.output(),
                        };
                        self.store_return(&bcx, ret_dest, &fn_ty.ret, op);
                    }

                    if let Some((_, target)) = *destination {
                        funclet_br(self, bcx, target);
                    } else {
                        bcx.unreachable();
                    }

                    return;
                }

                let fn_ptr = match (llfn, instance) {
                    (Some(llfn), _) => llfn,
                    (None, Some(instance)) => callee::get_fn(bcx.ccx, instance),
                    _ => span_bug!(span, "no llfn for call"),
                };

                do_call(self, bcx, fn_ty, fn_ptr, &llargs,
                        destination.as_ref().map(|&(_, target)| (ret_dest, sig.output(), target)),
                        cleanup);
            }
        }
    }
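
    // Lowers one MIR call argument into `llargs`: fat pointers are split into
    // their data/extra halves, and indirect or cast arguments are forced
    // through a stack slot so they can be passed with the type the ABI expects.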
    fn trans_argument(&mut self,
                      bcx: &Builder<'a, 'tcx>,
                      op: OperandRef<'tcx>,
                      llargs: &mut Vec<ValueRef>,
                      fn_ty: &FnType<'tcx>,
                      next_idx: &mut usize,
                      llfn: &mut Option<ValueRef>,
                      def: &Option<ty::InstanceDef<'tcx>>) {
        if let Pair(a, b) = op.val {
            // Treat the values in a fat pointer separately.
            if common::type_is_fat_ptr(bcx.ccx, op.ty) {
                let (ptr, meta) = (a, b);
                if *next_idx == 0 {
                    if let Some(ty::InstanceDef::Virtual(_, idx)) = *def {
                        let llmeth = meth::VirtualIndex::from_index(idx).get_fn(bcx, meta);
                        let llty = fn_ty.llvm_type(bcx.ccx).ptr_to();
                        *llfn = Some(bcx.pointercast(llmeth, llty));
                    }
                }

                let imm_op = |x| OperandRef {
                    val: Immediate(x),
                    // We won't be checking the type again.
                    ty: bcx.tcx().types.err
                };
                self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, llfn, def);
                self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, llfn, def);
                return;
            }
        }

        let arg = &fn_ty.args[*next_idx];
        *next_idx += 1;

        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty));
        }

        if arg.is_ignore() {
            return;
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                if arg.is_indirect() || arg.cast.is_some() {
                    let llscratch = bcx.alloca(arg.memory_ty(bcx.ccx), "arg", None);
                    self.store_operand(bcx, llscratch, None, op);
                    (llscratch, Alignment::AbiAligned, true)
                } else {
                    (op.pack_if_pair(bcx).immediate(), Alignment::AbiAligned, false)
                }
            }
            Ref(llval, Alignment::Packed) if arg.is_indirect() => {
                // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                // have scary latent bugs around.
                let llscratch = bcx.alloca(arg.memory_ty(bcx.ccx), "arg", None);
                base::memcpy_ty(bcx, llscratch, llval, op.ty, Some(1));
                (llscratch, Alignment::AbiAligned, true)
            }
            Ref(llval, align) => (llval, align, true)
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if arg.layout.ty == bcx.tcx().types.bool {
                // We store bools as i8 so we need to truncate to i1.
                llval = bcx.load_range_assert(llval, 0, 2, llvm::False, None);
                llval = bcx.trunc(llval, Type::i1(bcx.ccx));
            } else if let Some(ty) = arg.cast {
                llval = bcx.load(bcx.pointercast(llval, ty.ptr_to()),
                                 align.min_with(arg.layout.align(bcx.ccx).abi() as u32));
            } else {
                llval = bcx.load(llval, align.to_align());
            }
        }

        llargs.push(llval);
    }
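
    // Expands the tupled final argument of a "rust-call" function into
    // individual arguments, handling by-ref, immediate, and pair tuples.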
    fn trans_arguments_untupled(&mut self,
                                bcx: &Builder<'a, 'tcx>,
                                operand: &mir::Operand<'tcx>,
                                llargs: &mut Vec<ValueRef>,
                                fn_ty: &FnType<'tcx>,
                                next_idx: &mut usize,
                                llfn: &mut Option<ValueRef>,
                                def: &Option<ty::InstanceDef<'tcx>>) {
        let tuple = self.trans_operand(bcx, operand);

        let arg_types = match tuple.ty.sty {
            ty::TyTuple(ref tys, _) => tys,
            _ => span_bug!(self.mir.span,
                           "bad final argument to \"rust-call\" fn {:?}", tuple.ty)
        };

        // Handle both by-ref and immediate tuples.
        match tuple.val {
            Ref(llval, align) => {
                for (n, &ty) in arg_types.iter().enumerate() {
                    let ptr = LvalueRef::new_sized_ty(llval, tuple.ty, align);
                    let (ptr, align) = ptr.trans_field_ptr(bcx, n);
                    let val = if common::type_is_fat_ptr(bcx.ccx, ty) {
                        let (lldata, llextra) = base::load_fat_ptr(bcx, ptr, align, ty);
                        Pair(lldata, llextra)
                    } else {
                        // trans_argument will load this if it needs to
                        Ref(ptr, align)
                    };
                    let op = OperandRef {
                        val: val,
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
                }
            }
            Immediate(llval) => {
                let l = bcx.ccx.layout_of(tuple.ty);
                let v = if let layout::Univariant { ref variant, .. } = *l {
                    variant
                } else {
                    bug!("Not a tuple.");
                };
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = bcx.extract_value(
                        llval, adt::struct_llfields_index(v, n));
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx));
                    }
                    // If the tuple is immediate, the elements are as well
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
                }
            }
            Pair(a, b) => {
                let elems = [a, b];
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = elems[n];
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx));
                    }
                    // Pair is always made up of immediates
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
                }
            }
        }
    }
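
    // Lazily allocates the { i8*, i32 } slot used to stash the landing pad's
    // exception value across cleanup blocks.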
    fn get_personality_slot(&mut self, bcx: &Builder<'a, 'tcx>) -> ValueRef {
        let ccx = bcx.ccx;
        if let Some(slot) = self.llpersonalityslot {
            slot
        } else {
            let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
            let slot = bcx.alloca(llretty, "personalityslot", None);
            self.llpersonalityslot = Some(slot);
            slot
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

    fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
        if base::wants_msvc_seh(self.ccx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let bcx = self.new_block("cleanup");

        let ccx = bcx.ccx;
        let llpersonality = self.ccx.eh_personality();
        let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
        let llretval = bcx.landing_pad(llretty, llpersonality, 1, self.llfn);
        bcx.set_cleanup(llretval);
        let slot = self.get_personality_slot(&bcx);
        Lifetime::Start.call(&bcx, slot);
        bcx.store(llretval, slot, None);
        bcx.br(target_bb);
        bcx.llbb()
    }

    fn unreachable_block(&mut self) -> BasicBlockRef {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.new_block("unreachable");
            bl.unreachable();
            self.unreachable_block = Some(bl.llbb());
            bl.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Builder<'a, 'tcx> {
        Builder::new_block(self.ccx, self.llfn, name)
    }
: mir
::BasicBlock
) -> Builder
<'a
, 'tcx
> {
784 let builder
= Builder
::with_ccx(self.ccx
);
785 builder
.position_at_end(self.blocks
[bb
]);
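
    // Decides where a call's return value should go: nowhere (ignored), into
    // an existing lvalue, or into a temporary when the destination local is
    // operand-only or the callee returns indirectly.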
    fn make_return_dest(&mut self, bcx: &Builder<'a, 'tcx>,
                        dest: &mir::Lvalue<'tcx>, fn_ret_ty: &ArgType,
                        llargs: &mut Vec<ValueRef>, is_intrinsic: bool) -> ReturnDest {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret_ty.is_ignore() {
            return ReturnDest::Nothing;
        }

        let dest = if let mir::Lvalue::Local(index) = *dest {
            let ret_ty = self.monomorphized_lvalue_ty(dest);
            match self.locals[index] {
                LocalRef::Lvalue(dest) => dest,
                LocalRef::Operand(None) => {
                    // Handle temporary lvalues, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret_ty.is_indirect() {
                        // Odd, but possible, case, we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = LvalueRef::alloca(bcx, ret_ty, "tmp_ret");
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp.llval, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = LvalueRef::alloca(bcx, ret_ty, "tmp_ret");
                        ReturnDest::IndirectOperand(tmp.llval, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("lvalue local already assigned to");
                }
            }
        } else {
            self.trans_lvalue(bcx, dest)
        };

        if fn_ret_ty.is_indirect() {
            match dest.alignment {
                Alignment::AbiAligned => {
                    llargs.push(dest.llval);
                    ReturnDest::Nothing
                }
                Alignment::Packed => {
                    // Currently, MIR code generation does not create calls
                    // that store directly to fields of packed structs (in
                    // fact, the calls it creates write only to temps).
                    //
                    // If someone changes that, please update this code path
                    // to create a temporary.
                    span_bug!(self.mir.span, "can't directly store to unaligned value");
                }
            }
        } else {
            ReturnDest::Store(dest.llval)
        }
    }

    fn trans_transmute(&mut self, bcx: &Builder<'a, 'tcx>,
                       src: &mir::Operand<'tcx>,
                       dst: &mir::Lvalue<'tcx>) {
        if let mir::Lvalue::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Lvalue(lvalue) => self.trans_transmute_into(bcx, src, &lvalue),
                LocalRef::Operand(None) => {
                    let lvalue_ty = self.monomorphized_lvalue_ty(dst);
                    assert!(!lvalue_ty.has_erasable_regions());
                    let lvalue = LvalueRef::alloca(bcx, lvalue_ty, "transmute_temp");
                    self.trans_transmute_into(bcx, src, &lvalue);
                    let op = self.trans_load(bcx, lvalue.llval, lvalue.alignment, lvalue_ty);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(_)) => {
                    let ty = self.monomorphized_lvalue_ty(dst);
                    assert!(common::type_is_zero_size(bcx.ccx, ty),
                            "assigning to initialized SSAtemp");
                }
            }
        } else {
            let dst = self.trans_lvalue(bcx, dst);
            self.trans_transmute_into(bcx, src, &dst);
        }
    }
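
    // Implements transmute as a store through a pointercast of the destination,
    // using the smaller of the source and destination alignments.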
    fn trans_transmute_into(&mut self, bcx: &Builder<'a, 'tcx>,
                            src: &mir::Operand<'tcx>,
                            dst: &LvalueRef<'tcx>) {
        let val = self.trans_operand(bcx, src);
        let llty = type_of::type_of(bcx.ccx, val.ty);
        let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
        let in_type = val.ty;
        let out_type = dst.ty.to_ty(bcx.tcx());
        let llalign = cmp::min(bcx.ccx.align_of(in_type), bcx.ccx.align_of(out_type));
        self.store_operand(bcx, cast_ptr, Some(llalign), val);
    }

    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bcx: &Builder<'a, 'tcx>,
                    dest: ReturnDest,
                    ret_ty: &ArgType<'tcx>,
                    op: OperandRef<'tcx>) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => ret_ty.store(bcx, op.immediate(), dst),
            IndirectOperand(tmp, index) => {
                let op = self.trans_load(bcx, tmp, Alignment::AbiAligned, op.ty);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if ret_ty.cast.is_some() {
                    let tmp = LvalueRef::alloca(bcx, op.ty, "tmp_ret");
                    ret_ty.store(bcx, op.immediate(), tmp.llval);
                    self.trans_load(bcx, tmp.llval, tmp.alignment, op.ty)
                } else {
                    op.unpack_if_pair(bcx)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(ValueRef),
    // Stores an indirect return value to an operand local lvalue
    IndirectOperand(ValueRef, mir::Local),
    // Stores a direct return value to an operand local lvalue
    DirectOperand(mir::Local)
}