use super::operand::OperandRef;
use super::operand::OperandValue::{Immediate, Pair, Ref};
use super::place::PlaceRef;
use super::{FunctionCx, LocalRef};

use crate::base;
use crate::common::{self, IntPredicate};
use crate::meth;
use crate::traits::*;
use crate::MemFlags;

use rustc_ast as ast;
use rustc_hir::lang_items::LangItem;
use rustc_index::vec::Idx;
use rustc_middle::mir::interpret::ConstValue;
use rustc_middle::mir::AssertKind;
use rustc_middle::mir::{self, SwitchTargets};
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
use rustc_span::source_map::Span;
use rustc_span::{sym, Symbol};
use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
use rustc_target::abi::{self, LayoutOf};
use rustc_target::spec::abi::Abi;

/// Used by `FunctionCx::codegen_terminator` for emitting common patterns
/// e.g., creating a basic block, calling a function, etc.
struct TerminatorCodegenHelper<'tcx> {
    bb: mir::BasicBlock,
    terminator: &'tcx mir::Terminator<'tcx>,
    funclet_bb: Option<mir::BasicBlock>,
}
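
// `funclet_bb` is the block whose funclet `terminator` belongs to, as computed
// by the cleanup analysis in `codegen_terminator` (`cleanup_kinds`); it drives
// the `cleanupret`-vs-`br` decisions below under MSVC-style unwinding.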
impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
    /// Returns the associated funclet from `FunctionCx::funclets` for the
    /// `funclet_bb` member if it is not `None`.
    fn funclet<'b, Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &'b FunctionCx<'a, 'tcx, Bx>,
    ) -> Option<&'b Bx::Funclet> {
        self.funclet_bb.and_then(|funcl| fx.funclets[funcl].as_ref())
    }
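
    // Returns the LLVM basic block for `target`, plus a flag saying whether
    // the branch leaves the current funclet and must therefore be emitted as
    // a `cleanupret` rather than a plain `br` (see `funclet_br` and `llblock`).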
    fn lltarget<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> (Bx::BasicBlock, bool) {
        let span = self.terminator.source_info.span;
        let lltarget = fx.blocks[target];
        let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
        match (self.funclet_bb, target_funclet) {
            (None, None) => (lltarget, false),
            (Some(f), Some(t_f)) if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) => {
                (lltarget, false)
            }
            // jump *into* cleanup - need a landing pad if GNU
            (None, Some(_)) => (fx.landing_pad_to(target), false),
            (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", self.terminator),
            (Some(_), Some(_)) => (fx.landing_pad_to(target), true),
        }
    }

    /// Create a basic block.
    fn llblock<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> Bx::BasicBlock {
        let (lltarget, is_cleanupret) = self.lltarget(fx, target);
        if is_cleanupret {
            // MSVC cross-funclet jump - need a trampoline

            debug!("llblock: creating cleanup trampoline for {:?}", target);
            let name = &format!("{:?}_cleanup_trampoline_{:?}", self.bb, target);
            let mut trampoline = fx.new_block(name);
            trampoline.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
            trampoline.llbb()
        } else {
            lltarget
        }
    }
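
    // Branches to `target`, emitting a `cleanupret` instead of a plain `br`
    // when `lltarget` reports that the jump exits the current MSVC funclet.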
    fn funclet_br<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        target: mir::BasicBlock,
    ) {
        let (lltarget, is_cleanupret) = self.lltarget(fx, target);
        if is_cleanupret {
            // micro-optimization: generate a `ret` rather than a jump
            // to a trampoline.
            bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
        } else {
            bx.br(lltarget);
        }
    }

    /// Call `fn_ptr` of `fn_abi` with the arguments `llargs`, the optional
    /// return destination `destination` and the cleanup function `cleanup`.
    fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        fn_abi: FnAbi<'tcx, Ty<'tcx>>,
        fn_ptr: Bx::Value,
        llargs: &[Bx::Value],
        destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) {
        // If there is a cleanup block and the function we're calling can unwind, then
        // do an invoke, otherwise do a call.
        if let Some(cleanup) = cleanup.filter(|_| fn_abi.can_unwind) {
            let ret_bx = if let Some((_, target)) = destination {
                fx.blocks[target]
            } else {
                fx.unreachable_block()
            };
            let invokeret =
                bx.invoke(fn_ptr, &llargs, ret_bx, self.llblock(fx, cleanup), self.funclet(fx));
            bx.apply_attrs_callsite(&fn_abi, invokeret);

            if let Some((ret_dest, target)) = destination {
                let mut ret_bx = fx.build_block(target);
                fx.set_debug_loc(&mut ret_bx, self.terminator.source_info);
                fx.store_return(&mut ret_bx, ret_dest, &fn_abi.ret, invokeret);
            }
        } else {
            let llret = bx.call(fn_ptr, &llargs, self.funclet(fx));
            bx.apply_attrs_callsite(&fn_abi, llret);
            if fx.mir[self.bb].is_cleanup {
                // Cleanup is always the cold path. Don't inline
                // drop glue. Also, when there is a deeply-nested
                // struct, there are "symmetry" issues that cause
                // exponential inlining - see issue #41696.
                bx.do_not_inline(llret);
            }

            if let Some((ret_dest, target)) = destination {
                fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
                self.funclet_br(fx, bx, target);
            } else {
                bx.unreachable();
            }
        }
    }
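
    // For example, a backwards `Goto` produced by `loop { f(); }` forms a loop
    // that LLVM may assume makes forward progress; emitting the `sideeffect`
    // intrinsic on such edges avoids that miscompilation (see also the `Goto`
    // arm of `codegen_terminator`).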
    // Generate sideeffect intrinsic if jumping to any of the targets can form
    // a loop.
    fn maybe_sideeffect<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        mir: &'tcx mir::Body<'tcx>,
        bx: &mut Bx,
        targets: &[mir::BasicBlock],
    ) {
        if bx.tcx().sess.opts.debugging_opts.insert_sideeffect {
            if targets.iter().any(|&target| {
                target <= self.bb
                    && target.start_location().is_predecessor_of(self.bb.start_location(), mir)
            }) {
                bx.sideeffect(false);
            }
        }
    }
}

/// Codegen implementations for some terminator variants.
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Generates code for a `Resume` terminator.
    fn codegen_resume_terminator(&mut self, helper: TerminatorCodegenHelper<'tcx>, mut bx: Bx) {
        if let Some(funclet) = helper.funclet(self) {
            bx.cleanup_ret(funclet, None);
        } else {
            let slot = self.get_personality_slot(&mut bx);
            let lp0 = slot.project_field(&mut bx, 0);
            let lp0 = bx.load_operand(lp0).immediate();
            let lp1 = slot.project_field(&mut bx, 1);
            let lp1 = bx.load_operand(lp1).immediate();
            slot.storage_dead(&mut bx);

            let mut lp = bx.const_undef(self.landing_pad_type());
            lp = bx.insert_value(lp, lp0, 0);
            lp = bx.insert_value(lp, lp1, 1);
            bx.resume(lp);
        }
    }
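
    // Lowers `SwitchInt`. A single non-otherwise target becomes a conditional
    // branch (with the integer compare elided entirely when switching on
    // `bool`), while anything larger becomes an LLVM `switch`.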
    fn codegen_switchint_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        discr: &mir::Operand<'tcx>,
        switch_ty: Ty<'tcx>,
        targets: &SwitchTargets,
    ) {
        let discr = self.codegen_operand(&mut bx, &discr);
        // `switch_ty` is redundant, sanity-check that.
        assert_eq!(discr.layout.ty, switch_ty);
        helper.maybe_sideeffect(self.mir, &mut bx, targets.all_targets());

        let mut target_iter = targets.iter();
        if target_iter.len() == 1 {
            // If there are two targets (one conditional, one fallback), emit br instead of switch
            let (test_value, target) = target_iter.next().unwrap();
            let lltrue = helper.llblock(self, target);
            let llfalse = helper.llblock(self, targets.otherwise());
            if switch_ty == bx.tcx().types.bool {
                // Don't generate trivial icmps when switching on bool
                match test_value {
                    0 => bx.cond_br(discr.immediate(), llfalse, lltrue),
                    1 => bx.cond_br(discr.immediate(), lltrue, llfalse),
                    _ => bug!(),
                }
            } else {
                let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
                let llval = bx.const_uint_big(switch_llty, test_value);
                let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
                bx.cond_br(cmp, lltrue, llfalse);
            }
        } else {
            bx.switch(
                discr.immediate(),
                helper.llblock(self, targets.otherwise()),
                target_iter.map(|(value, target)| (value, helper.llblock(self, target))),
            );
        }
    }
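
    // Lowering of `Return` depends on the return type's `PassMode`: `Ignore`
    // and `Indirect` emit `ret void` (any value was already written through
    // the return pointer), `Direct` and `Pair` load the value, and `Cast`
    // spills to a scratch slot and reloads it at the cast-to type.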
    fn codegen_return_terminator(&mut self, mut bx: Bx) {
        // Call `va_end` if this is the definition of a C-variadic function.
        if self.fn_abi.c_variadic {
            // The `VaList` "spoofed" argument is just after all the real arguments.
            let va_list_arg_idx = self.fn_abi.args.len();
            match self.locals[mir::Local::new(1 + va_list_arg_idx)] {
                LocalRef::Place(va_list) => {
                    bx.va_end(va_list.llval);
                }
                _ => bug!("C-variadic function must have a `VaList` place"),
            }
        }
        if self.fn_abi.ret.layout.abi.is_uninhabited() {
            // Functions with uninhabited return values are marked `noreturn`,
            // so we should make sure that we never actually do.
            // We play it safe by using a well-defined `abort`, but we could go for immediate UB
            // if that turns out to be helpful.
            bx.abort();
            // `abort` does not terminate the block, so we still need to generate
            // an `unreachable` terminator after it.
            bx.unreachable();
            return;
        }

        let llval = match self.fn_abi.ret.mode {
            PassMode::Ignore | PassMode::Indirect { .. } => {
                bx.ret_void();
                return;
            }

            PassMode::Direct(_) | PassMode::Pair(..) => {
                let op = self.codegen_consume(&mut bx, mir::Place::return_place().as_ref());
                if let Ref(llval, _, align) = op.val {
                    bx.load(llval, align)
                } else {
                    op.immediate_or_packed_pair(&mut bx)
                }
            }

            PassMode::Cast(cast_ty) => {
                let op = match self.locals[mir::RETURN_PLACE] {
                    LocalRef::Operand(Some(op)) => op,
                    LocalRef::Operand(None) => bug!("use of return before def"),
                    LocalRef::Place(cg_place) => OperandRef {
                        val: Ref(cg_place.llval, None, cg_place.align),
                        layout: cg_place.layout,
                    },
                    LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                };
                let llslot = match op.val {
                    Immediate(_) | Pair(..) => {
                        let scratch = PlaceRef::alloca(&mut bx, self.fn_abi.ret.layout);
                        op.val.store(&mut bx, scratch);
                        scratch.llval
                    }
                    Ref(llval, _, align) => {
                        assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
                        llval
                    }
                };
                let addr = bx.pointercast(llslot, bx.type_ptr_to(bx.cast_backend_type(&cast_ty)));
                bx.load(addr, self.fn_abi.ret.layout.align.abi)
            }
        };
        bx.ret(llval);
    }
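
    // Lowers `Drop` by calling `drop_in_place::<T>`'s drop glue. For trait
    // objects the glue is not known statically, so it is loaded from the
    // vtable (`meth::DESTRUCTOR`) and invoked on the data pointer alone.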
    fn codegen_drop_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        location: mir::Place<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) {
        let ty = location.ty(self.mir, bx.tcx()).ty;
        let ty = self.monomorphize(ty);
        let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);

        if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
            // we don't actually need to drop anything.
            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        let place = self.codegen_place(&mut bx, location.as_ref());
        let (args1, args2);
        let mut args = if let Some(llextra) = place.llextra {
            args2 = [place.llval, llextra];
            &args2[..]
        } else {
            args1 = [place.llval];
            &args1[..]
        };
        let (drop_fn, fn_abi) = match ty.kind() {
            // FIXME(eddyb) perhaps move some of this logic into
            // `Instance::resolve_drop_in_place`?
            ty::Dynamic(..) => {
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
                    substs: drop_fn.substs,
                };
                let fn_abi = FnAbi::of_instance(&bx, virtual_drop, &[]);
                let vtable = args[1];
                args = &args[..1];
                (meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_abi), fn_abi)
            }
            _ => (bx.get_fn_addr(drop_fn), FnAbi::of_instance(&bx, drop_fn, &[])),
        };
        helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
        helper.do_call(
            self,
            &mut bx,
            fn_abi,
            drop_fn,
            args,
            Some((ReturnDest::Nothing, target)),
            unwind,
        );
    }
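
    // e.g., indexing `a[i]` emits `Assert { cond, expected: true, msg:
    // BoundsCheck { len, index } }`, which becomes a conditional branch to a
    // cold "panic" block that calls the `panic_bounds_check` lang item.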
    fn codegen_assert_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        cond: &mir::Operand<'tcx>,
        expected: bool,
        msg: &mir::AssertMessage<'tcx>,
        target: mir::BasicBlock,
        cleanup: Option<mir::BasicBlock>,
    ) {
        let span = terminator.source_info.span;
        let cond = self.codegen_operand(&mut bx, cond).immediate();
        let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);

        // This case can currently arise only from functions marked
        // with #[rustc_inherit_overflow_checks] and inlined from
        // another crate (mostly core::num generic/#[inline] fns),
        // while the current crate doesn't use overflow checks.
        // NOTE: Unlike binops, negation doesn't have its own
        // checked operation, just a comparison with the minimum
        // value, so we have to check for the assert message.
        if !bx.check_overflow() {
            if let AssertKind::OverflowNeg(_) = *msg {
                const_cond = Some(expected);
            }
        }

        // Don't codegen the panic block if success is known.
        if const_cond == Some(expected) {
            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        // Pass the condition through llvm.expect for branch hinting.
        let cond = bx.expect(cond, expected);

        // Create the failure block and the conditional branch to it.
        let lltarget = helper.llblock(self, target);
        let panic_block = self.new_block("panic");
        helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
        if expected {
            bx.cond_br(cond, lltarget, panic_block.llbb());
        } else {
            bx.cond_br(cond, panic_block.llbb(), lltarget);
        }

        // After this point, bx is the block for the call to panic.
        bx = panic_block;
        self.set_debug_loc(&mut bx, terminator.source_info);

        // Get the location information.
        let location = self.get_caller_location(&mut bx, terminator.source_info).immediate();

        // Put together the arguments to the panic entry point.
        let (lang_item, args) = match msg {
            AssertKind::BoundsCheck { ref len, ref index } => {
                let len = self.codegen_operand(&mut bx, len).immediate();
                let index = self.codegen_operand(&mut bx, index).immediate();
                // It's `fn panic_bounds_check(index: usize, len: usize)`,
                // and `#[track_caller]` adds an implicit third argument.
                (LangItem::PanicBoundsCheck, vec![index, len, location])
            }
            _ => {
                let msg_str = Symbol::intern(msg.description());
                let msg = bx.const_str(msg_str);
                // It's `pub fn panic(expr: &str)`, with the wide reference being passed
                // as two arguments, and `#[track_caller]` adds an implicit third argument.
                (LangItem::Panic, vec![msg.0, msg.1, location])
            }
        };

        // Obtain the panic entry point.
        let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
        let instance = ty::Instance::mono(bx.tcx(), def_id);
        let fn_abi = FnAbi::of_instance(&bx, instance, &[]);
        let llfn = bx.get_fn_addr(instance);

        // Codegen the actual panic invoke/call.
        helper.do_call(self, &mut bx, fn_abi, llfn, &args, None, cleanup);
    }
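
    // e.g., `mem::zeroed::<&T>()` guards itself with
    // `intrinsics::assert_zero_valid::<&T>()`, which is lowered here to a call
    // to the `panic` lang item, since references may never be zero.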
    /// Returns `true` if this is indeed a panic intrinsic and codegen is done.
    fn codegen_panic_intrinsic(
        &mut self,
        helper: &TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        intrinsic: Option<Symbol>,
        instance: Option<Instance<'tcx>>,
        source_info: mir::SourceInfo,
        destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) -> bool {
        // Emit a panic or a no-op for `assert_*` intrinsics.
        // These are intrinsics that compile to panics so that we can get a message
        // which mentions the offending type, even from a const context.
        #[derive(Debug, PartialEq)]
        enum AssertIntrinsic {
            Inhabited,
            ZeroValid,
            UninitValid,
        }
        let panic_intrinsic = intrinsic.and_then(|i| match i {
            sym::assert_inhabited => Some(AssertIntrinsic::Inhabited),
            sym::assert_zero_valid => Some(AssertIntrinsic::ZeroValid),
            sym::assert_uninit_valid => Some(AssertIntrinsic::UninitValid),
            _ => None,
        });
        if let Some(intrinsic) = panic_intrinsic {
            use AssertIntrinsic::*;
            let ty = instance.unwrap().substs.type_at(0);
            let layout = bx.layout_of(ty);
            let do_panic = match intrinsic {
                Inhabited => layout.abi.is_uninhabited(),
                // We unwrap as the error type is `!`.
                ZeroValid => !layout.might_permit_raw_init(bx, /*zero:*/ true).unwrap(),
                // We unwrap as the error type is `!`.
                UninitValid => !layout.might_permit_raw_init(bx, /*zero:*/ false).unwrap(),
            };
            if do_panic {
                let msg_str = with_no_trimmed_paths(|| {
                    if layout.abi.is_uninhabited() {
                        // Use this error even for the other intrinsics as it is more precise.
                        format!("attempted to instantiate uninhabited type `{}`", ty)
                    } else if intrinsic == ZeroValid {
                        format!("attempted to zero-initialize type `{}`, which is invalid", ty)
                    } else {
                        format!("attempted to leave type `{}` uninitialized, which is invalid", ty)
                    }
                });
                let msg = bx.const_str(Symbol::intern(&msg_str));
                let location = self.get_caller_location(bx, source_info).immediate();

                // Obtain the panic entry point.
                // FIXME: dedup this with `codegen_assert_terminator` above.
                let def_id =
                    common::langcall(bx.tcx(), Some(source_info.span), "", LangItem::Panic);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_abi = FnAbi::of_instance(bx, instance, &[]);
                let llfn = bx.get_fn_addr(instance);

                if let Some((_, target)) = destination.as_ref() {
                    helper.maybe_sideeffect(self.mir, bx, &[*target]);
                }
                // Codegen the actual panic invoke/call.
                helper.do_call(
                    self,
                    bx,
                    fn_abi,
                    llfn,
                    &[msg.0, msg.1, location],
                    destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
                    cleanup,
                );
            } else {
                // a NOP
                let target = destination.as_ref().unwrap().1;
                helper.maybe_sideeffect(self.mir, bx, &[target]);
                helper.funclet_br(self, bx, target)
            }
            true
        } else {
            false
        }
    }
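
    // Lowers `Call`. Special cases are handled before the generic path: empty
    // drop glue (a plain jump), `transmute`, the panic intrinsics above,
    // `caller_location`, other intrinsics, and virtual calls, plus "rust-call"
    // argument untupling and the implicit `#[track_caller]` location argument.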
    fn codegen_call_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &Vec<mir::Operand<'tcx>>,
        destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
        fn_span: Span,
    ) {
        let source_info = terminator.source_info;
        let span = source_info.span;

        // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
        let callee = self.codegen_operand(&mut bx, func);

        let (instance, mut llfn) = match *callee.layout.ty.kind() {
            ty::FnDef(def_id, substs) => (
                Some(
                    ty::Instance::resolve(bx.tcx(), ty::ParamEnv::reveal_all(), def_id, substs)
                        .unwrap()
                        .unwrap()
                        .polymorphize(bx.tcx()),
                ),
                None,
            ),
            ty::FnPtr(_) => (None, Some(callee.immediate())),
            _ => bug!("{} is not callable", callee.layout.ty),
        };
        let def = instance.map(|i| i.def);

        if let Some(ty::InstanceDef::DropGlue(_, None)) = def {
            // Empty drop glue; a no-op.
            let &(_, target) = destination.as_ref().unwrap();
            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        // FIXME(eddyb) avoid computing this if possible, when `instance` is
        // available - right now `sig` is only needed for getting the `abi`
        // and figuring out how many extra args were passed to a C-variadic `fn`.
        let sig = callee.layout.ty.fn_sig(bx.tcx());
        let abi = sig.abi();

        // Handle intrinsics old codegen wants Expr's for, ourselves.
        let intrinsic = match def {
            Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().item_name(def_id)),
            _ => None,
        };

        let extra_args = &args[sig.inputs().skip_binder().len()..];
        let extra_args = extra_args
            .iter()
            .map(|op_arg| {
                let op_ty = op_arg.ty(self.mir, bx.tcx());
                self.monomorphize(op_ty)
            })
            .collect::<Vec<_>>();

        let fn_abi = match instance {
            Some(instance) => FnAbi::of_instance(&bx, instance, &extra_args),
            None => FnAbi::of_fn_ptr(&bx, sig, &extra_args),
        };

        if intrinsic == Some(sym::transmute) {
            if let Some(destination_ref) = destination.as_ref() {
                let &(dest, target) = destination_ref;
                self.codegen_transmute(&mut bx, &args[0], dest);
                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                helper.funclet_br(self, &mut bx, target);
            } else {
                // If we are trying to transmute to an uninhabited type,
                // it is likely there is no allotted destination. In fact,
                // transmuting to an uninhabited type is UB, which means
                // we can do what we like. Here, we declare that transmuting
                // into an uninhabited type is impossible, so anything following
                // it must be unreachable.
                assert_eq!(fn_abi.ret.layout.abi, abi::Abi::Uninhabited);
                bx.unreachable();
            }
            return;
        }

        if self.codegen_panic_intrinsic(
            &helper,
            &mut bx,
            intrinsic,
            instance,
            source_info,
            destination,
            cleanup,
        ) {
            return;
        }

        // The arguments we'll be passing. Plus one to account for outptr, if used.
        let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;
        let mut llargs = Vec::with_capacity(arg_count);

        // Prepare the return value destination
        let ret_dest = if let Some((dest, _)) = *destination {
            let is_intrinsic = intrinsic.is_some();
            self.make_return_dest(&mut bx, dest, &fn_abi.ret, &mut llargs, is_intrinsic)
        } else {
            ReturnDest::Nothing
        };

        if intrinsic == Some(sym::caller_location) {
            if let Some((_, target)) = destination.as_ref() {
                let location = self
                    .get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });

                if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
                    location.val.store(&mut bx, tmp);
                }
                self.store_return(&mut bx, ret_dest, &fn_abi.ret, location.immediate());

                helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
                helper.funclet_br(self, &mut bx, *target);
            }
            return;
        }

        if intrinsic.is_some() && intrinsic != Some(sym::drop_in_place) {
            let intrinsic = intrinsic.unwrap();
            let dest = match ret_dest {
                _ if fn_abi.ret.is_indirect() => llargs[0],
                ReturnDest::Nothing => {
                    bx.const_undef(bx.type_ptr_to(bx.arg_memory_ty(&fn_abi.ret)))
                }
                ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.llval,
                ReturnDest::DirectOperand(_) => {
                    bug!("Cannot use direct operand with an intrinsic call")
                }
            };

            let args: Vec<_> = args
                .iter()
                .enumerate()
                .map(|(i, arg)| {
                    // The indices passed to simd_shuffle* in the
                    // third argument must be constant. This is
                    // checked by const-qualification, which also
                    // promotes any complex rvalues to constants.
                    if i == 2 && intrinsic.as_str().starts_with("simd_shuffle") {
                        if let mir::Operand::Constant(constant) = arg {
                            let c = self.eval_mir_constant(constant);
                            let (llval, ty) = self.simd_shuffle_indices(
                                &bx,
                                constant.span,
                                constant.literal.ty,
                                c,
                            );
                            return OperandRef { val: Immediate(llval), layout: bx.layout_of(ty) };
                        } else {
                            span_bug!(span, "shuffle indices must be constant");
                        }
                    }

                    self.codegen_operand(&mut bx, arg)
                })
                .collect();

            Self::codegen_intrinsic_call(
                &mut bx,
                *instance.as_ref().unwrap(),
                &fn_abi,
                &args,
                dest,
                span,
            );

            if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                self.store_return(&mut bx, ret_dest, &fn_abi.ret, dst.llval);
            }

            if let Some((_, target)) = *destination {
                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                helper.funclet_br(self, &mut bx, target);
            } else {
                bx.unreachable();
            }

            return;
        }

        // Split the rust-call tupled arguments off.
        let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
            let (tup, args) = args.split_last().unwrap();
            (args, Some(tup))
        } else {
            (&args[..], None)
        };

        'make_args: for (i, arg) in first_args.iter().enumerate() {
            let mut op = self.codegen_operand(&mut bx, arg);

            if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                if let Pair(..) = op.val {
                    // In the case of Rc<Self>, we need to explicitly pass a
                    // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
                    // that is understood elsewhere in the compiler as a method on
                    // `dyn Trait`.
                    // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
                    // we get a value of a built-in pointer type
                    'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
                        && !op.layout.ty.is_region_ptr()
                    {
                        for i in 0..op.layout.fields.count() {
                            let field = op.extract_field(&mut bx, i);
                            if !field.layout.is_zst() {
                                // we found the one non-zero-sized field that is allowed
                                // now find *its* non-zero-sized field, or stop if it's a
                                // pointer
                                op = field;
                                continue 'descend_newtypes;
                            }
                        }

                        span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
                    }

                    // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
                    // data pointer and vtable. Look up the method in the vtable, and pass
                    // the data pointer as the first argument
                    match op.val {
                        Pair(data_ptr, meta) => {
                            llfn = Some(
                                meth::VirtualIndex::from_index(idx).get_fn(&mut bx, meta, &fn_abi),
                            );
                            llargs.push(data_ptr);
                            continue 'make_args;
                        }
                        other => bug!("expected a Pair, got {:?}", other),
                    }
                } else if let Ref(data_ptr, Some(meta), _) = op.val {
                    // by-value dynamic dispatch
                    llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(&mut bx, meta, &fn_abi));
                    llargs.push(data_ptr);
                    continue;
                } else {
                    span_bug!(span, "can't codegen a virtual call on {:?}", op);
                }
            }

            // The callee needs to own the argument memory if we pass it
            // by-ref, so make a local copy of non-immediate constants.
            match (arg, op.val) {
                (&mir::Operand::Copy(_), Ref(_, None, _))
                | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
                    let tmp = PlaceRef::alloca(&mut bx, op.layout);
                    op.val.store(&mut bx, tmp);
                    op.val = Ref(tmp.llval, None, tmp.align);
                }
                _ => {}
            }

            self.codegen_argument(&mut bx, op, &mut llargs, &fn_abi.args[i]);
        }
        if let Some(tup) = untuple {
            self.codegen_arguments_untupled(
                &mut bx,
                tup,
                &mut llargs,
                &fn_abi.args[first_args.len()..],
            )
        }

        let needs_location =
            instance.map_or(false, |i| i.def.requires_caller_location(self.cx.tcx()));
        if needs_location {
            assert_eq!(
                fn_abi.args.len(),
                args.len() + 1,
                "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR",
            );
            let location =
                self.get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });
            debug!(
                "codegen_call_terminator({:?}): location={:?} (fn_span {:?})",
                terminator, location, fn_span
            );

            let last_arg = fn_abi.args.last().unwrap();
            self.codegen_argument(&mut bx, location, &mut llargs, last_arg);
        }

        let fn_ptr = match (llfn, instance) {
            (Some(llfn), _) => llfn,
            (None, Some(instance)) => bx.get_fn_addr(instance),
            _ => span_bug!(span, "no llfn for call"),
        };

        if let Some((_, target)) = destination.as_ref() {
            helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
        }
        helper.do_call(
            self,
            &mut bx,
            fn_abi,
            fn_ptr,
            &llargs,
            destination.as_ref().map(|&(_, target)| (ret_dest, target)),
            cleanup,
        );
    }
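
    // Lowers `InlineAsm` terminators produced by `asm!`. Operands are
    // codegenned up front; integer and float constants are rendered to strings
    // so they can be interpolated directly into the assembly template.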
    fn codegen_asm_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        template: &[ast::InlineAsmTemplatePiece],
        operands: &[mir::InlineAsmOperand<'tcx>],
        options: ast::InlineAsmOptions,
        line_spans: &[Span],
        destination: Option<mir::BasicBlock>,
    ) {
        let span = terminator.source_info.span;

        let operands: Vec<_> = operands
            .iter()
            .map(|op| match *op {
                mir::InlineAsmOperand::In { reg, ref value } => {
                    let value = self.codegen_operand(&mut bx, value);
                    InlineAsmOperandRef::In { reg, value }
                }
                mir::InlineAsmOperand::Out { reg, late, ref place } => {
                    let place = place.map(|place| self.codegen_place(&mut bx, place.as_ref()));
                    InlineAsmOperandRef::Out { reg, late, place }
                }
                mir::InlineAsmOperand::InOut { reg, late, ref in_value, ref out_place } => {
                    let in_value = self.codegen_operand(&mut bx, in_value);
                    let out_place =
                        out_place.map(|out_place| self.codegen_place(&mut bx, out_place.as_ref()));
                    InlineAsmOperandRef::InOut { reg, late, in_value, out_place }
                }
                mir::InlineAsmOperand::Const { ref value } => {
                    if let mir::Operand::Constant(constant) = value {
                        let const_value = self
                            .eval_mir_constant(constant)
                            .unwrap_or_else(|_| span_bug!(span, "asm const cannot be resolved"));
                        let ty = constant.literal.ty;
                        let size = bx.layout_of(ty).size;
                        let scalar = match const_value {
                            ConstValue::Scalar(s) => s,
                            _ => span_bug!(
                                span,
                                "expected Scalar for promoted asm const, but got {:#?}",
                                const_value
                            ),
                        };
                        let value = scalar.assert_bits(size);
                        let string = match ty.kind() {
                            ty::Uint(_) => value.to_string(),
                            ty::Int(int_ty) => {
                                match int_ty.normalize(bx.tcx().sess.target.pointer_width) {
                                    ast::IntTy::I8 => (value as i8).to_string(),
                                    ast::IntTy::I16 => (value as i16).to_string(),
                                    ast::IntTy::I32 => (value as i32).to_string(),
                                    ast::IntTy::I64 => (value as i64).to_string(),
                                    ast::IntTy::I128 => (value as i128).to_string(),
                                    ast::IntTy::Isize => unreachable!(),
                                }
                            }
                            ty::Float(ast::FloatTy::F32) => {
                                f32::from_bits(value as u32).to_string()
                            }
                            ty::Float(ast::FloatTy::F64) => {
                                f64::from_bits(value as u64).to_string()
                            }
                            _ => span_bug!(span, "asm const has bad type {}", ty),
                        };
                        InlineAsmOperandRef::Const { string }
                    } else {
                        span_bug!(span, "asm const is not a constant");
                    }
                }
                mir::InlineAsmOperand::SymFn { ref value } => {
                    let literal = self.monomorphize(value.literal);
                    if let ty::FnDef(def_id, substs) = *literal.ty.kind() {
                        let instance = ty::Instance::resolve_for_fn_ptr(
                            bx.tcx(),
                            ty::ParamEnv::reveal_all(),
                            def_id,
                            substs,
                        )
                        .unwrap();
                        InlineAsmOperandRef::SymFn { instance }
                    } else {
                        span_bug!(span, "invalid type for asm sym (fn)");
                    }
                }
                mir::InlineAsmOperand::SymStatic { def_id } => {
                    InlineAsmOperandRef::SymStatic { def_id }
                }
            })
            .collect();

        bx.codegen_inline_asm(template, &operands, options, line_spans);

        if let Some(target) = destination {
            helper.funclet_br(self, &mut bx, target);
        } else {
            bx.unreachable();
        }
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
        let mut bx = self.build_block(bb);
        let mir = self.mir;
        let data = &mir[bb];

        debug!("codegen_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.codegen_statement(bx, statement);
        }

        self.codegen_terminator(bx, bb, data.terminator());
    }

    fn codegen_terminator(
        &mut self,
        mut bx: Bx,
        bb: mir::BasicBlock,
        terminator: &'tcx mir::Terminator<'tcx>,
    ) {
        debug!("codegen_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let helper = TerminatorCodegenHelper { bb, terminator, funclet_bb };

        self.set_debug_loc(&mut bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => self.codegen_resume_terminator(helper, bx),

            mir::TerminatorKind::Abort => {
                bx.abort();
                // `abort` does not terminate the block, so we still need to generate
                // an `unreachable` terminator after it.
                bx.unreachable();
            }

            mir::TerminatorKind::Goto { target } => {
                if bb == target {
                    // This is an unconditional branch back to this same basic
                    // block. That means we have something like a `loop {}`
                    // statement. Currently LLVM miscompiles this because it
                    // assumes forward progress. We want to prevent this in all
                    // cases, but that has a fairly high cost to compile times
                    // currently. Instead, try to handle this specific case
                    // which comes up commonly in practice (e.g., in embedded
                    // code).
                    //
                    // The `true` here means we insert side effects regardless
                    // of -Zinsert-sideeffect being passed on unconditional
                    // branching to the same basic block.
                    bx.sideeffect(true);
                } else {
                    helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                }
                helper.funclet_br(self, &mut bx, target);
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref targets } => {
                self.codegen_switchint_terminator(helper, bx, discr, switch_ty, targets);
            }

            mir::TerminatorKind::Return => {
                self.codegen_return_terminator(bx);
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }

            mir::TerminatorKind::Drop { place, target, unwind } => {
                self.codegen_drop_terminator(helper, bx, place, target, unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                self.codegen_assert_terminator(
                    helper, bx, terminator, cond, expected, msg, target, cleanup,
                );
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
            }

            mir::TerminatorKind::Call {
                ref func,
                ref args,
                ref destination,
                cleanup,
                from_hir_call: _,
                fn_span,
            } => {
                self.codegen_call_terminator(
                    helper,
                    bx,
                    terminator,
                    func,
                    args,
                    destination,
                    cleanup,
                    fn_span,
                );
            }
            mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Yield { .. } => {
                bug!("generator ops in codegen")
            }
            mir::TerminatorKind::FalseEdge { .. } | mir::TerminatorKind::FalseUnwind { .. } => {
                bug!("borrowck false edges in codegen")
            }

            mir::TerminatorKind::InlineAsm {
                template,
                ref operands,
                options,
                line_spans,
                destination,
            } => {
                self.codegen_asm_terminator(
                    helper,
                    bx,
                    terminator,
                    template,
                    operands,
                    options,
                    line_spans,
                    destination,
                );
            }
        }
    }
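
    // Prepares a single callee argument: pushes padding if required, splits
    // `Pair`s and unsized (pointer, metadata) values into two arguments, and
    // spills or reloads values whose `PassMode` disagrees with their current
    // representation.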
    fn codegen_argument(
        &mut self,
        bx: &mut Bx,
        op: OperandRef<'tcx, Bx::Value>,
        llargs: &mut Vec<Bx::Value>,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(bx.const_undef(bx.reg_backend_type(&ty)))
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op),
            }
        } else if arg.is_unsized_indirect() {
            match op.val {
                Ref(a, Some(b), _) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op),
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => match arg.mode {
                PassMode::Indirect { .. } | PassMode::Cast(_) => {
                    let scratch = PlaceRef::alloca(bx, arg.layout);
                    op.val.store(bx, scratch);
                    (scratch.llval, scratch.align, true)
                }
                _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
            },
            Ref(llval, _, align) => {
                if arg.is_indirect() && align < arg.layout.align.abi {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                    // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                    // have scary latent bugs around.

                    let scratch = PlaceRef::alloca(bx, arg.layout);
                    base::memcpy_ty(
                        bx,
                        scratch.llval,
                        scratch.align,
                        llval,
                        align,
                        op.layout,
                        MemFlags::empty(),
                    );
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                let addr = bx.pointercast(llval, bx.type_ptr_to(bx.cast_backend_type(&ty)));
                llval = bx.load(addr, align.min(arg.layout.align.abi));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(llval, align);
                if let abi::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as `i8` so we need to truncate to `i1`.
                llval = bx.to_immediate(llval, arg.layout);
            }
        }

        llargs.push(llval);
    }
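
    // For the "rust-call" ABI, the trailing tuple argument is split so that
    // each element is passed as a separate argument, e.g. a closure taking
    // `(A, B)` receives two arguments rather than one tuple.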
    fn codegen_arguments_untupled(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
        llargs: &mut Vec<Bx::Value>,
        args: &[ArgAbi<'tcx, Ty<'tcx>>],
    ) {
        let tuple = self.codegen_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, None, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                let field = bx.load_operand(field_ptr);
                self.codegen_argument(bx, field, llargs, &args[i]);
            }
        } else if let Ref(_, Some(_), _) = tuple.val {
            bug!("closure arguments must be sized")
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.codegen_argument(bx, op, llargs, &args[i]);
            }
        }
    }
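
    // e.g., for a `#[track_caller]` function fully inlined into its caller,
    // the scope walk below recovers the callsite span so `Location::caller()`
    // still reports the original call site rather than a span inside the
    // inlined body.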
    fn get_caller_location(
        &mut self,
        bx: &mut Bx,
        mut source_info: mir::SourceInfo,
    ) -> OperandRef<'tcx, Bx::Value> {
        let tcx = bx.tcx();

        let mut span_to_caller_location = |span: Span| {
            let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
            let caller = tcx.sess.source_map().lookup_char_pos(topmost.lo());
            let const_loc = tcx.const_caller_location((
                Symbol::intern(&caller.file.name.to_string()),
                caller.line as u32,
                caller.col_display as u32 + 1,
            ));
            OperandRef::from_const(bx, const_loc, bx.tcx().caller_location_ty())
        };

        // Walk up the `SourceScope`s, in case some of them are from MIR inlining.
        // If so, the starting `source_info.span` is in the innermost inlined
        // function, and will be replaced with outer callsite spans as long
        // as the inlined functions were `#[track_caller]`.
        loop {
            let scope_data = &self.mir.source_scopes[source_info.scope];

            if let Some((callee, callsite_span)) = scope_data.inlined {
                // Stop inside the most nested non-`#[track_caller]` function,
                // before ever reaching its caller (which is irrelevant).
                if !callee.def.requires_caller_location(tcx) {
                    return span_to_caller_location(source_info.span);
                }
                source_info.span = callsite_span;
            }

            // Skip past all of the parents with `inlined: None`.
            match scope_data.inlined_parent_scope {
                Some(parent) => source_info.scope = parent,
                None => break,
            }
        }

        // No inlined `SourceScope`s, or all of them were `#[track_caller]`.
        self.caller_location.unwrap_or_else(|| span_to_caller_location(source_info.span))
    }
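
    // The personality slot caches the `(exception pointer, selector)` pair
    // produced by a landing pad (see `landing_pad_uncached`) so that `Resume`
    // can reload and rethrow it; its layout is the `(*mut u8, i32)` tuple
    // interned below.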
    fn get_personality_slot(&mut self, bx: &mut Bx) -> PlaceRef<'tcx, Bx::Value> {
        let cx = bx.cx();
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(
                cx.tcx().intern_tup(&[cx.tcx().mk_mut_ptr(cx.tcx().types.u8), cx.tcx().types.i32]),
            );
            let slot = PlaceRef::alloca(bx, layout);
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Returns the landing-pad wrapper around the given basic block.
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> Bx::BasicBlock {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

    fn landing_pad_uncached(&mut self, target_bb: Bx::BasicBlock) -> Bx::BasicBlock {
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let mut bx = self.new_block("cleanup");

        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        let slot = self.get_personality_slot(&mut bx);
        slot.storage_live(&mut bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&mut bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }

    fn landing_pad_type(&self) -> Bx::Type {
        let cx = self.cx;
        cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
    }

    fn unreachable_block(&mut self) -> Bx::BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let mut bx = self.new_block("unreachable");
            bx.unreachable();
            self.unreachable_block = Some(bx.llbb());
            bx.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Bx {
        Bx::new_block(self.cx, self.llfn, name)
    }

    pub fn build_block(&self, bb: mir::BasicBlock) -> Bx {
        let mut bx = Bx::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
    }

    fn make_return_dest(
        &mut self,
        bx: &mut Bx,
        dest: mir::Place<'tcx>,
        fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
        llargs: &mut Vec<Bx::Value>,
        is_intrinsic: bool,
    ) -> ReturnDest<'tcx, Bx::Value> {
        // If the return is ignored, we can just return a do-nothing `ReturnDest`.
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let Some(index) = dest.as_local() {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically `Operand` ones, as
                    // they don't have `alloca`s.
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case, we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout);
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary `alloca` for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout);
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(
                bx,
                mir::PlaceRef { local: dest.local, projection: &dest.projection },
            )
        };
        if fn_ret.is_indirect() {
            if dest.align < dest.layout.align.abi {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }
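
    // Transmute lowering: same-size scalar-to-scalar transmutes become a
    // single `bitcast` (e.g. `u32` -> `f32`); changes of pointer-ness (e.g.
    // `usize` -> `*const T`) and all aggregate cases go through memory
    // instead (see `codegen_transmute_into`).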
    fn codegen_transmute(&mut self, bx: &mut Bx, src: &mir::Operand<'tcx>, dst: mir::Place<'tcx>) {
        if let Some(index) = dst.as_local() {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.layout_of(self.monomorphized_place_ty(dst.as_ref()));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout);
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = bx.load_operand(place);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                    self.debug_introduce_local(bx, index);
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(), "assigning to initialized SSAtemp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, dst.as_ref());
            self.codegen_transmute_into(bx, src, dst);
        }
    }

    fn codegen_transmute_into(
        &mut self,
        bx: &mut Bx,
        src: &mir::Operand<'tcx>,
        dst: PlaceRef<'tcx, Bx::Value>,
    ) {
        let src = self.codegen_operand(bx, src);

        // Special-case transmutes between scalars as simple bitcasts.
        match (&src.layout.abi, &dst.layout.abi) {
            (abi::Abi::Scalar(src_scalar), abi::Abi::Scalar(dst_scalar)) => {
                // HACK(eddyb) LLVM doesn't like `bitcast`s between pointers and non-pointers.
                if (src_scalar.value == abi::Pointer) == (dst_scalar.value == abi::Pointer) {
                    assert_eq!(src.layout.size, dst.layout.size);

                    // NOTE(eddyb) the `from_immediate` and `to_immediate_scalar`
                    // conversions allow handling `bool`s the same as `u8`s.
                    let src = bx.from_immediate(src.immediate());
                    let src_as_dst = bx.bitcast(src, bx.backend_type(dst.layout));
                    Immediate(bx.to_immediate_scalar(src_as_dst, dst_scalar)).store(bx, dst);
                    return;
                }
            }
            _ => {}
        }

        let llty = bx.backend_type(src.layout);
        let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
        let align = src.layout.align.abi.min(dst.align);
        src.val.store(bx, PlaceRef::new_sized_aligned(cast_ptr, src.layout, align));
    }

    // Stores the return value of a function call into its final location.
    fn store_return(
        &mut self,
        bx: &mut Bx,
        dest: ReturnDest<'tcx, Bx::Value>,
        ret_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        llval: Bx::Value,
    ) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => bx.store_arg(&ret_abi, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = bx.load_operand(tmp);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
                self.debug_introduce_local(bx, index);
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_abi.mode {
                    let tmp = PlaceRef::alloca(bx, ret_abi.layout);
                    tmp.storage_live(bx);
                    bx.store_arg(&ret_abi, llval, tmp);
                    let op = bx.load_operand(tmp);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
                self.debug_introduce_local(bx, index);
            }
        }
    }
}

enum ReturnDest<'tcx, V> {
    // Do nothing; the return value is indirect or ignored.
    Nothing,
    // Store the return value to the pointer.
    Store(PlaceRef<'tcx, V>),
    // Store an indirect return value to an operand local place.
    IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
    // Store a direct return value to an operand local place.
    DirectOperand(mir::Local),
}