use super::operand::OperandRef;
use super::operand::OperandValue::{Immediate, Pair, Ref};
use super::place::PlaceRef;
use super::{FunctionCx, LocalRef};

use crate::base;
use crate::common::{self, IntPredicate};
use crate::meth;
use crate::traits::*;
use crate::MemFlags;

use rustc_hir::lang_items;
use rustc_index::vec::Idx;
use rustc_middle::mir;
use rustc_middle::mir::AssertKind;
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
use rustc_span::{source_map::Span, symbol::Symbol};
use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
use rustc_target::abi::{self, LayoutOf};
use rustc_target::spec::abi::Abi;

use std::borrow::Cow;

/// Used by `FunctionCx::codegen_terminator` for emitting common patterns
/// e.g., creating a basic block, calling a function, etc.
struct TerminatorCodegenHelper<'tcx> {
    bb: mir::BasicBlock,
    terminator: &'tcx mir::Terminator<'tcx>,
    funclet_bb: Option<mir::BasicBlock>,
}

impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
    /// Returns the associated funclet from `FunctionCx::funclets` for the
    /// `funclet_bb` member if it is not `None`.
    fn funclet<'b, Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &'b mut FunctionCx<'a, 'tcx, Bx>,
    ) -> Option<&'b Bx::Funclet> {
        match self.funclet_bb {
            Some(funcl) => fx.funclets[funcl].as_ref(),
            None => None,
        }
    }

    fn lltarget<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> (Bx::BasicBlock, bool) {
        let span = self.terminator.source_info.span;
        let lltarget = fx.blocks[target];
        let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
        match (self.funclet_bb, target_funclet) {
            (None, None) => (lltarget, false),
            (Some(f), Some(t_f)) if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) => {
                (lltarget, false)
            }
            // jump *into* cleanup - need a landing pad if GNU
            (None, Some(_)) => (fx.landing_pad_to(target), false),
            (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", self.terminator),
            (Some(_), Some(_)) => (fx.landing_pad_to(target), true),
        }
    }
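
    // The `bool` in the pair returned above means: "this jump crosses into a
    // different funclet under MSVC-style SEH, so it must be lowered as a
    // `cleanupret` (possibly via a trampoline block) rather than a plain
    // branch". `llblock` and `funclet_br` below act on that flag.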

    /// Create a basic block.
    fn llblock<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> Bx::BasicBlock {
        let (lltarget, is_cleanupret) = self.lltarget(fx, target);
        if is_cleanupret {
            // MSVC cross-funclet jump - need a trampoline
            debug!("llblock: creating cleanup trampoline for {:?}", target);
            let name = &format!("{:?}_cleanup_trampoline_{:?}", self.bb, target);
            let mut trampoline = fx.new_block(name);
            trampoline.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
            trampoline.llbb()
        } else {
            lltarget
        }
    }

    fn funclet_br<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        target: mir::BasicBlock,
    ) {
        let (lltarget, is_cleanupret) = self.lltarget(fx, target);
        if is_cleanupret {
            // micro-optimization: generate a `ret` rather than a jump
            // to a trampoline.
            bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
        } else {
            bx.br(lltarget);
        }
    }

    /// Call `fn_ptr` of `fn_abi` with the arguments `llargs`, the optional
    /// return destination `destination` and the cleanup function `cleanup`.
    fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        fn_abi: FnAbi<'tcx, Ty<'tcx>>,
        fn_ptr: Bx::Value,
        llargs: &[Bx::Value],
        destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) {
        // If there is a cleanup block and the function we're calling can unwind, then
        // do an invoke, otherwise do a call.
        if let Some(cleanup) = cleanup.filter(|_| fn_abi.can_unwind) {
            let ret_bx = if let Some((_, target)) = destination {
                fx.blocks[target]
            } else {
                fx.unreachable_block()
            };
            let invokeret =
                bx.invoke(fn_ptr, &llargs, ret_bx, self.llblock(fx, cleanup), self.funclet(fx));
            bx.apply_attrs_callsite(&fn_abi, invokeret);

            if let Some((ret_dest, target)) = destination {
                let mut ret_bx = fx.build_block(target);
                fx.set_debug_loc(&mut ret_bx, self.terminator.source_info);
                fx.store_return(&mut ret_bx, ret_dest, &fn_abi.ret, invokeret);
            }
        } else {
            let llret = bx.call(fn_ptr, &llargs, self.funclet(fx));
            bx.apply_attrs_callsite(&fn_abi, llret);
            if fx.mir[self.bb].is_cleanup {
                // Cleanup is always the cold path. Don't inline
                // drop glue. Also, when there is a deeply-nested
                // struct, there are "symmetry" issues that cause
                // exponential inlining - see issue #41696.
                bx.do_not_inline(llret);
            }

            if let Some((ret_dest, target)) = destination {
                fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
                self.funclet_br(fx, bx, target);
            } else {
                bx.unreachable();
            }
        }
    }
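
    // Sketched in LLVM IR terms, the two paths above correspond to:
    //     %r = invoke ... to label %ret unwind label %cleanup   ; with cleanup
    //     %r = call ...                                         ; without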

    // Generate sideeffect intrinsic if jumping to any of the targets can form
    // a loop.
    fn maybe_sideeffect<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        mir: mir::ReadOnlyBodyAndCache<'tcx, 'tcx>,
        bx: &mut Bx,
        targets: &[mir::BasicBlock],
    ) {
        if bx.tcx().sess.opts.debugging_opts.insert_sideeffect {
            if targets.iter().any(|&target| {
                target <= self.bb
                    && target.start_location().is_predecessor_of(self.bb.start_location(), mir)
            }) {
                bx.sideeffect();
            }
        }
    }
}
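
// NOTE: `bx.sideeffect()` emits `llvm.sideeffect`. The check above only fires
// for backedges (a jump to a block at or before the current one that can reach
// it again), where LLVM might otherwise assume forward progress and drop a
// Rust infinite loop.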

/// Codegen implementations for some terminator variants.
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Generates code for a `Resume` terminator.
    fn codegen_resume_terminator(&mut self, helper: TerminatorCodegenHelper<'tcx>, mut bx: Bx) {
        if let Some(funclet) = helper.funclet(self) {
            bx.cleanup_ret(funclet, None);
        } else {
            let slot = self.get_personality_slot(&mut bx);
            let lp0 = slot.project_field(&mut bx, 0);
            let lp0 = bx.load_operand(lp0).immediate();
            let lp1 = slot.project_field(&mut bx, 1);
            let lp1 = bx.load_operand(lp1).immediate();
            slot.storage_dead(&mut bx);

            let mut lp = bx.const_undef(self.landing_pad_type());
            lp = bx.insert_value(lp, lp0, 0);
            lp = bx.insert_value(lp, lp1, 1);
            bx.resume(lp);
        }
    }

    fn codegen_switchint_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        discr: &mir::Operand<'tcx>,
        switch_ty: Ty<'tcx>,
        values: &Cow<'tcx, [u128]>,
        targets: &Vec<mir::BasicBlock>,
    ) {
        let discr = self.codegen_operand(&mut bx, &discr);
        if targets.len() == 2 {
            // If there are two targets, emit br instead of switch
            let lltrue = helper.llblock(self, targets[0]);
            let llfalse = helper.llblock(self, targets[1]);
            if switch_ty == bx.tcx().types.bool {
                helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
                // Don't generate trivial icmps when switching on bool
                if let [0] = values[..] {
                    bx.cond_br(discr.immediate(), llfalse, lltrue);
                } else {
                    assert_eq!(&values[..], &[1]);
                    bx.cond_br(discr.immediate(), lltrue, llfalse);
                }
            } else {
                let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
                let llval = bx.const_uint_big(switch_llty, values[0]);
                let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
                helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
                bx.cond_br(cmp, lltrue, llfalse);
            }
        } else {
            helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
            let (otherwise, targets) = targets.split_last().unwrap();
            bx.switch(
                discr.immediate(),
                helper.llblock(self, *otherwise),
                values
                    .iter()
                    .zip(targets)
                    .map(|(&value, target)| (value, helper.llblock(self, *target))),
            );
        }
    }

    fn codegen_return_terminator(&mut self, mut bx: Bx) {
        // Call `va_end` if this is the definition of a C-variadic function.
        if self.fn_abi.c_variadic {
            // The `VaList` "spoofed" argument is just after all the real arguments.
            let va_list_arg_idx = self.fn_abi.args.len();
            match self.locals[mir::Local::new(1 + va_list_arg_idx)] {
                LocalRef::Place(va_list) => {
                    bx.va_end(va_list.llval);
                }
                _ => bug!("C-variadic function must have a `VaList` place"),
            }
        }
        if self.fn_abi.ret.layout.abi.is_uninhabited() {
            // Functions with uninhabited return values are marked `noreturn`,
            // so we should make sure that we never actually do.
            // We play it safe by using a well-defined `abort`, but we could go for immediate UB
            // if that turns out to be helpful.
            bx.abort();
            // `abort` does not terminate the block, so we still need to generate
            // an `unreachable` terminator after it.
            bx.unreachable();
            return;
        }
        let llval = match self.fn_abi.ret.mode {
            PassMode::Ignore | PassMode::Indirect(..) => {
                bx.ret_void();
                return;
            }

            PassMode::Direct(_) | PassMode::Pair(..) => {
                let op = self.codegen_consume(&mut bx, mir::Place::return_place().as_ref());
                if let Ref(llval, _, align) = op.val {
                    bx.load(llval, align)
                } else {
                    op.immediate_or_packed_pair(&mut bx)
                }
            }

            PassMode::Cast(cast_ty) => {
                let op = match self.locals[mir::RETURN_PLACE] {
                    LocalRef::Operand(Some(op)) => op,
                    LocalRef::Operand(None) => bug!("use of return before def"),
                    LocalRef::Place(cg_place) => OperandRef {
                        val: Ref(cg_place.llval, None, cg_place.align),
                        layout: cg_place.layout,
                    },
                    LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                };
                let llslot = match op.val {
                    Immediate(_) | Pair(..) => {
                        let scratch = PlaceRef::alloca(&mut bx, self.fn_abi.ret.layout);
                        op.val.store(&mut bx, scratch);
                        scratch.llval
                    }
                    Ref(llval, _, align) => {
                        assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
                        llval
                    }
                };
                let addr = bx.pointercast(llslot, bx.type_ptr_to(bx.cast_backend_type(&cast_ty)));
                bx.load(addr, self.fn_abi.ret.layout.align.abi)
            }
        };
        bx.ret(llval);
    }
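
    // Which arm runs above is decided by the return type's ABI classification,
    // e.g. (roughly, target-dependent): `()` is `PassMode::Ignore`, `i32` is
    // `PassMode::Direct`, a two-scalar aggregate may be `PassMode::Pair`, and
    // large aggregates are returned through a caller-provided out-pointer
    // (`PassMode::Indirect`) or an ABI cast (`PassMode::Cast`).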

    fn codegen_drop_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        location: mir::Place<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) {
        let ty = location.ty(*self.mir, bx.tcx()).ty;
        let ty = self.monomorphize(&ty);
        let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);

        if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
            // we don't actually need to drop anything.
            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        let place = self.codegen_place(&mut bx, location.as_ref());
        let (args1, args2);
        let mut args = if let Some(llextra) = place.llextra {
            args2 = [place.llval, llextra];
            &args2[..]
        } else {
            args1 = [place.llval];
            &args1[..]
        };
        let (drop_fn, fn_abi) = match ty.kind {
            // FIXME(eddyb) perhaps move some of this logic into
            // `Instance::resolve_drop_in_place`?
            ty::Dynamic(..) => {
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
                    substs: drop_fn.substs,
                };
                let fn_abi = FnAbi::of_instance(&bx, virtual_drop, &[]);
                let vtable = args[1];
                args = &args[..1];
                (meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_abi), fn_abi)
            }
            _ => (bx.get_fn_addr(drop_fn), FnAbi::of_instance(&bx, drop_fn, &[])),
        };
        helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
        helper.do_call(
            self,
            &mut bx,
            fn_abi,
            drop_fn,
            args,
            Some((ReturnDest::Nothing, target)),
            unwind,
        );
    }
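
    // In the virtual case above, `meth::DESTRUCTOR` is the vtable slot holding
    // the drop glue, so `drop_in_place(*mut dyn Trait)` becomes a load of that
    // function pointer from the vtable followed by an indirect call that
    // receives only the data pointer.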

    fn codegen_assert_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        cond: &mir::Operand<'tcx>,
        expected: bool,
        msg: &mir::AssertMessage<'tcx>,
        target: mir::BasicBlock,
        cleanup: Option<mir::BasicBlock>,
    ) {
        let span = terminator.source_info.span;
        let cond = self.codegen_operand(&mut bx, cond).immediate();
        let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);

        // This case can currently arise only from functions marked
        // with #[rustc_inherit_overflow_checks] and inlined from
        // another crate (mostly core::num generic/#[inline] fns),
        // while the current crate doesn't use overflow checks.
        // NOTE: Unlike binops, negation doesn't have its own
        // checked operation, just a comparison with the minimum
        // value, so we have to check for the assert message.
        if !bx.check_overflow() {
            if let AssertKind::OverflowNeg = *msg {
                const_cond = Some(expected);
            }
        }

        // Don't codegen the panic block if success is known.
        if const_cond == Some(expected) {
            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        // Pass the condition through llvm.expect for branch hinting.
        let cond = bx.expect(cond, expected);

        // Create the failure block and the conditional branch to it.
        let lltarget = helper.llblock(self, target);
        let panic_block = self.new_block("panic");
        helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
        if expected {
            bx.cond_br(cond, lltarget, panic_block.llbb());
        } else {
            bx.cond_br(cond, panic_block.llbb(), lltarget);
        }

        // After this point, bx is the block for the call to panic.
        bx = panic_block;
        self.set_debug_loc(&mut bx, terminator.source_info);

        // Get the location information.
        let location = self.get_caller_location(&mut bx, span).immediate();

        // Put together the arguments to the panic entry point.
        let (lang_item, args) = match msg {
            AssertKind::BoundsCheck { ref len, ref index } => {
                let len = self.codegen_operand(&mut bx, len).immediate();
                let index = self.codegen_operand(&mut bx, index).immediate();
                // It's `fn panic_bounds_check(index: usize, len: usize)`,
                // and `#[track_caller]` adds an implicit third argument.
                (lang_items::PanicBoundsCheckFnLangItem, vec![index, len, location])
            }
            _ => {
                let msg_str = Symbol::intern(msg.description());
                let msg = bx.const_str(msg_str);
                // It's `pub fn panic(expr: &str)`, with the wide reference being passed
                // as two arguments, and `#[track_caller]` adds an implicit third argument.
                (lang_items::PanicFnLangItem, vec![msg.0, msg.1, location])
            }
        };

        // Obtain the panic entry point.
        let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
        let instance = ty::Instance::mono(bx.tcx(), def_id);
        let fn_abi = FnAbi::of_instance(&bx, instance, &[]);
        let llfn = bx.get_fn_addr(instance);

        // Codegen the actual panic invoke/call.
        helper.do_call(self, &mut bx, fn_abi, llfn, &args, None, cleanup);
    }
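
    // For example, overflow checking for `a + b` produces MIR like
    //     _3 = CheckedAdd(_1, _2)
    //     assert(!move (_3.1: bool), "attempt to add with overflow") -> [success: bb2, unwind: bb3]
    // which is lowered here to a `cond_br` into either the success block or a
    // fresh "panic" block that calls the panic entry point.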

    /// Returns `true` if this is indeed a panic intrinsic and codegen is done.
    fn codegen_panic_intrinsic(
        &mut self,
        helper: &TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        intrinsic: Option<&str>,
        instance: Option<Instance<'tcx>>,
        span: Span,
        destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) -> bool {
        // Emit a panic or a no-op for `assert_*` intrinsics.
        // These are intrinsics that compile to panics so that we can get a message
        // which mentions the offending type, even from a const context.
        #[derive(Debug, PartialEq)]
        enum AssertIntrinsic {
            Inhabited,
            ZeroValid,
            UninitValid,
        }
        let panic_intrinsic = intrinsic.and_then(|i| match i {
            // FIXME: Move to symbols instead of strings.
            "assert_inhabited" => Some(AssertIntrinsic::Inhabited),
            "assert_zero_valid" => Some(AssertIntrinsic::ZeroValid),
            "assert_uninit_valid" => Some(AssertIntrinsic::UninitValid),
            _ => None,
        });
        if let Some(intrinsic) = panic_intrinsic {
            use AssertIntrinsic::*;
            let ty = instance.unwrap().substs.type_at(0);
            let layout = bx.layout_of(ty);
            let do_panic = match intrinsic {
                Inhabited => layout.abi.is_uninhabited(),
                // We unwrap as the error type is `!`.
                ZeroValid => !layout.might_permit_raw_init(bx, /*zero:*/ true).unwrap(),
                // We unwrap as the error type is `!`.
                UninitValid => !layout.might_permit_raw_init(bx, /*zero:*/ false).unwrap(),
            };
            if do_panic {
                let msg_str = if layout.abi.is_uninhabited() {
                    // Use this error even for the other intrinsics as it is more precise.
                    format!("attempted to instantiate uninhabited type `{}`", ty)
                } else if intrinsic == ZeroValid {
                    format!("attempted to zero-initialize type `{}`, which is invalid", ty)
                } else {
                    format!("attempted to leave type `{}` uninitialized, which is invalid", ty)
                };
                let msg = bx.const_str(Symbol::intern(&msg_str));
                let location = self.get_caller_location(bx, span).immediate();

                // Obtain the panic entry point.
                // FIXME: dedup this with `codegen_assert_terminator` above.
                let def_id =
                    common::langcall(bx.tcx(), Some(span), "", lang_items::PanicFnLangItem);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_abi = FnAbi::of_instance(bx, instance, &[]);
                let llfn = bx.get_fn_addr(instance);

                if let Some((_, target)) = destination.as_ref() {
                    helper.maybe_sideeffect(self.mir, bx, &[*target]);
                }
                // Codegen the actual panic invoke/call.
                helper.do_call(
                    self,
                    bx,
                    fn_abi,
                    llfn,
                    &[msg.0, msg.1, location],
                    destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
                    cleanup,
                );
            } else {
                // a NOP
                let target = destination.as_ref().unwrap().1;
                helper.maybe_sideeffect(self.mir, bx, &[target]);
                helper.funclet_br(self, bx, target)
            }
            true
        } else {
            false
        }
    }
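
    // These `assert_*` intrinsics back library checks such as `mem::zeroed`
    // and `MaybeUninit::assume_init`: when the requested initialization is
    // invalid for the type, the call compiles to a panic naming that type
    // instead of silently producing an invalid value.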

    fn codegen_call_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &Vec<mir::Operand<'tcx>>,
        destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) {
        let span = terminator.source_info.span;
        // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
        let callee = self.codegen_operand(&mut bx, func);

        let (instance, mut llfn) = match callee.layout.ty.kind {
            ty::FnDef(def_id, substs) => (
                Some(
                    ty::Instance::resolve(bx.tcx(), ty::ParamEnv::reveal_all(), def_id, substs)
                        .unwrap(),
                ),
                None,
            ),
            ty::FnPtr(_) => (None, Some(callee.immediate())),
            _ => bug!("{} is not callable", callee.layout.ty),
        };
        let def = instance.map(|i| i.def);

        if let Some(ty::InstanceDef::DropGlue(_, None)) = def {
            // Empty drop glue; a no-op.
            let &(_, target) = destination.as_ref().unwrap();
            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        // FIXME(eddyb) avoid computing this if possible, when `instance` is
        // available - right now `sig` is only needed for getting the `abi`
        // and figuring out how many extra args were passed to a C-variadic `fn`.
        let sig = callee.layout.ty.fn_sig(bx.tcx());
        let abi = sig.abi();

        // Handle intrinsics old codegen wants Expr's for, ourselves.
        let intrinsic = match def {
            Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().item_name(def_id).as_str()),
            _ => None,
        };
        let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

        let extra_args = &args[sig.inputs().skip_binder().len()..];
        let extra_args = extra_args
            .iter()
            .map(|op_arg| {
                let op_ty = op_arg.ty(*self.mir, bx.tcx());
                self.monomorphize(&op_ty)
            })
            .collect::<Vec<_>>();

        let fn_abi = match instance {
            Some(instance) => FnAbi::of_instance(&bx, instance, &extra_args),
            None => FnAbi::of_fn_ptr(&bx, sig, &extra_args),
        };

        if intrinsic == Some("transmute") {
            if let Some(destination_ref) = destination.as_ref() {
                let &(dest, target) = destination_ref;
                self.codegen_transmute(&mut bx, &args[0], dest);
                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                helper.funclet_br(self, &mut bx, target);
            } else {
                // If we are trying to transmute to an uninhabited type,
                // it is likely there is no allotted destination. In fact,
                // transmuting to an uninhabited type is UB, which means
                // we can do what we like. Here, we declare that transmuting
                // into an uninhabited type is impossible, so anything following
                // it must be unreachable.
                assert_eq!(fn_abi.ret.layout.abi, abi::Abi::Uninhabited);
                bx.unreachable();
            }
            return;
        }

        // For normal codegen, this Miri-specific intrinsic should never occur.
        if intrinsic == Some("miri_start_panic") {
            bug!("`miri_start_panic` should never end up in compiled code");
        }

        if self.codegen_panic_intrinsic(
            &helper,
            &mut bx,
            intrinsic,
            instance,
            span,
            destination,
            cleanup,
        ) {
            return;
        }

        // The arguments we'll be passing. Plus one to account for outptr, if used.
        let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;
        let mut llargs = Vec::with_capacity(arg_count);

        // Prepare the return value destination
        let ret_dest = if let Some((dest, _)) = *destination {
            let is_intrinsic = intrinsic.is_some();
            self.make_return_dest(&mut bx, dest, &fn_abi.ret, &mut llargs, is_intrinsic)
        } else {
            ReturnDest::Nothing
        };

        if intrinsic == Some("caller_location") {
            if let Some((_, target)) = destination.as_ref() {
                let location = self.get_caller_location(&mut bx, span);

                if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
                    location.val.store(&mut bx, tmp);
                }
                self.store_return(&mut bx, ret_dest, &fn_abi.ret, location.immediate());

                helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
                helper.funclet_br(self, &mut bx, *target);
            }
            return;
        }

        if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
            let dest = match ret_dest {
                _ if fn_abi.ret.is_indirect() => llargs[0],
                ReturnDest::Nothing => {
                    bx.const_undef(bx.type_ptr_to(bx.arg_memory_ty(&fn_abi.ret)))
                }
                ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.llval,
                ReturnDest::DirectOperand(_) => {
                    bug!("Cannot use direct operand with an intrinsic call")
                }
            };

            let args: Vec<_> = args
                .iter()
                .enumerate()
                .map(|(i, arg)| {
                    // The indices passed to simd_shuffle* in the
                    // third argument must be constant. This is
                    // checked by const-qualification, which also
                    // promotes any complex rvalues to constants.
                    if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                        if let mir::Operand::Constant(constant) = arg {
                            let c = self.eval_mir_constant(constant);
                            let (llval, ty) = self.simd_shuffle_indices(
                                &bx,
                                constant.span,
                                constant.literal.ty,
                                c,
                            );
                            return OperandRef { val: Immediate(llval), layout: bx.layout_of(ty) };
                        } else {
                            span_bug!(span, "shuffle indices must be constant");
                        }
                    }

                    self.codegen_operand(&mut bx, arg)
                })
                .collect();

            bx.codegen_intrinsic_call(
                *instance.as_ref().unwrap(),
                &fn_abi,
                &args,
                dest,
                terminator.source_info.span,
            );

            if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                self.store_return(&mut bx, ret_dest, &fn_abi.ret, dst.llval);
            }

            if let Some((_, target)) = *destination {
                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                helper.funclet_br(self, &mut bx, target);
            } else {
                bx.unreachable();
            }

            return;
        }

        // Split the rust-call tupled arguments off.
        let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
            let (tup, args) = args.split_last().unwrap();
            (args, Some(tup))
        } else {
            (&args[..], None)
        };

        'make_args: for (i, arg) in first_args.iter().enumerate() {
            let mut op = self.codegen_operand(&mut bx, arg);

            if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                if let Pair(..) = op.val {
                    // In the case of Rc<Self>, we need to explicitly pass a
                    // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
                    // that is understood elsewhere in the compiler as a method on
                    // `dyn Trait`.
                    // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
                    // we get a value of a built-in pointer type
                    'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
                        && !op.layout.ty.is_region_ptr()
                    {
                        for i in 0..op.layout.fields.count() {
                            let field = op.extract_field(&mut bx, i);
                            if !field.layout.is_zst() {
                                // we found the one non-zero-sized field that is allowed
                                // now find *its* non-zero-sized field, or stop if it's a
                                // pointer
                                op = field;
                                continue 'descend_newtypes;
                            }
                        }

                        span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
                    }

                    // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
                    // data pointer and vtable. Look up the method in the vtable, and pass
                    // the data pointer as the first argument
                    match op.val {
                        Pair(data_ptr, meta) => {
                            llfn = Some(
                                meth::VirtualIndex::from_index(idx).get_fn(&mut bx, meta, &fn_abi),
                            );
                            llargs.push(data_ptr);
                            continue 'make_args;
                        }
                        other => bug!("expected a Pair, got {:?}", other),
                    }
                } else if let Ref(data_ptr, Some(meta), _) = op.val {
                    // by-value dynamic dispatch
                    llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(&mut bx, meta, &fn_abi));
                    llargs.push(data_ptr);
                    continue;
                } else {
                    span_bug!(span, "can't codegen a virtual call on {:?}", op);
                }
            }

            // The callee needs to own the argument memory if we pass it
            // by-ref, so make a local copy of non-immediate constants.
            match (arg, op.val) {
                (&mir::Operand::Copy(_), Ref(_, None, _))
                | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
                    let tmp = PlaceRef::alloca(&mut bx, op.layout);
                    op.val.store(&mut bx, tmp);
                    op.val = Ref(tmp.llval, None, tmp.align);
                }
                _ => {}
            }

            self.codegen_argument(&mut bx, op, &mut llargs, &fn_abi.args[i]);
        }
        if let Some(tup) = untuple {
            self.codegen_arguments_untupled(
                &mut bx,
                tup,
                &mut llargs,
                &fn_abi.args[first_args.len()..],
            )
        }

        let needs_location =
            instance.map_or(false, |i| i.def.requires_caller_location(self.cx.tcx()));
        if needs_location {
            assert_eq!(
                fn_abi.args.len(),
                args.len() + 1,
                "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR",
            );
            let location = self.get_caller_location(&mut bx, span);
            let last_arg = fn_abi.args.last().unwrap();
            self.codegen_argument(&mut bx, location, &mut llargs, last_arg);
        }

        let fn_ptr = match (llfn, instance) {
            (Some(llfn), _) => llfn,
            (None, Some(instance)) => bx.get_fn_addr(instance),
            _ => span_bug!(span, "no llfn for call"),
        };

        if let Some((_, target)) = destination.as_ref() {
            helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
        }
        helper.do_call(
            self,
            &mut bx,
            fn_abi,
            fn_ptr,
            &llargs,
            destination.as_ref().map(|&(_, target)| (ret_dest, target)),
            cleanup,
        );
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
        let mut bx = self.build_block(bb);
        let mir = self.mir;
        let data = &mir[bb];

        debug!("codegen_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.codegen_statement(bx, statement);
        }

        self.codegen_terminator(bx, bb, data.terminator());
    }

    fn codegen_terminator(
        &mut self,
        mut bx: Bx,
        bb: mir::BasicBlock,
        terminator: &'tcx mir::Terminator<'tcx>,
    ) {
        debug!("codegen_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let helper = TerminatorCodegenHelper { bb, terminator, funclet_bb };

        self.set_debug_loc(&mut bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => self.codegen_resume_terminator(helper, bx),

            mir::TerminatorKind::Abort => {
                bx.abort();
                // `abort` does not terminate the block, so we still need to generate
                // an `unreachable` terminator after it.
                bx.unreachable();
            }

            mir::TerminatorKind::Goto { target } => {
                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                helper.funclet_br(self, &mut bx, target);
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                self.codegen_switchint_terminator(helper, bx, discr, switch_ty, values, targets);
            }

            mir::TerminatorKind::Return => {
                self.codegen_return_terminator(bx);
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }

            mir::TerminatorKind::Drop { location, target, unwind } => {
                self.codegen_drop_terminator(helper, bx, location, target, unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                self.codegen_assert_terminator(
                    helper, bx, terminator, cond, expected, msg, target, cleanup,
                );
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
            }

            mir::TerminatorKind::Call {
                ref func,
                ref args,
                ref destination,
                cleanup,
                from_hir_call: _,
            } => {
                self.codegen_call_terminator(
                    helper,
                    bx,
                    terminator,
                    func,
                    args,
                    destination,
                    cleanup,
                );
            }
            mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Yield { .. } => {
                bug!("generator ops in codegen")
            }
            mir::TerminatorKind::FalseEdges { .. } | mir::TerminatorKind::FalseUnwind { .. } => {
                bug!("borrowck false edges in codegen")
            }
        }
    }
: OperandRef
<'tcx
, Bx
::Value
>,
923 llargs
: &mut Vec
<Bx
::Value
>,
924 arg
: &ArgAbi
<'tcx
, Ty
<'tcx
>>,
926 // Fill padding with undef value, where applicable.
927 if let Some(ty
) = arg
.pad
{
928 llargs
.push(bx
.const_undef(bx
.reg_backend_type(&ty
)))
935 if let PassMode
::Pair(..) = arg
.mode
{
942 _
=> bug
!("codegen_argument: {:?} invalid for pair argument", op
),
944 } else if arg
.is_unsized_indirect() {
946 Ref(a
, Some(b
), _
) => {
951 _
=> bug
!("codegen_argument: {:?} invalid for unsized indirect argument", op
),
955 // Force by-ref if we have to load through a cast pointer.
956 let (mut llval
, align
, by_ref
) = match op
.val
{
957 Immediate(_
) | Pair(..) => match arg
.mode
{
958 PassMode
::Indirect(..) | PassMode
::Cast(_
) => {
959 let scratch
= PlaceRef
::alloca(bx
, arg
.layout
);
960 op
.val
.store(bx
, scratch
);
961 (scratch
.llval
, scratch
.align
, true)
963 _
=> (op
.immediate_or_packed_pair(bx
), arg
.layout
.align
.abi
, false),
965 Ref(llval
, _
, align
) => {
966 if arg
.is_indirect() && align
< arg
.layout
.align
.abi
{
967 // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
968 // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
969 // have scary latent bugs around.
971 let scratch
= PlaceRef
::alloca(bx
, arg
.layout
);
981 (scratch
.llval
, scratch
.align
, true)
988 if by_ref
&& !arg
.is_indirect() {
989 // Have to load the argument, maybe while casting it.
990 if let PassMode
::Cast(ty
) = arg
.mode
{
991 let addr
= bx
.pointercast(llval
, bx
.type_ptr_to(bx
.cast_backend_type(&ty
)));
992 llval
= bx
.load(addr
, align
.min(arg
.layout
.align
.abi
));
994 // We can't use `PlaceRef::load` here because the argument
995 // may have a type we don't treat as immediate, but the ABI
996 // used for this call is passing it by-value. In that case,
997 // the load would just produce `OperandValue::Ref` instead
998 // of the `OperandValue::Immediate` we need for the call.
999 llval
= bx
.load(llval
, align
);
1000 if let abi
::Abi
::Scalar(ref scalar
) = arg
.layout
.abi
{
1001 if scalar
.is_bool() {
1002 bx
.range_metadata(llval
, 0..2);
1005 // We store bools as `i8` so we need to truncate to `i1`.
1006 llval
= base
::to_immediate(bx
, llval
, arg
.layout
);

    fn codegen_arguments_untupled(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
        llargs: &mut Vec<Bx::Value>,
        args: &[ArgAbi<'tcx, Ty<'tcx>>],
    ) {
        let tuple = self.codegen_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, None, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                let field = bx.load_operand(field_ptr);
                self.codegen_argument(bx, field, llargs, &args[i]);
            }
        } else if let Ref(_, Some(_), _) = tuple.val {
            bug!("closure arguments must be sized")
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.codegen_argument(bx, op, llargs, &args[i]);
            }
        }
    }
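
    // This implements the "rust-call" ABI used for closures: a call like
    // `FnMut::call_mut(&mut f, (a, b))` passes its arguments as a single MIR
    // tuple, which is unpacked here into the individual ABI arguments.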

    fn get_caller_location(&mut self, bx: &mut Bx, span: Span) -> OperandRef<'tcx, Bx::Value> {
        self.caller_location.unwrap_or_else(|| {
            let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
            let caller = bx.tcx().sess.source_map().lookup_char_pos(topmost.lo());
            let const_loc = bx.tcx().const_caller_location((
                Symbol::intern(&caller.file.name.to_string()),
                caller.line as u32,
                caller.col_display as u32 + 1,
            ));
            OperandRef::from_const(bx, const_loc, bx.tcx().caller_location_ty())
        })
    }
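
    // This builds the `&'static Location<'static>` value that `#[track_caller]`
    // functions receive as a hidden trailing argument (pushed by the
    // `needs_location` path in `codegen_call_terminator` above).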

    fn get_personality_slot(&mut self, bx: &mut Bx) -> PlaceRef<'tcx, Bx::Value> {
        let cx = bx.cx();
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(
                cx.tcx().intern_tup(&[cx.tcx().mk_mut_ptr(cx.tcx().types.u8), cx.tcx().types.i32]),
            );
            let slot = PlaceRef::alloca(bx, layout);
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Returns the landing-pad wrapper around the given basic block.
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> Bx::BasicBlock {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

    fn landing_pad_uncached(&mut self, target_bb: Bx::BasicBlock) -> Bx::BasicBlock {
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let mut bx = self.new_block("cleanup");

        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        let slot = self.get_personality_slot(&mut bx);
        slot.storage_live(&mut bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&mut bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }
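
    // The block built above corresponds roughly to this LLVM IR shape:
    //     cleanup:
    //         %lp = landingpad { i8*, i32 } cleanup
    //         ; spill both landing-pad values to the personality slot
    //         br label %target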

    fn landing_pad_type(&self) -> Bx::Type {
        let cx = self.cx;
        cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
    }

    fn unreachable_block(&mut self) -> Bx::BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let mut bx = self.new_block("unreachable");
            bx.unreachable();
            self.unreachable_block = Some(bx.llbb());
            bx.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Bx {
        Bx::new_block(self.cx, self.llfn, name)
    }

    pub fn build_block(&self, bb: mir::BasicBlock) -> Bx {
        let mut bx = Bx::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
    }

    fn make_return_dest(
        &mut self,
        bx: &mut Bx,
        dest: mir::Place<'tcx>,
        fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
        llargs: &mut Vec<Bx::Value>,
        is_intrinsic: bool,
    ) -> ReturnDest<'tcx, Bx::Value> {
        // If the return is ignored, we can just return a do-nothing `ReturnDest`.
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let Some(index) = dest.as_local() {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically `Operand` ones, as
                    // they don't have `alloca`s.
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case, we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout);
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary `alloca` for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout);
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(
                bx,
                mir::PlaceRef { local: dest.local, projection: &dest.projection },
            )
        };
        if fn_ret.is_indirect() {
            if dest.align < dest.layout.align.abi {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

    fn codegen_transmute(&mut self, bx: &mut Bx, src: &mir::Operand<'tcx>, dst: mir::Place<'tcx>) {
        if let Some(index) = dst.as_local() {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.layout_of(self.monomorphized_place_ty(dst.as_ref()));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout);
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = bx.load_operand(place);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                    self.debug_introduce_local(bx, index);
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(), "assigning to initialized SSA temp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, dst.as_ref());
            self.codegen_transmute_into(bx, src, dst);
        }
    }

    fn codegen_transmute_into(
        &mut self,
        bx: &mut Bx,
        src: &mir::Operand<'tcx>,
        dst: PlaceRef<'tcx, Bx::Value>,
    ) {
        let src = self.codegen_operand(bx, src);
        let llty = bx.backend_type(src.layout);
        let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
        let align = src.layout.align.abi.min(dst.align);
        src.val.store(bx, PlaceRef::new_sized_aligned(cast_ptr, src.layout, align));
    }
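
    // A transmute is thus codegen'd as a store through a pointer cast: the
    // destination pointer is reinterpreted at the source value's type and the
    // value is stored through it, using the smaller of the two alignments.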

    // Stores the return value of a function call into its final location.
    fn store_return(
        &mut self,
        bx: &mut Bx,
        dest: ReturnDest<'tcx, Bx::Value>,
        ret_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        llval: Bx::Value,
    ) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => bx.store_arg(&ret_abi, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = bx.load_operand(tmp);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
                self.debug_introduce_local(bx, index);
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_abi.mode {
                    let tmp = PlaceRef::alloca(bx, ret_abi.layout);
                    tmp.storage_live(bx);
                    bx.store_arg(&ret_abi, llval, tmp);
                    let op = bx.load_operand(tmp);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
                self.debug_introduce_local(bx, index);
            }
        }
    }
}

enum ReturnDest<'tcx, V> {
    // Do nothing; the return value is indirect or ignored.
    Nothing,
    // Store the return value to the pointer.
    Store(PlaceRef<'tcx, V>),
    // Store an indirect return value to an operand local place.
    IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
    // Store a direct return value to an operand local place.
    DirectOperand(mir::Local),
}