use rustc::middle::lang_items;
use rustc::ty::{self, Ty, TypeFoldable, Instance};
use rustc::ty::layout::{self, LayoutOf, HasTyCtxt, FnTypeExt};
use rustc::mir::{self, Place, PlaceBase, Static, StaticKind};
use rustc::mir::interpret::PanicInfo;
use rustc_target::abi::call::{ArgType, FnType, PassMode, IgnoreMode};
use rustc_target::spec::abi::Abi;
use crate::base;
use crate::MemFlags;
use crate::common::{self, IntPredicate};
use crate::meth;
use crate::traits::*;

use std::borrow::Cow;

use syntax::symbol::LocalInternedString;
use syntax_pos::Pos;

use super::{FunctionCx, LocalRef};
use super::place::PlaceRef;
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

/// Used by `FunctionCx::codegen_terminator` for emitting common patterns
/// e.g., creating a basic block, calling a function, etc.
struct TerminatorCodegenHelper<'a, 'tcx> {
    bb: &'a mir::BasicBlock,
    terminator: &'a mir::Terminator<'tcx>,
    funclet_bb: Option<mir::BasicBlock>,
}

impl<'a, 'tcx> TerminatorCodegenHelper<'a, 'tcx> {
    /// Returns the associated funclet from `FunctionCx::funclets` for the
    /// `funclet_bb` member if it is not `None`.
    fn funclet<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
        &self,
        fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
    ) -> Option<&'c Bx::Funclet> {
        match self.funclet_bb {
            Some(funcl) => fx.funclets[funcl].as_ref(),
            None => None,
        }
    }

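    /// Returns the backend block for `target`, plus a flag indicating whether
    /// the edge crosses out of the current MSVC funclet and therefore has to
    /// be lowered as a `cleanupret` rather than a plain branch.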
    fn lltarget<'b, 'c, Bx: BuilderMethods<'b, 'tcx>>(
        &self,
        fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> (Bx::BasicBlock, bool) {
        let span = self.terminator.source_info.span;
        let lltarget = fx.blocks[target];
        let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
        match (self.funclet_bb, target_funclet) {
            (None, None) => (lltarget, false),
            (Some(f), Some(t_f))
                if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) =>
                (lltarget, false),
            // jump *into* cleanup - need a landing pad if GNU
            (None, Some(_)) => (fx.landing_pad_to(target), false),
            (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", self.terminator),
            (Some(_), Some(_)) => (fx.landing_pad_to(target), true),
        }
    }

    /// Create a basic block.
    fn llblock<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
        &self,
        fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> Bx::BasicBlock {
        let (lltarget, is_cleanupret) = self.lltarget(fx, target);
        if is_cleanupret {
            // MSVC cross-funclet jump - need a trampoline

            debug!("llblock: creating cleanup trampoline for {:?}", target);
            let name = &format!("{:?}_cleanup_trampoline_{:?}", self.bb, target);
            let mut trampoline = fx.new_block(name);
            trampoline.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
            trampoline.llbb()
        } else {
            lltarget
        }
    }

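    /// Emits an unconditional branch to `target`, using `cleanup_ret` instead
    /// of `br` when the edge leaves an MSVC funclet.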
    fn funclet_br<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
        &self,
        fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
        bx: &mut Bx,
        target: mir::BasicBlock,
    ) {
        let (lltarget, is_cleanupret) = self.lltarget(fx, target);
        if is_cleanupret {
            // micro-optimization: generate a `ret` rather than a jump
            // to a trampoline.
            bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
        } else {
            bx.br(lltarget);
        }
    }

    /// Call `fn_ptr` of `fn_ty` with the arguments `llargs`, the optional
    /// return destination `destination` and the cleanup function `cleanup`.
    fn do_call<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
        &self,
        fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
        bx: &mut Bx,
        fn_ty: FnType<'tcx, Ty<'tcx>>,
        fn_ptr: Bx::Value,
        llargs: &[Bx::Value],
        destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) {
        if let Some(cleanup) = cleanup {
            let ret_bx = if let Some((_, target)) = destination {
                fx.blocks[target]
            } else {
                fx.unreachable_block()
            };
            let invokeret = bx.invoke(fn_ptr,
                                      &llargs,
                                      ret_bx,
                                      self.llblock(fx, cleanup),
                                      self.funclet(fx));
            bx.apply_attrs_callsite(&fn_ty, invokeret);

            if let Some((ret_dest, target)) = destination {
                let mut ret_bx = fx.build_block(target);
                fx.set_debug_loc(&mut ret_bx, self.terminator.source_info);
                fx.store_return(&mut ret_bx, ret_dest, &fn_ty.ret, invokeret);
            }
        } else {
            let llret = bx.call(fn_ptr, &llargs, self.funclet(fx));
            bx.apply_attrs_callsite(&fn_ty, llret);
            if fx.mir[*self.bb].is_cleanup {
                // Cleanup is always the cold path. Don't inline
                // drop glue. Also, when there is a deeply-nested
                // struct, there are "symmetry" issues that cause
                // exponential inlining - see issue #41696.
                bx.do_not_inline(llret);
            }

            if let Some((ret_dest, target)) = destination {
                fx.store_return(bx, ret_dest, &fn_ty.ret, llret);
                self.funclet_br(fx, bx, target);
            } else {
                bx.unreachable();
            }
        }
    }
}

/// Codegen implementations for some terminator variants.
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Generates code for a `Resume` terminator.
    fn codegen_resume_terminator<'b>(
        &mut self,
        helper: TerminatorCodegenHelper<'b, 'tcx>,
        mut bx: Bx,
    ) {
        if let Some(funclet) = helper.funclet(self) {
            bx.cleanup_ret(funclet, None);
        } else {
            let slot = self.get_personality_slot(&mut bx);
            let lp0 = slot.project_field(&mut bx, 0);
            let lp0 = bx.load_operand(lp0).immediate();
            let lp1 = slot.project_field(&mut bx, 1);
            let lp1 = bx.load_operand(lp1).immediate();
            slot.storage_dead(&mut bx);

            if !bx.sess().target.target.options.custom_unwind_resume {
                let mut lp = bx.const_undef(self.landing_pad_type());
                lp = bx.insert_value(lp, lp0, 0);
                lp = bx.insert_value(lp, lp1, 1);
                bx.resume(lp);
            } else {
                bx.call(bx.eh_unwind_resume(), &[lp0],
                        helper.funclet(self));
                bx.unreachable();
            }
        }
    }

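    /// Generates code for a `SwitchInt` terminator: a two-way `br` (with a
    /// special case for `bool` discriminants) or a multi-way `switch`.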
    fn codegen_switchint_terminator<'b>(
        &mut self,
        helper: TerminatorCodegenHelper<'b, 'tcx>,
        mut bx: Bx,
        discr: &mir::Operand<'tcx>,
        switch_ty: Ty<'tcx>,
        values: &Cow<'tcx, [u128]>,
        targets: &Vec<mir::BasicBlock>,
    ) {
        let discr = self.codegen_operand(&mut bx, &discr);
        if targets.len() == 2 {
            // If there are two targets, emit br instead of switch
            let lltrue = helper.llblock(self, targets[0]);
            let llfalse = helper.llblock(self, targets[1]);
            if switch_ty == bx.tcx().types.bool {
                // Don't generate trivial icmps when switching on bool
                if let [0] = values[..] {
                    bx.cond_br(discr.immediate(), llfalse, lltrue);
                } else {
                    assert_eq!(&values[..], &[1]);
                    bx.cond_br(discr.immediate(), lltrue, llfalse);
                }
            } else {
                let switch_llty = bx.immediate_backend_type(
                    bx.layout_of(switch_ty)
                );
                let llval = bx.const_uint_big(switch_llty, values[0]);
                let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
                bx.cond_br(cmp, lltrue, llfalse);
            }
        } else {
            let (otherwise, targets) = targets.split_last().unwrap();
            bx.switch(
                discr.immediate(),
                helper.llblock(self, *otherwise),
                values.iter().zip(targets).map(|(&value, target)| {
                    (value, helper.llblock(self, *target))
                })
            );
        }
    }

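    /// Generates code for a `Return` terminator, loading or spilling the
    /// return value as dictated by the return type's `PassMode`.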
    fn codegen_return_terminator(&mut self, mut bx: Bx) {
        if self.fn_ty.c_variadic {
            match self.va_list_ref {
                Some(va_list) => {
                    bx.va_end(va_list.llval);
                }
                None => {
                    bug!("C-variadic function must have a `va_list_ref`");
                }
            }
        }
        if self.fn_ty.ret.layout.abi.is_uninhabited() {
            // Functions with uninhabited return values are marked `noreturn`,
            // so we should make sure that we never actually do.
            bx.abort();
            bx.unreachable();
            return;
        }
        let llval = match self.fn_ty.ret.mode {
            PassMode::Ignore(IgnoreMode::Zst) | PassMode::Indirect(..) => {
                bx.ret_void();
                return;
            }

            PassMode::Ignore(IgnoreMode::CVarArgs) => {
                bug!("C-variadic arguments should never be the return type");
            }

            PassMode::Direct(_) | PassMode::Pair(..) => {
                let op =
                    self.codegen_consume(&mut bx, &mir::Place::RETURN_PLACE.as_ref());
                if let Ref(llval, _, align) = op.val {
                    bx.load(llval, align)
                } else {
                    op.immediate_or_packed_pair(&mut bx)
                }
            }

            PassMode::Cast(cast_ty) => {
                let op = match self.locals[mir::RETURN_PLACE] {
                    LocalRef::Operand(Some(op)) => op,
                    LocalRef::Operand(None) => bug!("use of return before def"),
                    LocalRef::Place(cg_place) => {
                        OperandRef {
                            val: Ref(cg_place.llval, None, cg_place.align),
                            layout: cg_place.layout
                        }
                    }
                    LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                };
                let llslot = match op.val {
                    Immediate(_) | Pair(..) => {
                        let scratch =
                            PlaceRef::alloca(&mut bx, self.fn_ty.ret.layout, "ret");
                        op.val.store(&mut bx, scratch);
                        scratch.llval
                    }
                    Ref(llval, _, align) => {
                        assert_eq!(align, op.layout.align.abi,
                                   "return place is unaligned!");
                        llval
                    }
                };
                let addr = bx.pointercast(llslot, bx.type_ptr_to(
                    bx.cast_backend_type(&cast_ty)
                ));
                bx.load(addr, self.fn_ty.ret.layout.align.abi)
            }
        };
        bx.ret(llval);
    }

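    /// Generates code for a `Drop` terminator: a direct call to the drop
    /// glue, or a virtual call through the vtable for trait objects.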
    fn codegen_drop_terminator<'b>(
        &mut self,
        helper: TerminatorCodegenHelper<'b, 'tcx>,
        mut bx: Bx,
        location: &mir::Place<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) {
        let ty = location.ty(self.mir, bx.tcx()).ty;
        let ty = self.monomorphize(&ty);
        let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);

        if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
            // we don't actually need to drop anything.
            helper.funclet_br(self, &mut bx, target);
            return
        }

        let place = self.codegen_place(&mut bx, &location.as_ref());
        let (args1, args2);
        let mut args = if let Some(llextra) = place.llextra {
            args2 = [place.llval, llextra];
            &args2[..]
        } else {
            args1 = [place.llval];
            &args1[..]
        };
        let (drop_fn, fn_ty) = match ty.sty {
            ty::Dynamic(..) => {
                let sig = drop_fn.fn_sig(self.cx.tcx());
                let sig = self.cx.tcx().normalize_erasing_late_bound_regions(
                    ty::ParamEnv::reveal_all(),
                    &sig,
                );
                let fn_ty = FnType::new_vtable(&bx, sig, &[]);
                let vtable = args[1];
                args = &args[..1];
                (meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_ty), fn_ty)
            }
            _ => {
                (bx.get_fn(drop_fn),
                 FnType::of_instance(&bx, drop_fn))
            }
        };
        helper.do_call(self, &mut bx, fn_ty, drop_fn, args,
                       Some((ReturnDest::Nothing, target)),
                       unwind);
    }

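    /// Generates code for an `Assert` terminator: a conditional branch to the
    /// success block, with a cold "panic" block that calls the panic entry
    /// point.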
    fn codegen_assert_terminator<'b>(
        &mut self,
        helper: TerminatorCodegenHelper<'b, 'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        cond: &mir::Operand<'tcx>,
        expected: bool,
        msg: &mir::AssertMessage<'tcx>,
        target: mir::BasicBlock,
        cleanup: Option<mir::BasicBlock>,
    ) {
        let span = terminator.source_info.span;
        let cond = self.codegen_operand(&mut bx, cond).immediate();
        let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);

        // This case can currently arise only from functions marked
        // with #[rustc_inherit_overflow_checks] and inlined from
        // another crate (mostly core::num generic/#[inline] fns),
        // while the current crate doesn't use overflow checks.
        // NOTE: Unlike binops, negation doesn't have its own
        // checked operation, just a comparison with the minimum
        // value, so we have to check for the assert message.
        if !bx.check_overflow() {
            if let PanicInfo::OverflowNeg = *msg {
                const_cond = Some(expected);
            }
        }

        // Don't codegen the panic block if success is known.
        if const_cond == Some(expected) {
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        // Pass the condition through llvm.expect for branch hinting.
        let cond = bx.expect(cond, expected);

        // Create the failure block and the conditional branch to it.
        let lltarget = helper.llblock(self, target);
        let panic_block = self.new_block("panic");
        if expected {
            bx.cond_br(cond, lltarget, panic_block.llbb());
        } else {
            bx.cond_br(cond, panic_block.llbb(), lltarget);
        }

        // After this point, bx is the block for the call to panic.
        bx = panic_block;
        self.set_debug_loc(&mut bx, terminator.source_info);

        // Get the location information.
        let loc = bx.sess().source_map().lookup_char_pos(span.lo());
        let filename = LocalInternedString::intern(&loc.file.name.to_string());
        let line = bx.const_u32(loc.line as u32);
        let col = bx.const_u32(loc.col.to_usize() as u32 + 1);

        // Put together the arguments to the panic entry point.
        let (lang_item, args) = match msg {
            PanicInfo::BoundsCheck { ref len, ref index } => {
                let len = self.codegen_operand(&mut bx, len).immediate();
                let index = self.codegen_operand(&mut bx, index).immediate();

                let file_line_col = bx.static_panic_msg(
                    None,
                    filename,
                    line,
                    col,
                    "panic_bounds_check_loc",
                );
                (lang_items::PanicBoundsCheckFnLangItem,
                 vec![file_line_col, index, len])
            }
            _ => {
                let str = msg.description();
                let msg_str = LocalInternedString::intern(str);
                let msg_file_line_col = bx.static_panic_msg(
                    Some(msg_str),
                    filename,
                    line,
                    col,
                    "panic_loc",
                );
                (lang_items::PanicFnLangItem,
                 vec![msg_file_line_col])
            }
        };

        // Obtain the panic entry point.
        let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
        let instance = ty::Instance::mono(bx.tcx(), def_id);
        let fn_ty = FnType::of_instance(&bx, instance);
        let llfn = bx.get_fn(instance);

        // Codegen the actual panic invoke/call.
        helper.do_call(self, &mut bx, fn_ty, llfn, &args, None, cleanup);
    }

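    /// Generates code for a `Call` terminator: resolves the callee, lowers
    /// intrinsics and virtual calls, marshals the arguments according to the
    /// ABI, and emits the call itself via `TerminatorCodegenHelper::do_call`.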
    fn codegen_call_terminator<'b>(
        &mut self,
        helper: TerminatorCodegenHelper<'b, 'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &Vec<mir::Operand<'tcx>>,
        destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) {
        let span = terminator.source_info.span;
        // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
        let callee = self.codegen_operand(&mut bx, func);

        let (instance, mut llfn) = match callee.layout.ty.sty {
            ty::FnDef(def_id, substs) => {
                (Some(ty::Instance::resolve(bx.tcx(),
                                            ty::ParamEnv::reveal_all(),
                                            def_id,
                                            substs).unwrap()),
                 None)
            }
            ty::FnPtr(_) => {
                (None, Some(callee.immediate()))
            }
            _ => bug!("{} is not callable", callee.layout.ty),
        };
        let def = instance.map(|i| i.def);
        let sig = callee.layout.ty.fn_sig(bx.tcx());
        let sig = bx.tcx().normalize_erasing_late_bound_regions(
            ty::ParamEnv::reveal_all(),
            &sig,
        );
        let abi = sig.abi;

        // Handle intrinsics old codegen wants Expr's for, ourselves.
        let intrinsic = match def {
            Some(ty::InstanceDef::Intrinsic(def_id)) =>
                Some(bx.tcx().item_name(def_id).as_str()),
            _ => None
        };
        let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

        if intrinsic == Some("transmute") {
            if let Some(destination_ref) = destination.as_ref() {
                let &(ref dest, target) = destination_ref;
                self.codegen_transmute(&mut bx, &args[0], dest);
                helper.funclet_br(self, &mut bx, target);
            } else {
                // If we are trying to transmute to an uninhabited type,
                // it is likely there is no allotted destination. In fact,
                // transmuting to an uninhabited type is UB, which means
                // we can do what we like. Here, we declare that transmuting
                // into an uninhabited type is impossible, so anything following
                // it must be unreachable.
                assert_eq!(bx.layout_of(sig.output()).abi, layout::Abi::Uninhabited);
                bx.unreachable();
            }
            return;
        }

        // The "spoofed" `VaListImpl` added to a C-variadic function's signature
        // should not be included in the `extra_args` calculation.
        let extra_args_start_idx = sig.inputs().len() - if sig.c_variadic { 1 } else { 0 };
        let extra_args = &args[extra_args_start_idx..];
        let extra_args = extra_args.iter().map(|op_arg| {
            let op_ty = op_arg.ty(self.mir, bx.tcx());
            self.monomorphize(&op_ty)
        }).collect::<Vec<_>>();

        let fn_ty = match def {
            Some(ty::InstanceDef::Virtual(..)) => {
                FnType::new_vtable(&bx, sig, &extra_args)
            }
            Some(ty::InstanceDef::DropGlue(_, None)) => {
                // Empty drop glue; a no-op.
                let &(_, target) = destination.as_ref().unwrap();
                helper.funclet_br(self, &mut bx, target);
                return;
            }
            _ => FnType::new(&bx, sig, &extra_args)
        };

        // Emit a panic or a no-op for `panic_if_uninhabited`.
        if intrinsic == Some("panic_if_uninhabited") {
            let ty = instance.unwrap().substs.type_at(0);
            let layout = bx.layout_of(ty);
            if layout.abi.is_uninhabited() {
                let loc = bx.sess().source_map().lookup_char_pos(span.lo());
                let filename = LocalInternedString::intern(&loc.file.name.to_string());
                let line = bx.const_u32(loc.line as u32);
                let col = bx.const_u32(loc.col.to_usize() as u32 + 1);

                let str = format!(
                    "Attempted to instantiate uninhabited type {}",
                    ty
                );
                let msg_str = LocalInternedString::intern(&str);
                let msg_file_line_col = bx.static_panic_msg(
                    Some(msg_str),
                    filename,
                    line,
                    col,
                    "panic_loc",
                );

                // Obtain the panic entry point.
                let def_id =
                    common::langcall(bx.tcx(), Some(span), "", lang_items::PanicFnLangItem);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_ty = FnType::of_instance(&bx, instance);
                let llfn = bx.get_fn(instance);

                // Codegen the actual panic invoke/call.
                helper.do_call(
                    self,
                    &mut bx,
                    fn_ty,
                    llfn,
                    &[msg_file_line_col],
                    destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
                    cleanup,
                );
            } else {
                // The type is inhabited, so the intrinsic is a no-op:
                // just branch to the destination.
                helper.funclet_br(self, &mut bx, destination.as_ref().unwrap().1)
            }
            return;
        }

        // The arguments we'll be passing. Plus one to account for outptr, if used.
        let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
        let mut llargs = Vec::with_capacity(arg_count);

        // Prepare the return value destination
        let ret_dest = if let Some((ref dest, _)) = *destination {
            let is_intrinsic = intrinsic.is_some();
            self.make_return_dest(&mut bx, dest, &fn_ty.ret, &mut llargs,
                                  is_intrinsic)
        } else {
            ReturnDest::Nothing
        };

        if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
            let dest = match ret_dest {
                _ if fn_ty.ret.is_indirect() => llargs[0],
                ReturnDest::Nothing =>
                    bx.const_undef(bx.type_ptr_to(bx.memory_ty(&fn_ty.ret))),
                ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) =>
                    dst.llval,
                ReturnDest::DirectOperand(_) =>
                    bug!("Cannot use direct operand with an intrinsic call"),
            };

            let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
                // The indices passed to simd_shuffle* in the
                // third argument must be constant. This is
                // checked by const-qualification, which also
                // promotes any complex rvalues to constants.
                if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                    match *arg {
                        // The shuffle array argument is usually not an explicit constant,
                        // but specified directly in the code. This means it gets promoted
                        // and we can then extract the value by evaluating the promoted.
                        mir::Operand::Copy(Place {
                            base: PlaceBase::Static(box Static {
                                kind: StaticKind::Promoted(promoted),
                                ty,
                                ..
                            }),
                            projection: None,
                        }) |
                        mir::Operand::Move(Place {
                            base: PlaceBase::Static(box Static {
                                kind: StaticKind::Promoted(promoted),
                                ty,
                                ..
                            }),
                            projection: None,
                        }) => {
                            let param_env = ty::ParamEnv::reveal_all();
                            let cid = mir::interpret::GlobalId {
                                instance: self.instance,
                                promoted: Some(promoted),
                            };
                            let c = bx.tcx().const_eval(param_env.and(cid));
                            let (llval, ty) = self.simd_shuffle_indices(
                                &bx,
                                terminator.source_info.span,
                                ty,
                                c,
                            );
                            return OperandRef {
                                val: Immediate(llval),
                                layout: bx.layout_of(ty),
                            };
                        }
                        mir::Operand::Copy(_) |
                        mir::Operand::Move(_) => {
                            span_bug!(span, "shuffle indices must be constant");
                        }
                        mir::Operand::Constant(ref constant) => {
                            let c = self.eval_mir_constant(constant);
                            let (llval, ty) = self.simd_shuffle_indices(
                                &bx,
                                constant.span,
                                constant.ty,
                                c,
                            );
                            return OperandRef {
                                val: Immediate(llval),
                                layout: bx.layout_of(ty)
                            };
                        }
                    }
                }

                self.codegen_operand(&mut bx, arg)
            }).collect();

            let callee_ty = instance.as_ref().unwrap().ty(bx.tcx());
            bx.codegen_intrinsic_call(callee_ty, &fn_ty, &args, dest,
                                      terminator.source_info.span);

            if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                self.store_return(&mut bx, ret_dest, &fn_ty.ret, dst.llval);
            }

            if let Some((_, target)) = *destination {
                helper.funclet_br(self, &mut bx, target);
            } else {
                bx.unreachable();
            }

            return;
        }

        // Split the rust-call tupled arguments off.
        let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
            let (tup, args) = args.split_last().unwrap();
            (args, Some(tup))
        } else {
            (&args[..], None)
        };

        // Useful for determining if the current argument is the "spoofed" `VaListImpl`
        let last_arg_idx = if sig.inputs().is_empty() {
            None
        } else {
            Some(sig.inputs().len() - 1)
        };
        'make_args: for (i, arg) in first_args.iter().enumerate() {
            // If this is a C-variadic function the function signature contains
            // a "spoofed" `VaListImpl`. This argument is ignored, but we need to
            // populate it with a dummy operand so that the user's real arguments
            // are not overwritten.
            let i = if sig.c_variadic && last_arg_idx.map(|x| i >= x).unwrap_or(false) {
                if i + 1 < fn_ty.args.len() {
                    i + 1
                } else {
                    break 'make_args
                }
            } else {
                i
            };
            let mut op = self.codegen_operand(&mut bx, arg);

            if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                if let Pair(..) = op.val {
                    // In the case of Rc<Self>, we need to explicitly pass a
                    // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
                    // that is understood elsewhere in the compiler as a method on
                    // `dyn Trait`.
                    // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
                    // we get a value of a built-in pointer type
                    'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
                        && !op.layout.ty.is_region_ptr()
                    {
                        'iter_fields: for i in 0..op.layout.fields.count() {
                            let field = op.extract_field(&mut bx, i);
                            if !field.layout.is_zst() {
                                // we found the one non-zero-sized field that is allowed
                                // now find *its* non-zero-sized field, or stop if it's a
                                // pointer
                                op = field;
                                continue 'descend_newtypes
                            }
                        }

                        span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
                    }

                    // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
                    // data pointer and vtable. Look up the method in the vtable, and pass
                    // the data pointer as the first argument
                    match op.val {
                        Pair(data_ptr, meta) => {
                            llfn = Some(meth::VirtualIndex::from_index(idx)
                                .get_fn(&mut bx, meta, &fn_ty));
                            llargs.push(data_ptr);
                            continue 'make_args
                        }
                        other => bug!("expected a Pair, got {:?}", other),
                    }
                } else if let Ref(data_ptr, Some(meta), _) = op.val {
                    // by-value dynamic dispatch
                    llfn = Some(meth::VirtualIndex::from_index(idx)
                        .get_fn(&mut bx, meta, &fn_ty));
                    llargs.push(data_ptr);
                    continue;
                } else {
                    span_bug!(span, "can't codegen a virtual call on {:?}", op);
                }
            }

            // The callee needs to own the argument memory if we pass it
            // by-ref, so make a local copy of non-immediate constants.
            match (arg, op.val) {
                (&mir::Operand::Copy(_), Ref(_, None, _)) |
                (&mir::Operand::Constant(_), Ref(_, None, _)) => {
                    let tmp = PlaceRef::alloca(&mut bx, op.layout, "const");
                    op.val.store(&mut bx, tmp);
                    op.val = Ref(tmp.llval, None, tmp.align);
                }
                _ => {}
            }

            self.codegen_argument(&mut bx, op, &mut llargs, &fn_ty.args[i]);
        }
        if let Some(tup) = untuple {
            self.codegen_arguments_untupled(&mut bx, tup, &mut llargs,
                                            &fn_ty.args[first_args.len()..])
        }

        let fn_ptr = match (llfn, instance) {
            (Some(llfn), _) => llfn,
            (None, Some(instance)) => bx.get_fn(instance),
            _ => span_bug!(span, "no llfn for call"),
        };

        helper.do_call(self, &mut bx, fn_ty, fn_ptr, &llargs,
                       destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                       cleanup);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
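    /// Codegens the MIR basic block `bb`: all of its statements followed by
    /// its terminator.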
    pub fn codegen_block(
        &mut self,
        bb: mir::BasicBlock,
    ) {
        let mut bx = self.build_block(bb);
        let data = &self.mir[bb];

        debug!("codegen_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.codegen_statement(bx, statement);
        }

        self.codegen_terminator(bx, bb, data.terminator());
    }

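    /// Dispatches on the kind of `terminator`, delegating to the
    /// `codegen_*_terminator` methods above.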
    fn codegen_terminator(
        &mut self,
        mut bx: Bx,
        bb: mir::BasicBlock,
        terminator: &mir::Terminator<'tcx>
    ) {
        debug!("codegen_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let helper = TerminatorCodegenHelper {
            bb: &bb, terminator, funclet_bb
        };

        self.set_debug_loc(&mut bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                self.codegen_resume_terminator(helper, bx)
            }

            mir::TerminatorKind::Abort => {
                bx.abort();
                bx.unreachable();
            }

            mir::TerminatorKind::Goto { target } => {
                helper.funclet_br(self, &mut bx, target);
            }

            mir::TerminatorKind::SwitchInt {
                ref discr, switch_ty, ref values, ref targets
            } => {
                self.codegen_switchint_terminator(helper, bx, discr, switch_ty,
                                                  values, targets);
            }

            mir::TerminatorKind::Return => {
                self.codegen_return_terminator(bx);
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }

            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                self.codegen_drop_terminator(helper, bx, location, target, unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                self.codegen_assert_terminator(helper, bx, terminator, cond,
                                               expected, msg, target, cleanup);
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
            }

            mir::TerminatorKind::Call {
                ref func,
                ref args,
                ref destination,
                cleanup,
                from_hir_call: _
            } => {
                self.codegen_call_terminator(helper, bx, terminator, func,
                                             args, destination, cleanup);
            }

            mir::TerminatorKind::GeneratorDrop |
            mir::TerminatorKind::Yield { .. } => bug!("generator ops in codegen"),
            mir::TerminatorKind::FalseEdges { .. } |
            mir::TerminatorKind::FalseUnwind { .. } => bug!("borrowck false edges in codegen"),
        }
    }

    fn codegen_argument(
        &mut self,
        bx: &mut Bx,
        op: OperandRef<'tcx, Bx::Value>,
        llargs: &mut Vec<Bx::Value>,
        arg: &ArgType<'tcx, Ty<'tcx>>
    ) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(bx.const_undef(bx.reg_backend_type(&ty)))
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op)
            }
        } else if arg.is_unsized_indirect() {
            match op.val {
                Ref(a, Some(b), _) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op)
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                match arg.mode {
                    PassMode::Indirect(..) | PassMode::Cast(_) => {
                        let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                        op.val.store(bx, scratch);
                        (scratch.llval, scratch.align, true)
                    }
                    _ => {
                        (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false)
                    }
                }
            }
            Ref(llval, _, align) => {
                if arg.is_indirect() && align < arg.layout.align.abi {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                    // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                    // have scary latent bugs around.

                    let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                    base::memcpy_ty(bx, scratch.llval, scratch.align, llval, align,
                                    op.layout, MemFlags::empty());
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                let addr = bx.pointercast(llval, bx.type_ptr_to(
                    bx.cast_backend_type(&ty))
                );
                llval = bx.load(addr, align.min(arg.layout.align.abi));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(llval, align);
                if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as `i8` so we need to truncate to `i1`.
                llval = base::to_immediate(bx, llval, arg.layout);
            }
        }

        llargs.push(llval);
    }

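    /// Expands the tupled arguments of a "rust-call" ABI function into
    /// individual call arguments.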
    fn codegen_arguments_untupled(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
        llargs: &mut Vec<Bx::Value>,
        args: &[ArgType<'tcx, Ty<'tcx>>]
    ) {
        let tuple = self.codegen_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, None, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                let field = bx.load_operand(field_ptr);
                self.codegen_argument(bx, field, llargs, &args[i]);
            }
        } else if let Ref(_, Some(_), _) = tuple.val {
            bug!("closure arguments must be sized")
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.codegen_argument(bx, op, llargs, &args[i]);
            }
        }
    }

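    /// Returns (allocating on first use) the `(*mut u8, i32)` slot that
    /// landing pads use to stash the exception pointer and selector.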
    fn get_personality_slot(
        &mut self,
        bx: &mut Bx
    ) -> PlaceRef<'tcx, Bx::Value> {
        let cx = bx.cx();
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(cx.tcx().intern_tup(&[
                cx.tcx().mk_mut_ptr(cx.tcx().types.u8),
                cx.tcx().types.i32
            ]));
            let slot = PlaceRef::alloca(bx, layout, "personalityslot");
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Returns the landing-pad wrapper around the given basic block.
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(
        &mut self,
        target_bb: mir::BasicBlock
    ) -> Bx::BasicBlock {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

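    /// Builds a fresh GNU-style landing pad that saves the exception payload
    /// into the personality slot and then branches on to `target_bb`.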
    fn landing_pad_uncached(
        &mut self,
        target_bb: Bx::BasicBlock
    ) -> Bx::BasicBlock {
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let mut bx = self.new_block("cleanup");

        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        let slot = self.get_personality_slot(&mut bx);
        slot.storage_live(&mut bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&mut bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }

    fn landing_pad_type(&self) -> Bx::Type {
        let cx = self.cx;
        cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
    }

    fn unreachable_block(
        &mut self
    ) -> Bx::BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let mut bx = self.new_block("unreachable");
            bx.unreachable();
            self.unreachable_block = Some(bx.llbb());
            bx.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Bx {
        Bx::new_block(self.cx, self.llfn, name)
    }

    pub fn build_block(
        &self,
        bb: mir::BasicBlock
    ) -> Bx {
        let mut bx = Bx::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
    }

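    /// Decides where a call's return value should be written: nowhere, into
    /// an existing place, or into a fresh temporary, pushing an out-pointer
    /// onto `llargs` for indirect returns.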
    fn make_return_dest(
        &mut self,
        bx: &mut Bx,
        dest: &mir::Place<'tcx>,
        fn_ret: &ArgType<'tcx, Ty<'tcx>>,
        llargs: &mut Vec<Bx::Value>, is_intrinsic: bool
    ) -> ReturnDest<'tcx, Bx::Value> {
        // If the return is ignored, we can just return a do-nothing `ReturnDest`.
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Place {
            base: mir::PlaceBase::Local(index),
            projection: None,
        } = *dest {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically `Operand` ones, as
                    // they don't have `alloca`s.
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary `alloca` for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(bx, &mir::PlaceRef {
                base: &dest.base,
                projection: &dest.projection,
            })
        };
        if fn_ret.is_indirect() {
            if dest.align < dest.layout.align.abi {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

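    /// Codegens a transmute into `dst`, routing operand locals through a
    /// temporary `alloca` since they have no backing memory of their own.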
    fn codegen_transmute(
        &mut self,
        bx: &mut Bx,
        src: &mir::Operand<'tcx>,
        dst: &mir::Place<'tcx>
    ) {
        if let mir::Place {
            base: mir::PlaceBase::Local(index),
            projection: None,
        } = *dst {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.layout_of(self.monomorphized_place_ty(&dst.as_ref()));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout, "transmute_temp");
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = bx.load_operand(place);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(),
                            "assigning to initialized SSA temp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, &dst.as_ref());
            self.codegen_transmute_into(bx, src, dst);
        }
    }

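    /// Stores `src` through `dst`'s pointer reinterpreted at the source type,
    /// using the smaller of the two alignments.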
    fn codegen_transmute_into(
        &mut self,
        bx: &mut Bx,
        src: &mir::Operand<'tcx>,
        dst: PlaceRef<'tcx, Bx::Value>
    ) {
        let src = self.codegen_operand(bx, src);
        let llty = bx.backend_type(src.layout);
        let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
        let align = src.layout.align.abi.min(dst.align);
        src.val.store(bx, PlaceRef::new_sized(cast_ptr, src.layout, align));
    }

    // Stores the return value of a function call into its final location.
    fn store_return(
        &mut self,
        bx: &mut Bx,
        dest: ReturnDest<'tcx, Bx::Value>,
        ret_ty: &ArgType<'tcx, Ty<'tcx>>,
        llval: Bx::Value
    ) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => bx.store_arg_ty(&ret_ty, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = bx.load_operand(tmp);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_ty.mode {
                    let tmp = PlaceRef::alloca(bx, ret_ty.layout, "tmp_ret");
                    tmp.storage_live(bx);
                    bx.store_arg_ty(&ret_ty, llval, tmp);
                    let op = bx.load_operand(tmp);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_ty.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest<'tcx, V> {
    // Do nothing; the return value is indirect or ignored.
    Nothing,
    // Store the return value to the pointer.
    Store(PlaceRef<'tcx, V>),
    // Store an indirect return value to an operand local place.
    IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
    // Store a direct return value to an operand local place.
    DirectOperand(mir::Local)
}