use rustc::middle::lang_items;
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::ty::layout::{self, LayoutOf, HasTyCtxt};
use rustc::mir;
use rustc::mir::interpret::EvalErrorKind;
use rustc_target::abi::call::{ArgType, FnType, PassMode};
use rustc_target::spec::abi::Abi;
use rustc_mir::monomorphize;
use crate::base;
use crate::MemFlags;
use crate::common::{self, IntPredicate};
use crate::meth;
use crate::traits::*;

use syntax::symbol::Symbol;
use syntax_pos::Pos;

use super::{FunctionCx, LocalRef};
use super::place::PlaceRef;
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};
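
// Lowers MIR basic blocks to backend IR: statements are codegen'd in
// order, then the block's terminator determines the emitted control
// flow (branches, switches, calls, drops, asserts, and unwind edges).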
impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
        let mut bx = self.build_block(bb);
        let data = &self.mir[bb];

        debug!("codegen_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.codegen_statement(bx, statement);
        }

        self.codegen_terminator(bx, bb, data.terminator());
    }
    fn codegen_terminator(
        &mut self,
        mut bx: Bx,
        bb: mir::BasicBlock,
        terminator: &mir::Terminator<'tcx>
    ) {
        debug!("codegen_terminator: {:?}", terminator);
        // Create the cleanup bundle, if needed.
        let tcx = self.cx.tcx();
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);

        // HACK(eddyb) force the right lifetimes, NLL can't figure them out.
        fn funclet_closure_factory<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
            funclet_bb: Option<mir::BasicBlock>
        ) -> impl for<'b> Fn(
            &'b FunctionCx<'a, 'tcx, Bx>,
        ) -> Option<&'b Bx::Funclet> {
            move |this| {
                match funclet_bb {
                    Some(funclet_bb) => this.funclets[funclet_bb].as_ref(),
                    None => None,
                }
            }
        }
        let funclet = funclet_closure_factory(funclet_bb);
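
        // `funclet(this)` resolves the funclet `bb` belongs to, if any.
        // Funclets only exist under MSVC-style SEH (`wants_msvc_seh`),
        // where call sites inside cleanup blocks must carry a funclet
        // operand bundle; on landing-pad (GNU) targets it is `None`.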
        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
                }
            }
        };
        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline

                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let mut trampoline = this.new_block(name);
                trampoline.cleanup_ret(funclet(this).unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };
        let funclet_br =
            |this: &mut Self, bx: &mut Bx, target: mir::BasicBlock| {
                let (lltarget, is_cleanupret) = lltarget(this, target);
                if is_cleanupret {
                    // micro-optimization: generate a `ret` rather than a jump
                    // to a trampoline.
                    bx.cleanup_ret(funclet(this).unwrap(), Some(lltarget));
                } else {
                    bx.br(lltarget);
                }
            };
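
        // Helper summary: `lltarget` resolves a MIR target block to a
        // backend block (adding a landing pad when jumping *into*
        // cleanup), `llblock` additionally materializes a `cleanupret`
        // trampoline for MSVC cross-funclet jumps, and `funclet_br`
        // emits the actual branch.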
        let do_call = |
            this: &mut Self,
            bx: &mut Bx,
            fn_ty: FnType<'tcx, Ty<'tcx>>,
            fn_ptr: Bx::Value,
            llargs: &[Bx::Value],
            destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
            cleanup: Option<mir::BasicBlock>
        | {
            if let Some(cleanup) = cleanup {
                let ret_bx = if let Some((_, target)) = destination {
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
                let invokeret = bx.invoke(fn_ptr,
                                          &llargs,
                                          ret_bx,
                                          llblock(this, cleanup),
                                          funclet(this));
                bx.apply_attrs_callsite(&fn_ty, invokeret);

                if let Some((ret_dest, target)) = destination {
                    let mut ret_bx = this.build_block(target);
                    this.set_debug_loc(&mut ret_bx, terminator.source_info);
                    this.store_return(&mut ret_bx, ret_dest, &fn_ty.ret, invokeret);
                }
            } else {
                let llret = bx.call(fn_ptr, &llargs, funclet(this));
                bx.apply_attrs_callsite(&fn_ty, llret);
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    bx.do_not_inline(llret);
                }

                if let Some((ret_dest, target)) = destination {
                    this.store_return(bx, ret_dest, &fn_ty.ret, llret);
                    funclet_br(this, bx, target);
                } else {
                    bx.unreachable();
                }
            }
        };
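
        // `do_call` is shared by every call-like terminator below: it
        // emits an `invoke` (with an unwind edge to `cleanup`) when a
        // cleanup block is present, and a plain `call` otherwise,
        // storing the return value afterwards in both cases.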
        self.set_debug_loc(&mut bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                if let Some(funclet) = funclet(self) {
                    bx.cleanup_ret(funclet, None);
                } else {
                    let slot = self.get_personality_slot(&mut bx);
                    let lp0 = slot.project_field(&mut bx, 0);
                    let lp0 = bx.load_operand(lp0).immediate();
                    let lp1 = slot.project_field(&mut bx, 1);
                    let lp1 = bx.load_operand(lp1).immediate();
                    slot.storage_dead(&mut bx);

                    if !bx.sess().target.target.options.custom_unwind_resume {
                        let mut lp = bx.const_undef(self.landing_pad_type());
                        lp = bx.insert_value(lp, lp0, 0);
                        lp = bx.insert_value(lp, lp1, 1);
                        bx.resume(lp);
                    } else {
                        bx.call(bx.eh_unwind_resume(), &[lp0], funclet(self));
                        bx.unreachable();
                    }
                }
            }
            mir::TerminatorKind::Abort => {
                bx.abort();
                bx.unreachable();
            }
            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, &mut bx, target);
            }
            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let discr = self.codegen_operand(&mut bx, discr);
                if targets.len() == 2 {
                    // If there are two targets, emit br instead of switch
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
                    if switch_ty == bx.tcx().types.bool {
                        // Don't generate trivial icmps when switching on bool
                        if let [0] = values[..] {
                            bx.cond_br(discr.immediate(), llfalse, lltrue);
                        } else {
                            assert_eq!(&values[..], &[1]);
                            bx.cond_br(discr.immediate(), lltrue, llfalse);
                        }
                    } else {
                        let switch_llty = bx.immediate_backend_type(
                            bx.layout_of(switch_ty)
                        );
                        let llval = bx.const_uint_big(switch_llty, values[0]);
                        let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
                        bx.cond_br(cmp, lltrue, llfalse);
                    }
                } else {
                    let (otherwise, targets) = targets.split_last().unwrap();
                    let switch = bx.switch(discr.immediate(),
                                           llblock(self, *otherwise),
                                           values.len());
                    let switch_llty = bx.immediate_backend_type(
                        bx.layout_of(switch_ty)
                    );
                    for (&value, target) in values.iter().zip(targets) {
                        let llval = bx.const_uint_big(switch_llty, value);
                        let llbb = llblock(self, *target);
                        bx.add_case(switch, llval, llbb)
                    }
                }
            }
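
            // For example, a MIR terminator like
            //     switchInt(move _2) -> [0: bb3, otherwise: bb4]
            // on a `bool` discriminant becomes a single conditional
            // branch above, while a multi-way switch over an integer
            // lowers to a `switch` with one `add_case` per value.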
            mir::TerminatorKind::Return => {
                let llval = match self.fn_ty.ret.mode {
                    PassMode::Ignore | PassMode::Indirect(..) => {
                        bx.ret_void();
                        return;
                    }

                    PassMode::Direct(_) | PassMode::Pair(..) => {
                        let op =
                            self.codegen_consume(&mut bx, &mir::Place::Local(mir::RETURN_PLACE));
                        if let Ref(llval, _, align) = op.val {
                            bx.load(llval, align)
                        } else {
                            op.immediate_or_packed_pair(&mut bx)
                        }
                    }

                    PassMode::Cast(cast_ty) => {
                        let op = match self.locals[mir::RETURN_PLACE] {
                            LocalRef::Operand(Some(op)) => op,
                            LocalRef::Operand(None) => bug!("use of return before def"),
                            LocalRef::Place(cg_place) => {
                                OperandRef {
                                    val: Ref(cg_place.llval, None, cg_place.align),
                                    layout: cg_place.layout
                                }
                            }
                            LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                        };
                        let llslot = match op.val {
                            Immediate(_) | Pair(..) => {
                                let scratch =
                                    PlaceRef::alloca(&mut bx, self.fn_ty.ret.layout, "ret");
                                op.val.store(&mut bx, scratch);
                                scratch.llval
                            }
                            Ref(llval, _, align) => {
                                assert_eq!(align, op.layout.align.abi,
                                           "return place is unaligned!");
                                llval
                            }
                        };
                        let addr = bx.pointercast(llslot, bx.type_ptr_to(
                            bx.cast_backend_type(&cast_ty)
                        ));
                        bx.load(addr, self.fn_ty.ret.layout.align.abi)
                    }
                };
                bx.ret(llval);
            }
            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }
            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(self.mir, bx.tcx()).to_ty(bx.tcx());
                let ty = self.monomorphize(&ty);
                let drop_fn = monomorphize::resolve_drop_in_place(bx.tcx(), ty);

                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
                    funclet_br(self, &mut bx, target);
                    return
                }

                let place = self.codegen_place(&mut bx, location);
                let (args1, args2);
                let mut args = if let Some(llextra) = place.llextra {
                    args2 = [place.llval, llextra];
                    &args2[..]
                } else {
                    args1 = [place.llval];
                    &args1[..]
                };
                let (drop_fn, fn_ty) = match ty.sty {
                    ty::Dynamic(..) => {
                        let sig = drop_fn.fn_sig(tcx);
                        let sig = tcx.normalize_erasing_late_bound_regions(
                            ty::ParamEnv::reveal_all(),
                            &sig,
                        );
                        let fn_ty = bx.new_vtable(sig, &[]);
                        let vtable = args[1];
                        args = &args[..1];
                        (meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_ty), fn_ty)
                    }
                    _ => {
                        (bx.get_fn(drop_fn),
                         bx.fn_type_of_instance(&drop_fn))
                    }
                };
                do_call(self, &mut bx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, target)),
                        unwind);
            }
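
            // In the `Drop` arm above: for a concrete type, `drop_fn`
            // is the monomorphized drop glue and is called directly;
            // for a `dyn Trait` object the destructor is instead loaded
            // from the vtable (`meth::DESTRUCTOR`) and the fat pointer
            // is split into its data and vtable halves.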
            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.codegen_operand(&mut bx, cond).immediate();
                let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bx.check_overflow() {
                    if let mir::interpret::EvalErrorKind::OverflowNeg = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't codegen the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, &mut bx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let cond = bx.expect(cond, expected);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
                if expected {
                    bx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bx is the block for the call to panic.
                bx = panic_block;
                self.set_debug_loc(&mut bx, terminator.source_info);

                // Get the location information.
                let loc = bx.sess().source_map().lookup_char_pos(span.lo());
                let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                let filename = bx.const_str_slice(filename);
                let line = bx.const_u32(loc.line as u32);
                let col = bx.const_u32(loc.col.to_usize() as u32 + 1);
                let align = tcx.data_layout.aggregate_align.abi
                    .max(tcx.data_layout.i32_align.abi)
                    .max(tcx.data_layout.pointer_align.abi);

                // Put together the arguments to the panic entry point.
                let (lang_item, args) = match *msg {
                    EvalErrorKind::BoundsCheck { ref len, ref index } => {
                        let len = self.codegen_operand(&mut bx, len).immediate();
                        let index = self.codegen_operand(&mut bx, index).immediate();

                        let file_line_col = bx.const_struct(&[filename, line, col], false);
                        let file_line_col = bx.static_addr_of(
                            file_line_col,
                            align,
                            Some("panic_bounds_check_loc")
                        );
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line_col, index, len])
                    }
                    _ => {
                        let str = msg.description();
                        let msg_str = Symbol::intern(str).as_str();
                        let msg_str = bx.const_str_slice(msg_str);
                        let msg_file_line_col = bx.const_struct(
                            &[msg_str, filename, line, col],
                            false
                        );
                        let msg_file_line_col = bx.static_addr_of(
                            msg_file_line_col,
                            align,
                            Some("panic_loc")
                        );
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col])
                    }
                };

                // Obtain the panic entry point.
                let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_ty = bx.fn_type_of_instance(&instance);
                let llfn = bx.get_fn(instance);

                // Codegen the actual panic invoke/call.
                do_call(self, &mut bx, fn_ty, llfn, &args, None, cleanup);
            }
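
            // Example for the `Assert` arm above: an indexing
            // expression `a[i]` yields an assert whose message is
            // `EvalErrorKind::BoundsCheck`, lowered to a conditional
            // branch into a cold "panic" block that calls the
            // `panic_bounds_check` lang item with (loc, index, len).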
            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
            }
            mir::TerminatorKind::Call {
                ref func,
                ref args,
                ref destination,
                cleanup,
                from_hir_call: _
            } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.codegen_operand(&mut bx, func);

                let (instance, mut llfn) = match callee.layout.ty.sty {
                    ty::FnDef(def_id, substs) => {
                        (Some(ty::Instance::resolve(bx.tcx(),
                                                    ty::ParamEnv::reveal_all(),
                                                    def_id,
                                                    substs).unwrap()),
                         None)
                    }
                    ty::FnPtr(_) => {
                        (None, Some(callee.immediate()))
                    }
                    _ => bug!("{} is not callable", callee.layout.ty)
                };
                let def = instance.map(|i| i.def);
                let sig = callee.layout.ty.fn_sig(bx.tcx());
                let sig = bx.tcx().normalize_erasing_late_bound_regions(
                    ty::ParamEnv::reveal_all(),
                    &sig,
                );
                let abi = sig.abi;

                // Handle intrinsics old codegen wants Expr's for, ourselves.
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
                        => Some(bx.tcx().item_name(def_id).as_str()),
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("transmute") {
                    if let Some(destination_ref) = destination.as_ref() {
                        let &(ref dest, target) = destination_ref;
                        self.codegen_transmute(&mut bx, &args[0], dest);
                        funclet_br(self, &mut bx, target);
                    } else {
                        // If we are trying to transmute to an uninhabited type,
                        // it is likely there is no allotted destination. In fact,
                        // transmuting to an uninhabited type is UB, which means
                        // we can do what we like. Here, we declare that transmuting
                        // into an uninhabited type is impossible, so anything following
                        // it must be unreachable.
                        assert_eq!(bx.layout_of(sig.output()).abi, layout::Abi::Uninhabited);
                        bx.unreachable();
                    }
                    return;
                }

                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        bx.new_vtable(sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, &mut bx, target);
                        return;
                    }
                    _ => bx.new_fn_type(sig, &extra_args)
                };

                // emit a panic or a NOP for `panic_if_uninhabited`
                if intrinsic == Some("panic_if_uninhabited") {
                    let ty = instance.unwrap().substs.type_at(0);
                    let layout = bx.layout_of(ty);
                    if layout.abi.is_uninhabited() {
                        let loc = bx.sess().source_map().lookup_char_pos(span.lo());
                        let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                        let filename = bx.const_str_slice(filename);
                        let line = bx.const_u32(loc.line as u32);
                        let col = bx.const_u32(loc.col.to_usize() as u32 + 1);
                        let align = tcx.data_layout.aggregate_align.abi
                            .max(tcx.data_layout.i32_align.abi)
                            .max(tcx.data_layout.pointer_align.abi);

                        let str = format!(
                            "Attempted to instantiate uninhabited type {}",
                            ty
                        );
                        let msg_str = Symbol::intern(&str).as_str();
                        let msg_str = bx.const_str_slice(msg_str);
                        let msg_file_line_col = bx.const_struct(
                            &[msg_str, filename, line, col],
                            false,
                        );
                        let msg_file_line_col = bx.static_addr_of(
                            msg_file_line_col,
                            align,
                            Some("panic_loc"),
                        );

                        // Obtain the panic entry point.
                        let def_id =
                            common::langcall(bx.tcx(), Some(span), "", lang_items::PanicFnLangItem);
                        let instance = ty::Instance::mono(bx.tcx(), def_id);
                        let fn_ty = bx.fn_type_of_instance(&instance);
                        let llfn = bx.get_fn(instance);

                        // Codegen the actual panic invoke/call.
                        do_call(
                            self,
                            &mut bx,
                            fn_ty,
                            llfn,
                            &[msg_file_line_col],
                            destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
                            cleanup,
                        );
                    } else {
                        // a NOP
                        funclet_br(self, &mut bx, destination.as_ref().unwrap().1);
                    }
                    return;
                }

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = intrinsic.is_some();
                    self.make_return_dest(&mut bx, dest, &fn_ty.ret, &mut llargs,
                                          is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
                    let dest = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => llargs[0],
                        ReturnDest::Nothing => {
                            bx.const_undef(bx.type_ptr_to(bx.memory_ty(&fn_ty.ret)))
                        }
                        ReturnDest::IndirectOperand(dst, _) |
                        ReturnDest::Store(dst) => dst.llval,
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };

                    let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
                        // The indices passed to simd_shuffle* in the
                        // third argument must be constant. This is
                        // checked by const-qualification, which also
                        // promotes any complex rvalues to constants.
                        if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                            match *arg {
                                // The shuffle array argument is usually not an explicit constant,
                                // but specified directly in the code. This means it gets promoted
                                // and we can then extract the value by evaluating the promoted.
                                mir::Operand::Copy(mir::Place::Promoted(box(index, ty))) |
                                mir::Operand::Move(mir::Place::Promoted(box(index, ty))) => {
                                    let param_env = ty::ParamEnv::reveal_all();
                                    let cid = mir::interpret::GlobalId {
                                        instance: self.instance,
                                        promoted: Some(index),
                                    };
                                    let c = bx.tcx().const_eval(param_env.and(cid));
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        terminator.source_info.span,
                                        ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.layout_of(ty),
                                    };
                                }
                                mir::Operand::Copy(_) |
                                mir::Operand::Move(_) => {
                                    span_bug!(span, "shuffle indices must be constant");
                                }
                                mir::Operand::Constant(ref constant) => {
                                    let c = self.eval_mir_constant(&bx, constant);
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        constant.span,
                                        constant.ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.layout_of(ty)
                                    };
                                }
                            }
                        }

                        self.codegen_operand(&mut bx, arg)
                    }).collect();

                    let callee_ty = instance.as_ref().unwrap().ty(bx.tcx());
                    bx.codegen_intrinsic_call(callee_ty, &fn_ty, &args, dest,
                                              terminator.source_info.span);

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                        self.store_return(&mut bx, ret_dest, &fn_ty.ret, dst.llval);
                    }

                    if let Some((_, target)) = *destination {
                        funclet_br(self, &mut bx, target);
                    } else {
                        bx.unreachable();
                    }

                    return;
                }
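
                // Past this point we are lowering a regular,
                // non-intrinsic call: split off the tupled trailing
                // argument of "rust-call" functions, resolve virtual
                // calls through the vtable, then hand everything to
                // `do_call`.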
                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                'make_args: for (i, arg) in first_args.iter().enumerate() {
                    let mut op = self.codegen_operand(&mut bx, arg);

                    if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                        if let Pair(..) = op.val {
                            // In the case of Rc<Self>, we need to explicitly pass a
                            // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
                            // that is understood elsewhere in the compiler as a method on
                            // `dyn Trait`.
                            // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
                            // we get a value of a built-in pointer type
                            'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
                                            && !op.layout.ty.is_region_ptr()
                            {
                                'iter_fields: for i in 0..op.layout.fields.count() {
                                    let field = op.extract_field(&mut bx, i);
                                    if !field.layout.is_zst() {
                                        // we found the one non-zero-sized field that is allowed
                                        // now find *its* non-zero-sized field, or stop if it's a
                                        // pointer
                                        op = field;
                                        continue 'descend_newtypes
                                    }
                                }

                                span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
                            }

                            // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
                            // data pointer and vtable. Look up the method in the vtable, and pass
                            // the data pointer as the first argument
                            match op.val {
                                Pair(data_ptr, meta) => {
                                    llfn = Some(meth::VirtualIndex::from_index(idx)
                                        .get_fn(&mut bx, meta, &fn_ty));
                                    llargs.push(data_ptr);
                                    continue 'make_args
                                }
                                other => bug!("expected a Pair, got {:?}", other)
                            }
                        } else if let Ref(data_ptr, Some(meta), _) = op.val {
                            // by-value dynamic dispatch
                            llfn = Some(meth::VirtualIndex::from_index(idx)
                                .get_fn(&mut bx, meta, &fn_ty));
                            llargs.push(data_ptr);
                            continue;
                        } else {
                            span_bug!(span, "can't codegen a virtual call on {:?}", op);
                        }
                    }

                    // The callee needs to own the argument memory if we pass it
                    // by-ref, so make a local copy of non-immediate constants.
                    match (arg, op.val) {
                        (&mir::Operand::Copy(_), Ref(_, None, _)) |
                        (&mir::Operand::Constant(_), Ref(_, None, _)) => {
                            let tmp = PlaceRef::alloca(&mut bx, op.layout, "const");
                            op.val.store(&mut bx, tmp);
                            op.val = Ref(tmp.llval, None, tmp.align);
                        }
                        _ => {}
                    }

                    self.codegen_argument(&mut bx, op, &mut llargs, &fn_ty.args[i]);
                }

                if let Some(tup) = untuple {
                    self.codegen_arguments_untupled(&mut bx, tup, &mut llargs,
                        &fn_ty.args[first_args.len()..])
                }

                let fn_ptr = match (llfn, instance) {
                    (Some(llfn), _) => llfn,
                    (None, Some(instance)) => bx.get_fn(instance),
                    _ => span_bug!(span, "no llfn for call"),
                };

                do_call(self, &mut bx, fn_ty, fn_ptr, &llargs,
                        destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                        cleanup);
            }
            mir::TerminatorKind::GeneratorDrop |
            mir::TerminatorKind::Yield { .. } => bug!("generator ops in codegen"),
            mir::TerminatorKind::FalseEdges { .. } |
            mir::TerminatorKind::FalseUnwind { .. } => bug!("borrowck false edges in codegen"),
        }
    }

    fn codegen_argument(
        &mut self,
        bx: &mut Bx,
        op: OperandRef<'tcx, Bx::Value>,
        llargs: &mut Vec<Bx::Value>,
        arg: &ArgType<'tcx, Ty<'tcx>>
    ) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(bx.const_undef(bx.reg_backend_type(&ty)))
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op)
            }
        } else if arg.is_unsized_indirect() {
            match op.val {
                Ref(a, Some(b), _) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op)
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                match arg.mode {
                    PassMode::Indirect(..) | PassMode::Cast(_) => {
                        let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                        op.val.store(bx, scratch);
                        (scratch.llval, scratch.align, true)
                    }
                    _ => {
                        (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false)
                    }
                }
            }
            Ref(llval, _, align) => {
                if arg.is_indirect() && align < arg.layout.align.abi {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                    // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                    // have scary latent bugs around.

                    let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                    base::memcpy_ty(bx, scratch.llval, scratch.align, llval, align,
                                    op.layout, MemFlags::empty());
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                let addr = bx.pointercast(llval, bx.type_ptr_to(
                    bx.cast_backend_type(&ty))
                );
                llval = bx.load(addr, align.min(arg.layout.align.abi));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(llval, align);
                if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as i8 so we need to truncate to i1.
                llval = base::to_immediate(bx, llval, arg.layout);
            }
        }

        llargs.push(llval);
    }
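
    // `codegen_arguments_untupled` flattens the tupled trailing
    // argument of an `extern "rust-call"` callee (e.g. a closure's
    // argument tuple) into individual ABI arguments.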
    fn codegen_arguments_untupled(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
        llargs: &mut Vec<Bx::Value>,
        args: &[ArgType<'tcx, Ty<'tcx>>]
    ) {
        let tuple = self.codegen_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, None, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                let field = bx.load_operand(field_ptr);
                self.codegen_argument(bx, field, llargs, &args[i]);
            }
        } else if let Ref(_, Some(_), _) = tuple.val {
            bug!("closure arguments must be sized")
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.codegen_argument(bx, op, llargs, &args[i]);
            }
        }
    }
    fn get_personality_slot(
        &mut self,
        bx: &mut Bx
    ) -> PlaceRef<'tcx, Bx::Value> {
        let cx = bx.cx();
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(cx.tcx().intern_tup(&[
                cx.tcx().mk_mut_ptr(cx.tcx().types.u8),
                cx.tcx().types.i32
            ]));
            let slot = PlaceRef::alloca(bx, layout, "personalityslot");
            self.personality_slot = Some(slot);
            slot
        }
    }
    /// Returns the landing-pad wrapper around the given basic block.
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(
        &mut self,
        target_bb: mir::BasicBlock
    ) -> Bx::BasicBlock {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }
    fn landing_pad_uncached(
        &mut self,
        target_bb: Bx::BasicBlock
    ) -> Bx::BasicBlock {
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let mut bx = self.new_block("cleanup");

        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        let slot = self.get_personality_slot(&mut bx);
        slot.storage_live(&mut bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&mut bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }
    fn landing_pad_type(&self) -> Bx::Type {
        let cx = self.cx;
        cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
    }
    fn unreachable_block(
        &mut self
    ) -> Bx::BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let mut bx = self.new_block("unreachable");
            bx.unreachable();
            self.unreachable_block = Some(bx.llbb());
            bx.llbb()
        })
    }
    pub fn new_block(&self, name: &str) -> Bx {
        Bx::new_block(self.cx, self.llfn, name)
    }

    pub fn build_block(
        &self,
        bb: mir::BasicBlock
    ) -> Bx {
        let mut bx = Bx::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
    }
    fn make_return_dest(
        &mut self,
        bx: &mut Bx,
        dest: &mir::Place<'tcx>,
        fn_ret: &ArgType<'tcx, Ty<'tcx>>,
        llargs: &mut Vec<Bx::Value>, is_intrinsic: bool
    ) -> ReturnDest<'tcx, Bx::Value> {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Place::Local(index) = *dest {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(bx, dest)
        };
        if fn_ret.is_indirect() {
            if dest.align < dest.layout.align.abi {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }
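
    // The transmute helpers below implement `transmute` as a store
    // through a pointer cast of the destination: the bits are left
    // unchanged and the destination is simply reinterpreted at the
    // source's layout.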
    fn codegen_transmute(
        &mut self,
        bx: &mut Bx,
        src: &mir::Operand<'tcx>,
        dst: &mir::Place<'tcx>
    ) {
        if let mir::Place::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.layout_of(self.monomorphized_place_ty(dst));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout, "transmute_temp");
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = bx.load_operand(place);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(),
                            "assigning to initialized SSAtemp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, dst);
            self.codegen_transmute_into(bx, src, dst);
        }
    }
    fn codegen_transmute_into(
        &mut self,
        bx: &mut Bx,
        src: &mir::Operand<'tcx>,
        dst: PlaceRef<'tcx, Bx::Value>
    ) {
        let src = self.codegen_operand(bx, src);
        let llty = bx.backend_type(src.layout);
        let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
        let align = src.layout.align.abi.min(dst.align);
        src.val.store(bx, PlaceRef::new_sized(cast_ptr, src.layout, align));
    }
    // Stores the return value of a function call into its final location.
    fn store_return(
        &mut self,
        bx: &mut Bx,
        dest: ReturnDest<'tcx, Bx::Value>,
        ret_ty: &ArgType<'tcx, Ty<'tcx>>,
        llval: Bx::Value
    ) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => bx.store_arg_ty(&ret_ty, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = bx.load_operand(tmp);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_ty.mode {
                    let tmp = PlaceRef::alloca(bx, ret_ty.layout, "tmp_ret");
                    tmp.storage_live(bx);
                    bx.store_arg_ty(&ret_ty, llval, tmp);
                    let op = bx.load_operand(tmp);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_ty.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}
enum ReturnDest<'tcx, V> {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(PlaceRef<'tcx, V>),
    // Stores an indirect return value to an operand local place
    IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
    // Stores a direct return value to an operand local place
    DirectOperand(mir::Local)
}