use super::operand::OperandRef;
use super::operand::OperandValue::{Immediate, Pair, Ref};
use super::place::PlaceRef;
use super::{FunctionCx, LocalRef};

use crate::base;
use crate::common::{self, IntPredicate};
use crate::meth;
use crate::traits::*;
use crate::MemFlags;

use rustc_ast as ast;
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_hir::lang_items::LangItem;
use rustc_index::vec::Idx;
use rustc_middle::mir::AssertKind;
use rustc_middle::mir::{self, SwitchTargets};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
use rustc_span::source_map::Span;
use rustc_span::{sym, Symbol};
use rustc_symbol_mangling::typeid_for_fnabi;
use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
use rustc_target::abi::{self, HasDataLayout, WrappingRange};
use rustc_target::spec::abi::Abi;

/// Used by `FunctionCx::codegen_terminator` for emitting common patterns
/// e.g., creating a basic block, calling a function, etc.
struct TerminatorCodegenHelper<'tcx> {
    bb: mir::BasicBlock,
    terminator: &'tcx mir::Terminator<'tcx>,
    funclet_bb: Option<mir::BasicBlock>,
}

impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
    /// Returns the appropriate `Funclet` for the current funclet, if on MSVC,
    /// either already previously cached, or newly created, by `landing_pad_for`.
    fn funclet<'b, Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &'b mut FunctionCx<'a, 'tcx, Bx>,
    ) -> Option<&'b Bx::Funclet> {
        let funclet_bb = self.funclet_bb?;
        if base::wants_msvc_seh(fx.cx.tcx().sess) {
            // If `landing_pad_for` hasn't been called yet to create the `Funclet`,
            // it has to be now. This may not seem necessary, as RPO should lead
            // to all the unwind edges being visited (and so to `landing_pad_for`
            // getting called for them), before building any of the blocks inside
            // the funclet itself - however, if MIR contains edges that end up not
            // being needed in the LLVM IR after monomorphization, the funclet may
            // be unreachable, and we don't yet have a way to skip building it in
            // such an eventuality (which may be a better solution than this).
            if fx.funclets[funclet_bb].is_none() {
                fx.landing_pad_for(funclet_bb);
            }

            Some(
                fx.funclets[funclet_bb]
                    .as_ref()
                    .expect("landing_pad_for didn't also create funclets entry"),
            )
        } else {
            None
        }
    }

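    // Note (added): returns the backend block for `target`, plus a flag that is
    // `true` when the jump crosses into a different MSVC funclet and therefore has
    // to be routed through a `cleanupret` trampoline (see `llblock`/`funclet_br`).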
    fn lltarget<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> (Bx::BasicBlock, bool) {
        let span = self.terminator.source_info.span;
        let lltarget = fx.llbb(target);
        let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
        match (self.funclet_bb, target_funclet) {
            (None, None) => (lltarget, false),
            (Some(f), Some(t_f)) if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) => {
                (lltarget, false)
            }
            // jump *into* cleanup - need a landing pad if GNU, cleanup pad if MSVC
            (None, Some(_)) => (fx.landing_pad_for(target), false),
            (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", self.terminator),
            (Some(_), Some(_)) => (fx.landing_pad_for(target), true),
        }
    }

    /// Create a basic block.
    fn llblock<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> Bx::BasicBlock {
        let (lltarget, is_cleanupret) = self.lltarget(fx, target);
        if is_cleanupret {
            // MSVC cross-funclet jump - need a trampoline

            debug!("llblock: creating cleanup trampoline for {:?}", target);
            let name = &format!("{:?}_cleanup_trampoline_{:?}", self.bb, target);
            let mut trampoline = fx.new_block(name);
            trampoline.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
            trampoline.llbb()
        } else {
            lltarget
        }
    }

    fn funclet_br<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        target: mir::BasicBlock,
    ) {
        let (lltarget, is_cleanupret) = self.lltarget(fx, target);
        if is_cleanupret {
            // micro-optimization: generate a `ret` rather than a jump
            // to a trampoline.
            bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
        } else {
            bx.br(lltarget);
        }
    }

    /// Call `fn_ptr` of `fn_abi` with the arguments `llargs`, the optional
    /// return destination `destination` and the cleanup block `cleanup`.
    fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
        fn_ptr: Bx::Value,
        llargs: &[Bx::Value],
        destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) {
        // If there is a cleanup block and the function we're calling can unwind, then
        // do an invoke, otherwise do a call.
        let fn_ty = bx.fn_decl_backend_type(&fn_abi);
        if let Some(cleanup) = cleanup.filter(|_| fn_abi.can_unwind) {
            let ret_llbb = if let Some((_, target)) = destination {
                fx.llbb(target)
            } else {
                fx.unreachable_block()
            };
            let invokeret = bx.invoke(
                fn_ty,
                fn_ptr,
                &llargs,
                ret_llbb,
                self.llblock(fx, cleanup),
                self.funclet(fx),
            );
            bx.apply_attrs_callsite(&fn_abi, invokeret);

            if let Some((ret_dest, target)) = destination {
                let mut ret_bx = fx.build_block(target);
                fx.set_debug_loc(&mut ret_bx, self.terminator.source_info);
                fx.store_return(&mut ret_bx, ret_dest, &fn_abi.ret, invokeret);
            }
        } else {
            let llret = bx.call(fn_ty, fn_ptr, &llargs, self.funclet(fx));
            bx.apply_attrs_callsite(&fn_abi, llret);
            if fx.mir[self.bb].is_cleanup {
                bx.apply_attrs_to_cleanup_callsite(llret);
            }

            if let Some((ret_dest, target)) = destination {
                fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
                self.funclet_br(fx, bx, target);
            } else {
                bx.unreachable();
            }
        }
    }

    /// Generates inline assembly with optional `destination` and `cleanup`.
    fn do_inlineasm<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        template: &[InlineAsmTemplatePiece],
        operands: &[InlineAsmOperandRef<'tcx, Bx>],
        options: InlineAsmOptions,
        line_spans: &[Span],
        destination: Option<mir::BasicBlock>,
        cleanup: Option<mir::BasicBlock>,
        instance: Instance<'_>,
    ) {
        if let Some(cleanup) = cleanup {
            let ret_llbb = if let Some(target) = destination {
                fx.llbb(target)
            } else {
                fx.unreachable_block()
            };

            bx.codegen_inline_asm(
                template,
                &operands,
                options,
                line_spans,
                instance,
                Some((ret_llbb, self.llblock(fx, cleanup), self.funclet(fx))),
            );
        } else {
            bx.codegen_inline_asm(template, &operands, options, line_spans, instance, None);

            if let Some(target) = destination {
                self.funclet_br(fx, bx, target);
            } else {
                bx.unreachable();
            }
        }
    }
}

/// Codegen implementations for some terminator variants.
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Generates code for a `Resume` terminator.
    fn codegen_resume_terminator(&mut self, helper: TerminatorCodegenHelper<'tcx>, mut bx: Bx) {
        if let Some(funclet) = helper.funclet(self) {
            bx.cleanup_ret(funclet, None);
        } else {
            let slot = self.get_personality_slot(&mut bx);
            let lp0 = slot.project_field(&mut bx, 0);
            let lp0 = bx.load_operand(lp0).immediate();
            let lp1 = slot.project_field(&mut bx, 1);
            let lp1 = bx.load_operand(lp1).immediate();
            slot.storage_dead(&mut bx);

            let mut lp = bx.const_undef(self.landing_pad_type());
            lp = bx.insert_value(lp, lp0, 0);
            lp = bx.insert_value(lp, lp1, 1);
            bx.resume(lp);
        }
    }

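    // Illustrative sketch (added; not literal backend IR): a `SwitchInt` with a
    // single non-otherwise target lowers to a compare-and-branch,
    //     %cmp = icmp eq iN %discr, <test_value>; br i1 %cmp, %bb_target, %bb_otherwise
    // while a multi-way `SwitchInt` lowers to a switch with `otherwise` as default.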
    fn codegen_switchint_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        discr: &mir::Operand<'tcx>,
        switch_ty: Ty<'tcx>,
        targets: &SwitchTargets,
    ) {
        let discr = self.codegen_operand(&mut bx, &discr);
        // `switch_ty` is redundant, sanity-check that.
        assert_eq!(discr.layout.ty, switch_ty);
        let mut target_iter = targets.iter();
        if target_iter.len() == 1 {
            // If there are two targets (one conditional, one fallback), emit br instead of switch
            let (test_value, target) = target_iter.next().unwrap();
            let lltrue = helper.llblock(self, target);
            let llfalse = helper.llblock(self, targets.otherwise());
            if switch_ty == bx.tcx().types.bool {
                // Don't generate trivial icmps when switching on bool
                match test_value {
                    0 => bx.cond_br(discr.immediate(), llfalse, lltrue),
                    1 => bx.cond_br(discr.immediate(), lltrue, llfalse),
                    _ => bug!(),
                }
            } else {
                let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
                let llval = bx.const_uint_big(switch_llty, test_value);
                let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
                bx.cond_br(cmp, lltrue, llfalse);
            }
        } else {
            bx.switch(
                discr.immediate(),
                helper.llblock(self, targets.otherwise()),
                target_iter.map(|(value, target)| (value, helper.llblock(self, target))),
            );
        }
    }

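    // Note (added): how the return value leaves the function depends on
    // `fn_abi.ret.mode`: `Ignore`/`Indirect` emit `ret void` (any value was already
    // written through the out-pointer), `Direct`/`Pair` return the immediate(s), and
    // `Cast` spills the value to a scratch slot and reloads it at the cast type.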
    fn codegen_return_terminator(&mut self, mut bx: Bx) {
        // Call `va_end` if this is the definition of a C-variadic function.
        if self.fn_abi.c_variadic {
            // The `VaList` "spoofed" argument is just after all the real arguments.
            let va_list_arg_idx = self.fn_abi.args.len();
            match self.locals[mir::Local::new(1 + va_list_arg_idx)] {
                LocalRef::Place(va_list) => {
                    bx.va_end(va_list.llval);
                }
                _ => bug!("C-variadic function must have a `VaList` place"),
            }
        }
        if self.fn_abi.ret.layout.abi.is_uninhabited() {
            // Functions with uninhabited return values are marked `noreturn`,
            // so we should make sure that we never actually return.
            // We play it safe by using a well-defined `abort`, but we could go for immediate UB
            // if that turns out to be helpful.
            bx.abort();
            // `abort` does not terminate the block, so we still need to generate
            // an `unreachable` terminator after it.
            bx.unreachable();
            return;
        }
        let llval = match self.fn_abi.ret.mode {
            PassMode::Ignore | PassMode::Indirect { .. } => {
                bx.ret_void();
                return;
            }

            PassMode::Direct(_) | PassMode::Pair(..) => {
                let op = self.codegen_consume(&mut bx, mir::Place::return_place().as_ref());
                if let Ref(llval, _, align) = op.val {
                    bx.load(bx.backend_type(op.layout), llval, align)
                } else {
                    op.immediate_or_packed_pair(&mut bx)
                }
            }

            PassMode::Cast(cast_ty) => {
                let op = match self.locals[mir::RETURN_PLACE] {
                    LocalRef::Operand(Some(op)) => op,
                    LocalRef::Operand(None) => bug!("use of return before def"),
                    LocalRef::Place(cg_place) => OperandRef {
                        val: Ref(cg_place.llval, None, cg_place.align),
                        layout: cg_place.layout,
                    },
                    LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                };
                let llslot = match op.val {
                    Immediate(_) | Pair(..) => {
                        let scratch = PlaceRef::alloca(&mut bx, self.fn_abi.ret.layout);
                        op.val.store(&mut bx, scratch);
                        scratch.llval
                    }
                    Ref(llval, _, align) => {
                        assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
                        llval
                    }
                };
                let ty = bx.cast_backend_type(&cast_ty);
                let addr = bx.pointercast(llslot, bx.type_ptr_to(ty));
                bx.load(ty, addr, self.fn_abi.ret.layout.align.abi)
            }
        };
        bx.ret(llval);
    }

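    // Note (added): dropping a trait object dispatches through its vtable: the fat
    // pointer is split into data pointer + vtable, and the `drop_in_place` entry is
    // loaded from `COMMON_VTABLE_ENTRIES_DROPINPLACE`; for all other types the
    // concrete drop-glue instance is called directly.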
    fn codegen_drop_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        location: mir::Place<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) {
        let ty = location.ty(self.mir, bx.tcx()).ty;
        let ty = self.monomorphize(ty);
        let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);

        if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
            // we don't actually need to drop anything.
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        let place = self.codegen_place(&mut bx, location.as_ref());
        let (args1, args2);
        let mut args = if let Some(llextra) = place.llextra {
            args2 = [place.llval, llextra];
            &args2[..]
        } else {
            args1 = [place.llval];
            &args1[..]
        };
        let (drop_fn, fn_abi) = match ty.kind() {
            // FIXME(eddyb) perhaps move some of this logic into
            // `Instance::resolve_drop_in_place`?
            ty::Dynamic(..) => {
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
                    substs: drop_fn.substs,
                };
                let fn_abi = bx.fn_abi_of_instance(virtual_drop, ty::List::empty());
                let vtable = args[1];
                args = &args[..1];
                (
                    meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
                        .get_fn(&mut bx, vtable, &fn_abi),
                    fn_abi,
                )
            }
            _ => (bx.get_fn_addr(drop_fn), bx.fn_abi_of_instance(drop_fn, ty::List::empty())),
        };
        helper.do_call(
            self,
            &mut bx,
            fn_abi,
            drop_fn,
            args,
            Some((ReturnDest::Nothing, target)),
            unwind,
        );
    }

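    // Note (added): an `Assert` terminator typically comes from checked arithmetic
    // or indexing; illustrative MIR shape (not verbatim output):
    //     assert(!move _3, "attempt to add with overflow") -> [success: bb2, unwind: bb3]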
    fn codegen_assert_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        cond: &mir::Operand<'tcx>,
        expected: bool,
        msg: &mir::AssertMessage<'tcx>,
        target: mir::BasicBlock,
        cleanup: Option<mir::BasicBlock>,
    ) {
        let span = terminator.source_info.span;
        let cond = self.codegen_operand(&mut bx, cond).immediate();
        let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);

        // This case can currently arise only from functions marked
        // with #[rustc_inherit_overflow_checks] and inlined from
        // another crate (mostly core::num generic/#[inline] fns),
        // while the current crate doesn't use overflow checks.
        // NOTE: Unlike binops, negation doesn't have its own
        // checked operation, just a comparison with the minimum
        // value, so we have to check for the assert message.
        if !bx.check_overflow() {
            if let AssertKind::OverflowNeg(_) = *msg {
                const_cond = Some(expected);
            }
        }

        // Don't codegen the panic block if success is known.
        if const_cond == Some(expected) {
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        // Pass the condition through llvm.expect for branch hinting.
        let cond = bx.expect(cond, expected);

        // Create the failure block and the conditional branch to it.
        let lltarget = helper.llblock(self, target);
        let panic_block = bx.build_sibling_block("panic");
        if expected {
            bx.cond_br(cond, lltarget, panic_block.llbb());
        } else {
            bx.cond_br(cond, panic_block.llbb(), lltarget);
        }

        // After this point, bx is the block for the call to panic.
        bx = panic_block;
        self.set_debug_loc(&mut bx, terminator.source_info);

        // Get the location information.
        let location = self.get_caller_location(&mut bx, terminator.source_info).immediate();

        // Put together the arguments to the panic entry point.
        let (lang_item, args) = match msg {
            AssertKind::BoundsCheck { ref len, ref index } => {
                let len = self.codegen_operand(&mut bx, len).immediate();
                let index = self.codegen_operand(&mut bx, index).immediate();
                // It's `fn panic_bounds_check(index: usize, len: usize)`,
                // and `#[track_caller]` adds an implicit third argument.
                (LangItem::PanicBoundsCheck, vec![index, len, location])
            }
            _ => {
                let msg_str = Symbol::intern(msg.description());
                let msg = bx.const_str(msg_str);
                // It's `pub fn panic(expr: &str)`, with the wide reference being passed
                // as two arguments, and `#[track_caller]` adds an implicit third argument.
                (LangItem::Panic, vec![msg.0, msg.1, location])
            }
        };

        // Obtain the panic entry point.
        let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
        let instance = ty::Instance::mono(bx.tcx(), def_id);
        let fn_abi = bx.fn_abi_of_instance(instance, ty::List::empty());
        let llfn = bx.get_fn_addr(instance);

        // Codegen the actual panic invoke/call.
        helper.do_call(self, &mut bx, fn_abi, llfn, &args, None, cleanup);
    }

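    // Note (added): the `assert_inhabited`/`assert_zero_valid`/`assert_uninit_valid`
    // intrinsics either compile to nothing (when the layout says the operation is
    // fine) or to an unconditional call to the `panic` lang item with a
    // type-specific message.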
    /// Returns `true` if this is indeed a panic intrinsic and codegen is done.
    fn codegen_panic_intrinsic(
        &mut self,
        helper: &TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        intrinsic: Option<Symbol>,
        instance: Option<Instance<'tcx>>,
        source_info: mir::SourceInfo,
        destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
    ) -> bool {
        // Emit a panic or a no-op for `assert_*` intrinsics.
        // These are intrinsics that compile to panics so that we can get a message
        // which mentions the offending type, even from a const context.
        #[derive(Debug, PartialEq)]
        enum AssertIntrinsic {
            Inhabited,
            ZeroValid,
            UninitValid,
        }
        let panic_intrinsic = intrinsic.and_then(|i| match i {
            sym::assert_inhabited => Some(AssertIntrinsic::Inhabited),
            sym::assert_zero_valid => Some(AssertIntrinsic::ZeroValid),
            sym::assert_uninit_valid => Some(AssertIntrinsic::UninitValid),
            _ => None,
        });
        if let Some(intrinsic) = panic_intrinsic {
            use AssertIntrinsic::*;
            let ty = instance.unwrap().substs.type_at(0);
            let layout = bx.layout_of(ty);
            let do_panic = match intrinsic {
                Inhabited => layout.abi.is_uninhabited(),
                ZeroValid => !layout.might_permit_raw_init(bx, /*zero:*/ true),
                UninitValid => !layout.might_permit_raw_init(bx, /*zero:*/ false),
            };
            if do_panic {
                let msg_str = with_no_visible_paths(|| {
                    with_no_trimmed_paths(|| {
                        if layout.abi.is_uninhabited() {
                            // Use this error even for the other intrinsics as it is more precise.
                            format!("attempted to instantiate uninhabited type `{}`", ty)
                        } else if intrinsic == ZeroValid {
                            format!("attempted to zero-initialize type `{}`, which is invalid", ty)
                        } else {
                            format!(
                                "attempted to leave type `{}` uninitialized, which is invalid",
                                ty
                            )
                        }
                    })
                });
                let msg = bx.const_str(Symbol::intern(&msg_str));
                let location = self.get_caller_location(bx, source_info).immediate();

                // Obtain the panic entry point.
                // FIXME: dedup this with `codegen_assert_terminator` above.
                let def_id =
                    common::langcall(bx.tcx(), Some(source_info.span), "", LangItem::Panic);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_abi = bx.fn_abi_of_instance(instance, ty::List::empty());
                let llfn = bx.get_fn_addr(instance);

                // Codegen the actual panic invoke/call.
                helper.do_call(
                    self,
                    bx,
                    fn_abi,
                    llfn,
                    &[msg.0, msg.1, location],
                    destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
                    cleanup,
                );
            } else {
                // a NOP
                let target = destination.as_ref().unwrap().1;
                helper.funclet_br(self, bx, target)
            }
            true
        } else {
            false
        }
    }

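    // Note (added): rough shape of call lowering below: resolve the callee (`FnDef`
    // vs `FnPtr`), special-case intrinsics (`transmute`, `caller_location`, the panic
    // intrinsics, SIMD shuffles), lower the arguments (untupling "rust-call" tuples
    // and splitting the receiver for virtual calls), then emit the call or invoke
    // via `do_call`.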
    fn codegen_call_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &[mir::Operand<'tcx>],
        destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
        cleanup: Option<mir::BasicBlock>,
        fn_span: Span,
    ) {
        let source_info = terminator.source_info;
        let span = source_info.span;

        // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
        let callee = self.codegen_operand(&mut bx, func);

        let (instance, mut llfn) = match *callee.layout.ty.kind() {
            ty::FnDef(def_id, substs) => (
                Some(
                    ty::Instance::resolve(bx.tcx(), ty::ParamEnv::reveal_all(), def_id, substs)
                        .unwrap()
                        .unwrap()
                        .polymorphize(bx.tcx()),
                ),
                None,
            ),
            ty::FnPtr(_) => (None, Some(callee.immediate())),
            _ => bug!("{} is not callable", callee.layout.ty),
        };
        let def = instance.map(|i| i.def);

        if let Some(ty::InstanceDef::DropGlue(_, None)) = def {
            // Empty drop glue; a no-op.
            let &(_, target) = destination.as_ref().unwrap();
            helper.funclet_br(self, &mut bx, target);
            return;
        }

        // FIXME(eddyb) avoid computing this if possible, when `instance` is
        // available - right now `sig` is only needed for getting the `abi`
        // and figuring out how many extra args were passed to a C-variadic `fn`.
        let sig = callee.layout.ty.fn_sig(bx.tcx());
        let abi = sig.abi();

        // Handle intrinsics old codegen wants Expr's for, ourselves.
        let intrinsic = match def {
            Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().item_name(def_id)),
            _ => None,
        };

        let extra_args = &args[sig.inputs().skip_binder().len()..];
        let extra_args = bx.tcx().mk_type_list(extra_args.iter().map(|op_arg| {
            let op_ty = op_arg.ty(self.mir, bx.tcx());
            self.monomorphize(op_ty)
        }));

        let fn_abi = match instance {
            Some(instance) => bx.fn_abi_of_instance(instance, extra_args),
            None => bx.fn_abi_of_fn_ptr(sig, extra_args),
        };

        if intrinsic == Some(sym::transmute) {
            if let Some(destination_ref) = destination.as_ref() {
                let &(dest, target) = destination_ref;
                self.codegen_transmute(&mut bx, &args[0], dest);
                helper.funclet_br(self, &mut bx, target);
            } else {
                // If we are trying to transmute to an uninhabited type,
                // it is likely there is no allotted destination. In fact,
                // transmuting to an uninhabited type is UB, which means
                // we can do what we like. Here, we declare that transmuting
                // into an uninhabited type is impossible, so anything following
                // it must be unreachable.
                assert_eq!(fn_abi.ret.layout.abi, abi::Abi::Uninhabited);
                bx.unreachable();
            }
            return;
        }

        if self.codegen_panic_intrinsic(
            &helper,
            &mut bx,
            intrinsic,
            instance,
            source_info,
            destination,
            cleanup,
        ) {
            return;
        }

        // The arguments we'll be passing. Plus one to account for outptr, if used.
        let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;
        let mut llargs = Vec::with_capacity(arg_count);

        // Prepare the return value destination
        let ret_dest = if let Some((dest, _)) = *destination {
            let is_intrinsic = intrinsic.is_some();
            self.make_return_dest(&mut bx, dest, &fn_abi.ret, &mut llargs, is_intrinsic)
        } else {
            ReturnDest::Nothing
        };

        if intrinsic == Some(sym::caller_location) {
            if let Some((_, target)) = destination.as_ref() {
                let location = self
                    .get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });

                if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
                    location.val.store(&mut bx, tmp);
                }
                self.store_return(&mut bx, ret_dest, &fn_abi.ret, location.immediate());
                helper.funclet_br(self, &mut bx, *target);
            }
            return;
        }

        match intrinsic {
            None | Some(sym::drop_in_place) => {}
            Some(sym::copy_nonoverlapping) => unreachable!(),
            Some(intrinsic) => {
                let dest = match ret_dest {
                    _ if fn_abi.ret.is_indirect() => llargs[0],
                    ReturnDest::Nothing => {
                        bx.const_undef(bx.type_ptr_to(bx.arg_memory_ty(&fn_abi.ret)))
                    }
                    ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.llval,
                    ReturnDest::DirectOperand(_) => {
                        bug!("Cannot use direct operand with an intrinsic call")
                    }
                };

                let args: Vec<_> = args
                    .iter()
                    .enumerate()
                    .map(|(i, arg)| {
                        // The indices passed to simd_shuffle* in the
                        // third argument must be constant. This is
                        // checked by const-qualification, which also
                        // promotes any complex rvalues to constants.
                        if i == 2 && intrinsic.as_str().starts_with("simd_shuffle") {
                            if let mir::Operand::Constant(constant) = arg {
                                let c = self.eval_mir_constant(constant);
                                let (llval, ty) = self.simd_shuffle_indices(
                                    &bx,
                                    constant.span,
                                    self.monomorphize(constant.ty()),
                                    c,
                                );
                                return OperandRef {
                                    val: Immediate(llval),
                                    layout: bx.layout_of(ty),
                                };
                            } else {
                                span_bug!(span, "shuffle indices must be constant");
                            }
                        }

                        self.codegen_operand(&mut bx, arg)
                    })
                    .collect();

                Self::codegen_intrinsic_call(
                    &mut bx,
                    *instance.as_ref().unwrap(),
                    &fn_abi,
                    &args,
                    dest,
                    span,
                );

                if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                    self.store_return(&mut bx, ret_dest, &fn_abi.ret, dst.llval);
                }

                if let Some((_, target)) = *destination {
                    helper.funclet_br(self, &mut bx, target);
                } else {
                    bx.unreachable();
                }

                return;
            }
        }

        // Split the rust-call tupled arguments off.
        let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
            let (tup, args) = args.split_last().unwrap();
            (args, Some(tup))
        } else {
            (args, None)
        };

        'make_args: for (i, arg) in first_args.iter().enumerate() {
            let mut op = self.codegen_operand(&mut bx, arg);

            if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                if let Pair(..) = op.val {
                    // In the case of Rc<Self>, we need to explicitly pass a
                    // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
                    // that is understood elsewhere in the compiler as a method on
                    // `dyn Trait`.
                    // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
                    // we get a value of a built-in pointer type
                    'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
                        && !op.layout.ty.is_region_ptr()
                    {
                        for i in 0..op.layout.fields.count() {
                            let field = op.extract_field(&mut bx, i);
                            if !field.layout.is_zst() {
                                // we found the one non-zero-sized field that is allowed
                                // now find *its* non-zero-sized field, or stop if it's a
                                // pointer
                                op = field;
                                continue 'descend_newtypes;
                            }
                        }

                        span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
                    }

                    // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
                    // data pointer and vtable. Look up the method in the vtable, and pass
                    // the data pointer as the first argument
                    match op.val {
                        Pair(data_ptr, meta) => {
                            llfn = Some(
                                meth::VirtualIndex::from_index(idx).get_fn(&mut bx, meta, &fn_abi),
                            );
                            llargs.push(data_ptr);
                            continue 'make_args;
                        }
                        other => bug!("expected a Pair, got {:?}", other),
                    }
                } else if let Ref(data_ptr, Some(meta), _) = op.val {
                    // by-value dynamic dispatch
                    llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(&mut bx, meta, &fn_abi));
                    llargs.push(data_ptr);
                    continue;
                } else {
                    span_bug!(span, "can't codegen a virtual call on {:?}", op);
                }
            }

            // The callee needs to own the argument memory if we pass it
            // by-ref, so make a local copy of non-immediate constants.
            match (arg, op.val) {
                (&mir::Operand::Copy(_), Ref(_, None, _))
                | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
                    let tmp = PlaceRef::alloca(&mut bx, op.layout);
                    op.val.store(&mut bx, tmp);
                    op.val = Ref(tmp.llval, None, tmp.align);
                }
                _ => {}
            }

            self.codegen_argument(&mut bx, op, &mut llargs, &fn_abi.args[i]);
        }
        let num_untupled = untuple.map(|tup| {
            self.codegen_arguments_untupled(
                &mut bx,
                tup,
                &mut llargs,
                &fn_abi.args[first_args.len()..],
            )
        });

        let needs_location =
            instance.map_or(false, |i| i.def.requires_caller_location(self.cx.tcx()));
        if needs_location {
            let mir_args = if let Some(num_untupled) = num_untupled {
                first_args.len() + num_untupled
            } else {
                args.len()
            };
            assert_eq!(
                fn_abi.args.len(),
                mir_args + 1,
                "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR: {:?} {:?} {:?}",
                instance,
                fn_span,
                fn_abi,
            );
            let location =
                self.get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });
            debug!(
                "codegen_call_terminator({:?}): location={:?} (fn_span {:?})",
                terminator, location, fn_span
            );

            let last_arg = fn_abi.args.last().unwrap();
            self.codegen_argument(&mut bx, location, &mut llargs, last_arg);
        }

        let (is_indirect_call, fn_ptr) = match (llfn, instance) {
            (Some(llfn), _) => (true, llfn),
            (None, Some(instance)) => (false, bx.get_fn_addr(instance)),
            _ => span_bug!(span, "no llfn for call"),
        };

        // For backends that support CFI using type membership (i.e., testing whether a given
        // pointer is associated with a type identifier).
        if bx.tcx().sess.is_sanitizer_cfi_enabled() && is_indirect_call {
            // Emit type metadata and checks.
            // FIXME(rcvalle): Add support for generalized identifiers.
            // FIXME(rcvalle): Create distinct unnamed MDNodes for internal identifiers.
            let typeid = typeid_for_fnabi(bx.tcx(), fn_abi);
            let typeid_metadata = bx.typeid_metadata(typeid);

            // Test whether the function pointer is associated with the type identifier.
            let cond = bx.type_test(fn_ptr, typeid_metadata);
            let mut bx_pass = bx.build_sibling_block("type_test.pass");
            let mut bx_fail = bx.build_sibling_block("type_test.fail");
            bx.cond_br(cond, bx_pass.llbb(), bx_fail.llbb());

            helper.do_call(
                self,
                &mut bx_pass,
                fn_abi,
                fn_ptr,
                &llargs,
                destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                cleanup,
            );

            bx_fail.abort();
            bx_fail.unreachable();

            return;
        }

        helper.do_call(
            self,
            &mut bx,
            fn_abi,
            fn_ptr,
            &llargs,
            destination.as_ref().map(|&(_, target)| (ret_dest, target)),
            cleanup,
        );
    }

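    // Note (added): lowers `TerminatorKind::InlineAsm`: each MIR asm operand is
    // converted to an `InlineAsmOperandRef` (inputs evaluated, `const` operands
    // rendered to strings, `sym` operands resolved to instances or statics) and then
    // handed to the backend via `do_inlineasm`.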
    fn codegen_asm_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        mut bx: Bx,
        terminator: &mir::Terminator<'tcx>,
        template: &[ast::InlineAsmTemplatePiece],
        operands: &[mir::InlineAsmOperand<'tcx>],
        options: ast::InlineAsmOptions,
        line_spans: &[Span],
        destination: Option<mir::BasicBlock>,
        cleanup: Option<mir::BasicBlock>,
        instance: Instance<'_>,
    ) {
        let span = terminator.source_info.span;

        let operands: Vec<_> = operands
            .iter()
            .map(|op| match *op {
                mir::InlineAsmOperand::In { reg, ref value } => {
                    let value = self.codegen_operand(&mut bx, value);
                    InlineAsmOperandRef::In { reg, value }
                }
                mir::InlineAsmOperand::Out { reg, late, ref place } => {
                    let place = place.map(|place| self.codegen_place(&mut bx, place.as_ref()));
                    InlineAsmOperandRef::Out { reg, late, place }
                }
                mir::InlineAsmOperand::InOut { reg, late, ref in_value, ref out_place } => {
                    let in_value = self.codegen_operand(&mut bx, in_value);
                    let out_place =
                        out_place.map(|out_place| self.codegen_place(&mut bx, out_place.as_ref()));
                    InlineAsmOperandRef::InOut { reg, late, in_value, out_place }
                }
                mir::InlineAsmOperand::Const { ref value } => {
                    let const_value = self
                        .eval_mir_constant(value)
                        .unwrap_or_else(|_| span_bug!(span, "asm const cannot be resolved"));
                    let string = common::asm_const_to_str(
                        bx.tcx(),
                        span,
                        const_value,
                        bx.layout_of(value.ty()),
                    );
                    InlineAsmOperandRef::Const { string }
                }
                mir::InlineAsmOperand::SymFn { ref value } => {
                    let literal = self.monomorphize(value.literal);
                    if let ty::FnDef(def_id, substs) = *literal.ty().kind() {
                        let instance = ty::Instance::resolve_for_fn_ptr(
                            bx.tcx(),
                            ty::ParamEnv::reveal_all(),
                            def_id,
                            substs,
                        )
                        .unwrap();
                        InlineAsmOperandRef::SymFn { instance }
                    } else {
                        span_bug!(span, "invalid type for asm sym (fn)");
                    }
                }
                mir::InlineAsmOperand::SymStatic { def_id } => {
                    InlineAsmOperandRef::SymStatic { def_id }
                }
            })
            .collect();

        helper.do_inlineasm(
            self,
            &mut bx,
            template,
            &operands,
            options,
            line_spans,
            destination,
            cleanup,
            instance,
        );
    }
}

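// Note (added): entry points. `codegen_block` lowers each statement of a MIR basic
// block and then dispatches its terminator to the `codegen_*_terminator` helpers
// above; the rest of this impl holds the shared helpers they rely on (argument
// lowering, landing pads, caller-location plumbing, return destinations).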
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
        let mut bx = self.build_block(bb);
        let mir = self.mir;
        let data = &mir[bb];

        debug!("codegen_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.codegen_statement(bx, statement);
        }

        self.codegen_terminator(bx, bb, data.terminator());
    }

    fn codegen_terminator(
        &mut self,
        mut bx: Bx,
        bb: mir::BasicBlock,
        terminator: &'tcx mir::Terminator<'tcx>,
    ) {
        debug!("codegen_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let helper = TerminatorCodegenHelper { bb, terminator, funclet_bb };

        self.set_debug_loc(&mut bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => self.codegen_resume_terminator(helper, bx),

            mir::TerminatorKind::Abort => {
                bx.abort();
                // `abort` does not terminate the block, so we still need to generate
                // an `unreachable` terminator after it.
                bx.unreachable();
            }

            mir::TerminatorKind::Goto { target } => {
                helper.funclet_br(self, &mut bx, target);
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref targets } => {
                self.codegen_switchint_terminator(helper, bx, discr, switch_ty, targets);
            }

            mir::TerminatorKind::Return => {
                self.codegen_return_terminator(bx);
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }

            mir::TerminatorKind::Drop { place, target, unwind } => {
                self.codegen_drop_terminator(helper, bx, place, target, unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                self.codegen_assert_terminator(
                    helper, bx, terminator, cond, expected, msg, target, cleanup,
                );
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
            }

            mir::TerminatorKind::Call {
                ref func,
                ref args,
                ref destination,
                cleanup,
                from_hir_call: _,
                fn_span,
            } => {
                self.codegen_call_terminator(
                    helper,
                    bx,
                    terminator,
                    func,
                    args,
                    destination,
                    cleanup,
                    fn_span,
                );
            }
            mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Yield { .. } => {
                bug!("generator ops in codegen")
            }
            mir::TerminatorKind::FalseEdge { .. } | mir::TerminatorKind::FalseUnwind { .. } => {
                bug!("borrowck false edges in codegen")
            }

            mir::TerminatorKind::InlineAsm {
                template,
                ref operands,
                options,
                line_spans,
                destination,
                cleanup,
            } => {
                self.codegen_asm_terminator(
                    helper,
                    bx,
                    terminator,
                    template,
                    operands,
                    options,
                    line_spans,
                    destination,
                    cleanup,
                    self.instance,
                );
            }
        }
    }

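    // Note (added): lowers one already-evaluated operand into zero or more backend
    // call arguments according to its `ArgAbi`: scalar pairs are split in two,
    // unsized indirect arguments push data + metadata pointers, and by-ref values
    // may be spilled to (or reloaded from) a scratch slot to honor padding,
    // alignment, or a cast ABI.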
    fn codegen_argument(
        &mut self,
        bx: &mut Bx,
        op: OperandRef<'tcx, Bx::Value>,
        llargs: &mut Vec<Bx::Value>,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(bx.const_undef(bx.reg_backend_type(&ty)))
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op),
            }
        } else if arg.is_unsized_indirect() {
            match op.val {
                Ref(a, Some(b), _) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op),
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => match arg.mode {
                PassMode::Indirect { .. } | PassMode::Cast(_) => {
                    let scratch = PlaceRef::alloca(bx, arg.layout);
                    op.val.store(bx, scratch);
                    (scratch.llval, scratch.align, true)
                }
                _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
            },
            Ref(llval, _, align) => {
                if arg.is_indirect() && align < arg.layout.align.abi {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                    // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                    // have scary latent bugs around.

                    let scratch = PlaceRef::alloca(bx, arg.layout);
                    base::memcpy_ty(
                        bx,
                        scratch.llval,
                        scratch.align,
                        llval,
                        align,
                        op.layout,
                        MemFlags::empty(),
                    );
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                let llty = bx.cast_backend_type(&ty);
                let addr = bx.pointercast(llval, bx.type_ptr_to(llty));
                llval = bx.load(llty, addr, align.min(arg.layout.align.abi));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(bx.backend_type(arg.layout), llval, align);
                if let abi::Abi::Scalar(scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, WrappingRange { start: 0, end: 1 });
                    }
                }
                // We store bools as `i8` so we need to truncate to `i1`.
                llval = bx.to_immediate(llval, arg.layout);
            }
        }

        llargs.push(llval);
    }

    fn codegen_arguments_untupled(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
        llargs: &mut Vec<Bx::Value>,
        args: &[ArgAbi<'tcx, Ty<'tcx>>],
    ) -> usize {
        let tuple = self.codegen_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, None, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                let field = bx.load_operand(field_ptr);
                self.codegen_argument(bx, field, llargs, &args[i]);
            }
        } else if let Ref(_, Some(_), _) = tuple.val {
            bug!("closure arguments must be sized")
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.codegen_argument(bx, op, llargs, &args[i]);
            }
        }
        tuple.layout.fields.count()
    }

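    // Note (added): `#[track_caller]` support. Builds the `&'static Location<'static>`
    // operand for the current call site, walking up inlined scopes so that frames
    // inlined from `#[track_caller]` functions still report the span of their
    // original caller.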
    fn get_caller_location(
        &mut self,
        bx: &mut Bx,
        mut source_info: mir::SourceInfo,
    ) -> OperandRef<'tcx, Bx::Value> {
        let tcx = bx.tcx();

        let mut span_to_caller_location = |span: Span| {
            let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
            let caller = tcx.sess.source_map().lookup_char_pos(topmost.lo());
            let const_loc = tcx.const_caller_location((
                Symbol::intern(&caller.file.name.prefer_remapped().to_string_lossy()),
                caller.line as u32,
                caller.col_display as u32 + 1,
            ));
            OperandRef::from_const(bx, const_loc, bx.tcx().caller_location_ty())
        };

        // Walk up the `SourceScope`s, in case some of them are from MIR inlining.
        // If so, the starting `source_info.span` is in the innermost inlined
        // function, and will be replaced with outer callsite spans as long
        // as the inlined functions were `#[track_caller]`.
        loop {
            let scope_data = &self.mir.source_scopes[source_info.scope];

            if let Some((callee, callsite_span)) = scope_data.inlined {
                // Stop inside the most nested non-`#[track_caller]` function,
                // before ever reaching its caller (which is irrelevant).
                if !callee.def.requires_caller_location(tcx) {
                    return span_to_caller_location(source_info.span);
                }
                source_info.span = callsite_span;
            }

            // Skip past all of the parents with `inlined: None`.
            match scope_data.inlined_parent_scope {
                Some(parent) => source_info.scope = parent,
                None => break,
            }
        }

        // No inlined `SourceScope`s, or all of them were `#[track_caller]`.
        self.caller_location.unwrap_or_else(|| span_to_caller_location(source_info.span))
    }

    fn get_personality_slot(&mut self, bx: &mut Bx) -> PlaceRef<'tcx, Bx::Value> {
        let cx = bx.cx();
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(
                cx.tcx().intern_tup(&[cx.tcx().mk_mut_ptr(cx.tcx().types.u8), cx.tcx().types.i32]),
            );
            let slot = PlaceRef::alloca(bx, layout);
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Returns the landing/cleanup pad wrapper around the given basic block.
    // FIXME(eddyb) rename this to `eh_pad_for`.
    fn landing_pad_for(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
        if let Some(landing_pad) = self.landing_pads[bb] {
            return landing_pad;
        }

        let landing_pad = self.landing_pad_for_uncached(bb);
        self.landing_pads[bb] = Some(landing_pad);
        landing_pad
    }

    // FIXME(eddyb) rename this to `eh_pad_for_uncached`.
    fn landing_pad_for_uncached(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
        let llbb = self.llbb(bb);
        if base::wants_msvc_seh(self.cx.sess()) {
            let funclet;
            let ret_llbb;
            match self.mir[bb].terminator.as_ref().map(|t| &t.kind) {
                // This is a basic block that we're aborting the program for,
                // notably in an `extern` function. These basic blocks are inserted
                // so that we assert that `extern` functions do indeed not panic,
                // and if they do we abort the process.
                //
                // On MSVC these are tricky though (where we're doing funclets). If
                // we were to do a cleanuppad (like below) the normal functions like
                // `longjmp` would trigger the abort logic, terminating the
                // program. Instead we insert the equivalent of `catch(...)` for C++
                // which magically doesn't trigger when `longjmp` flies over this
                // frame.
                //
                // Lots more discussion can be found on #48251 but this codegen is
                // modeled after clang's for:
                //
                //      try {
                //          foo();
                //      } catch (...) {
                //          bar();
                //      }
                Some(&mir::TerminatorKind::Abort) => {
                    let mut cs_bx = self.new_block(&format!("cs_funclet{:?}", bb));
                    let mut cp_bx = self.new_block(&format!("cp_funclet{:?}", bb));
                    ret_llbb = cs_bx.llbb();

                    let cs = cs_bx.catch_switch(None, None, 1);
                    cs_bx.add_handler(cs, cp_bx.llbb());

                    // The "null" here is actually a RTTI type descriptor for the
                    // C++ personality function, but `catch (...)` has no type so
                    // it's null. The 64 here is actually a bitfield which
                    // represents that this is a catch-all block.
                    let null = cp_bx.const_null(
                        cp_bx.type_i8p_ext(cp_bx.cx().data_layout().instruction_address_space),
                    );
                    let sixty_four = cp_bx.const_i32(64);
                    funclet = cp_bx.catch_pad(cs, &[null, sixty_four, null]);
                    cp_bx.br(llbb);
                }
                _ => {
                    let mut cleanup_bx = self.new_block(&format!("funclet_{:?}", bb));
                    ret_llbb = cleanup_bx.llbb();
                    funclet = cleanup_bx.cleanup_pad(None, &[]);
                    cleanup_bx.br(llbb);
                }
            }
            self.funclets[bb] = Some(funclet);
            ret_llbb
        } else {
            let mut bx = self.new_block("cleanup");

            let llpersonality = self.cx.eh_personality();
            let llretty = self.landing_pad_type();
            let lp = bx.landing_pad(llretty, llpersonality, 1);
            bx.set_cleanup(lp);

            let slot = self.get_personality_slot(&mut bx);
            slot.storage_live(&mut bx);
            Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&mut bx, slot);

            bx.br(llbb);
            bx.llbb()
        }
    }

    fn landing_pad_type(&self) -> Bx::Type {
        let cx = self.cx;
        cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
    }

    fn unreachable_block(&mut self) -> Bx::BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let mut bx = self.new_block("unreachable");
            bx.unreachable();
            self.unreachable_block = Some(bx.llbb());
            bx.llbb()
        })
    }

    // FIXME(eddyb) replace with `build_sibling_block`/`append_sibling_block`
    // (which requires having a `Bx` already, and not all callers do).
    fn new_block(&self, name: &str) -> Bx {
        let llbb = Bx::append_block(self.cx, self.llfn, name);
        Bx::build(self.cx, llbb)
    }

    /// Get the backend `BasicBlock` for a MIR `BasicBlock`, either already
    /// cached in `self.cached_llbbs`, or created on demand (and cached).
    // FIXME(eddyb) rename `llbb` and other `ll`-prefixed things to use a
    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbb`).
    pub fn llbb(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
        self.cached_llbbs[bb].unwrap_or_else(|| {
            // FIXME(eddyb) only name the block if `fewer_names` is `false`.
            let llbb = Bx::append_block(self.cx, self.llfn, &format!("{:?}", bb));
            self.cached_llbbs[bb] = Some(llbb);
            llbb
        })
    }

    pub fn build_block(&mut self, bb: mir::BasicBlock) -> Bx {
        let llbb = self.llbb(bb);
        Bx::build(self.cx, llbb)
    }

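    // Note (added): decides where a callee's return value should land: nowhere
    // (ignored), directly into an existing place, or into a fresh temporary that is
    // later loaded back into an operand local; for indirect returns the destination
    // pointer is also pushed as the implicit out-pointer argument in `llargs`.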
    fn make_return_dest(
        &mut self,
        bx: &mut Bx,
        dest: mir::Place<'tcx>,
        fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
        llargs: &mut Vec<Bx::Value>,
        is_intrinsic: bool,
    ) -> ReturnDest<'tcx, Bx::Value> {
        // If the return is ignored, we can just return a do-nothing `ReturnDest`.
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let Some(index) = dest.as_local() {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically `Operand` ones, as
                    // they don't have `alloca`s.
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout);
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary `alloca` for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout);
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(
                bx,
                mir::PlaceRef { local: dest.local, projection: &dest.projection },
            )
        };
        if fn_ret.is_indirect() {
            if dest.align < dest.layout.align.abi {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

    fn codegen_transmute(&mut self, bx: &mut Bx, src: &mir::Operand<'tcx>, dst: mir::Place<'tcx>) {
        if let Some(index) = dst.as_local() {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.layout_of(self.monomorphized_place_ty(dst.as_ref()));
                    assert!(!dst_layout.ty.has_erasable_regions(self.cx.tcx()));
                    let place = PlaceRef::alloca(bx, dst_layout);
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = bx.load_operand(place);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                    self.debug_introduce_local(bx, index);
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(), "assigning to initialized SSAtemp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, dst.as_ref());
            self.codegen_transmute_into(bx, src, dst);
        }
    }

    fn codegen_transmute_into(
        &mut self,
        bx: &mut Bx,
        src: &mir::Operand<'tcx>,
        dst: PlaceRef<'tcx, Bx::Value>,
    ) {
        let src = self.codegen_operand(bx, src);

        // Special-case transmutes between scalars as simple bitcasts.
        match (src.layout.abi, dst.layout.abi) {
            (abi::Abi::Scalar(src_scalar), abi::Abi::Scalar(dst_scalar)) => {
                // HACK(eddyb) LLVM doesn't like `bitcast`s between pointers and non-pointers.
                if (src_scalar.value == abi::Pointer) == (dst_scalar.value == abi::Pointer) {
                    assert_eq!(src.layout.size, dst.layout.size);

                    // NOTE(eddyb) the `from_immediate` and `to_immediate_scalar`
                    // conversions allow handling `bool`s the same as `u8`s.
                    let src = bx.from_immediate(src.immediate());
                    let src_as_dst = bx.bitcast(src, bx.backend_type(dst.layout));
                    Immediate(bx.to_immediate_scalar(src_as_dst, dst_scalar)).store(bx, dst);
                    return;
                }
            }
            _ => {}
        }

        let llty = bx.backend_type(src.layout);
        let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
        let align = src.layout.align.abi.min(dst.align);
        src.val.store(bx, PlaceRef::new_sized_aligned(cast_ptr, src.layout, align));
    }

    // Stores the return value of a function call into its final location.
    fn store_return(
        &mut self,
        bx: &mut Bx,
        dest: ReturnDest<'tcx, Bx::Value>,
        ret_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        llval: Bx::Value,
    ) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => bx.store_arg(&ret_abi, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = bx.load_operand(tmp);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
                self.debug_introduce_local(bx, index);
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_abi.mode {
                    let tmp = PlaceRef::alloca(bx, ret_abi.layout);
                    tmp.storage_live(bx);
                    bx.store_arg(&ret_abi, llval, tmp);
                    let op = bx.load_operand(tmp);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
                self.debug_introduce_local(bx, index);
            }
        }
    }
}

enum ReturnDest<'tcx, V> {
    // Do nothing; the return value is indirect or ignored.
    Nothing,
    // Store the return value to the pointer.
    Store(PlaceRef<'tcx, V>),
    // Store an indirect return value to an operand local place.
    IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
    // Store a direct return value to an operand local place.
    DirectOperand(mir::Local),
}