//! Handling of everything related to the calling convention. Also fills `fx.local_map`.

mod comments;
mod pass_mode;
mod returning;

use cranelift_module::ModuleError;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiOf;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::{AbiParam, SigRef};

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::codegen_return;

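/// Construct a Cranelift `Signature` from a rustc `FnAbi`, mapping the rustc calling
/// convention to the closest Cranelift `CallConv`.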
fn clif_sig_from_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    default_call_conv: CallConv,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
    let call_conv = match fn_abi.conv {
        Conv::Rust | Conv::C => default_call_conv,
        Conv::RustCold => CallConv::Cold,
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,
        Conv::ArmAapcs
        | Conv::CCmseNonSecureCall
        | Conv::Msp430Intr
        | Conv::PtxKernel
        | Conv::X86Fastcall
        | Conv::X86Intr
        | Conv::X86Stdcall
        | Conv::X86ThisCall
        | Conv::X86VectorCall
        | Conv::AmdGpuKernel
        | Conv::AvrInterrupt
        | Conv::AvrNonBlockingInterrupt => todo!("{:?}", fn_abi.conv),
    };
    let inputs = fn_abi.args.iter().flat_map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter());

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();

    Signature { params, returns, call_conv }
}

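/// Compute the Cranelift signature for a monomorphized function instance, using the
/// default calling convention of the given target triple.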
pub(crate) fn get_function_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    inst: Instance<'tcx>,
) -> Signature {
    assert!(!inst.substs.needs_infer());
    clif_sig_from_fn_abi(
        tcx,
        CallConv::triple_default(triple),
        &RevealAllLayoutCx(tcx).fn_abi_of_instance(inst, ty::List::empty()),
    )
}

/// Instance must be monomorphized
pub(crate) fn import_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    module: &mut dyn Module,
    inst: Instance<'tcx>,
) -> FuncId {
    let name = tcx.symbol_name(inst).name;
    let sig = get_function_sig(tcx, module.isa().triple(), inst);
    match module.declare_function(name, Linkage::Import, &sig) {
        Ok(func_id) => func_id,
        Err(ModuleError::IncompatibleDeclaration(_)) => tcx.sess.fatal(&format!(
            "attempt to declare `{name}` as function, but it was already declared as static"
        )),
        Err(ModuleError::IncompatibleSignature(_, prev_sig, new_sig)) => tcx.sess.fatal(&format!(
            "attempt to declare `{name}` with signature {new_sig:?}, \
             but it was already declared with signature {prev_sig:?}"
        )),
        Err(err) => Err::<_, _>(err).unwrap(),
    }
}

impl<'tcx> FunctionCx<'_, '_, 'tcx> {
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, self.module, inst);
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);

        if self.clif_comments.enabled() {
            self.add_comment(func_ref, format!("{:?}", inst));
        }

        func_ref
    }

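    /// Declare (if necessary) and call a runtime library function by symbol name, with
    /// explicitly given raw parameter and return types, using the target's default
    /// calling convention.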
    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> &[Value] {
        let sig = Signature { params, returns, call_conv: self.target_config.default_call_conv };
        let func_id = self.module.declare_function(name, Linkage::Import, &sig).unwrap();
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);
        if self.clif_comments.enabled() {
            self.add_comment(func_ref, format!("{:?}", name));
        }
        let call_inst = self.bcx.ins().call(func_ref, args);
        if self.clif_comments.enabled() {
            self.add_comment(call_inst, format!("easy_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        assert!(results.len() <= 2, "{}", results.len());
        results
    }

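    /// Convenience wrapper around `lib_call` that derives the Cranelift signature from
    /// `CValue` arguments and a Rust return type. A tuple return type is mapped to
    /// multiple return values.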
    pub(crate) fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| {
                (AbiParam::new(self.clif_type(arg.layout().ty).unwrap()), arg.load_scalar(self))
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.iter().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
        } else {
            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
        };
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
        match *ret_vals {
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
                return_layout,
            ),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
            _ => unreachable!(),
        }
    }
}

/// Make a [`CPlace`] capable of holding a value of the specified type.
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    local: Local,
    layout: TyAndLayout<'tcx>,
    is_ssa: bool,
) -> CPlace<'tcx> {
    let place = if is_ssa {
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
        } else {
            CPlace::new_var(fx, local, layout)
        }
    } else {
        CPlace::new_stack_slot(fx, layout)
    };

    self::comments::add_local_place_comments(fx, place, local);

    place
}

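/// Generate the code at function entry that binds the incoming block parameters to MIR
/// locals: the return place, all arguments (including a spread "rust-call" tuple and the
/// implicit `#[track_caller]` location, if any) and all locals and temporaries.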
pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_block: Block) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);
    fx.bcx.ins().nop();

    let ssa_analyzed = crate::analyze::analyze(fx);

    self::comments::add_args_header_comment(fx);

    let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
    let ret_place =
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

    let fn_abi = fx.fn_abi.take().unwrap();

    // FIXME implement variadics in cranelift
    if fn_abi.c_variadic {
        fx.tcx.sess.span_fatal(
            fx.mir.span,
            "Defining variadic functions is not yet supported by Cranelift",
        );
    }

    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.

                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.iter().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param =
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param =
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location =
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
    }

    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                // Ownership of the value at the backing storage for an argument is passed to the
                // callee per the ABI, so it is fine to borrow the backing storage of this argument
                // to prevent a copy.

                let place = if let Some(meta) = meta {
                    CPlace::for_ptr_with_extra(addr, meta, val.layout())
                } else {
                    CPlace::for_ptr(addr, val.layout())
                };

                self::comments::add_local_place_comments(fx, place, local);

                assert_eq!(fx.local_map.push(place), local);
                continue;
            }
        }

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place.place_field(fx, mir::Field::new(i)).write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx.ins().jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}

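/// An argument for a call, together with whether ownership of its backing storage is
/// transferred to the callee (`Operand::Move`), in which case the storage may be passed
/// by reference without making a copy.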
struct CallArgument<'tcx> {
    value: CValue<'tcx>,
    is_owned: bool,
}

// FIXME avoid intermediate `CValue` before calling `adjust_arg_for_abi`
fn codegen_call_argument_operand<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    operand: &Operand<'tcx>,
) -> CallArgument<'tcx> {
    CallArgument {
        value: codegen_operand(fx, operand),
        is_owned: matches!(operand, Operand::Move(_)),
    }
}

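/// Codegen a `Call` terminator: handles intrinsics and empty drop glue specially,
/// computes the `FnAbi`, unpacks the argument tuple of "rust-call" functions and emits a
/// direct, virtual or indirect call as appropriate.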
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    source_info: mir::SourceInfo,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Place<'tcx>,
    target: Option<BasicBlock>,
) {
    let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
    let fn_sig =
        fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

    let ret_place = codegen_place(fx, destination);

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
        let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
            .unwrap()
            .unwrap()
            .polymorphize(fx.tcx);

        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                ret_place,
                target,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(
                    fx,
                    instance,
                    args,
                    ret_place,
                    target,
                    source_info,
                );
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // empty drop glue - a nop.
                let dest = target.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        None
    };

    let extra_args = &args[fn_sig.inputs().len()..];
    let extra_args = fx
        .tcx
        .mk_type_list(extra_args.iter().map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx))));
    let fn_abi = if let Some(instance) = instance {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(instance, extra_args)
    } else {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_fn_ptr(fn_ty.fn_sig(fx.tcx), extra_args)
    };

    let is_cold = if fn_sig.abi == Abi::RustCold {
        true
    } else {
        instance
            .map(|inst| {
                fx.tcx.codegen_fn_attrs(inst.def_id()).flags.contains(CodegenFnAttrFlags::COLD)
            })
            .unwrap_or(false)
    };
    if is_cold {
        fx.bcx.set_cold_block(fx.bcx.current_block().unwrap());
        if let Some(destination_block) = target {
            fx.bcx.set_cold_block(fx.get_block(destination_block));
        }
    }

    // Unpack arguments tuple for closures
    let mut args = if fn_sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_call_argument_operand(fx, &args[0]);
        let pack_arg = codegen_call_argument_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.value.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(CallArgument {
                value: pack_arg.value.value_field(fx, mir::Field::new(i)),
                is_owned: pack_arg.is_owned,
            });
        }
        args
    } else {
        args.iter().map(|arg| codegen_call_argument_operand(fx, arg)).collect::<Vec<_>>()
    };

    // Pass the caller location for `#[track_caller]`.
    if instance.map(|inst| inst.def.requires_caller_location(fx.tcx)).unwrap_or(false) {
        let caller_location = fx.get_caller_location(source_info);
        args.push(CallArgument { value: caller_location, is_owned: false });
    }

    let args = args;
    assert_eq!(fn_abi.args.len(), args.len());

    enum CallTarget {
        Direct(FuncRef),
        Indirect(SigRef, Value),
    }

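    // For a virtual call the fat `self` argument is replaced by just its data pointer
    // (`first_arg_override`); the function pointer is loaded from the vtable.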
    let (func_ref, first_arg_override) = match instance {
        // Trait object call
        Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
                );
            }

            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0].value, idx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
            let sig = fx.bcx.import_signature(sig);

            (CallTarget::Indirect(sig, method), Some(ptr.get_addr(fx)))
        }

        // Normal call
        Some(instance) => {
            let func_ref = fx.get_function_ref(instance);
            (CallTarget::Direct(func_ref), None)
        }

        // Indirect call
        None => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }

            let func = codegen_operand(fx, func).load_scalar(fx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
            let sig = fx.bcx.import_signature(sig);

            (CallTarget::Indirect(sig, func), None)
        }
    };

    self::returning::codegen_with_call_return_arg(fx, &fn_abi.ret, ret_place, |fx, return_ptr| {
        let call_args = return_ptr
            .into_iter()
            .chain(first_arg_override.into_iter())
            .chain(
                args.into_iter()
                    .enumerate()
                    .skip(if first_arg_override.is_some() { 1 } else { 0 })
                    .flat_map(|(i, arg)| {
                        adjust_arg_for_abi(fx, arg.value, &fn_abi.args[i], arg.is_owned).into_iter()
                    }),
            )
            .collect::<Vec<Value>>();

        let call_inst = match func_ref {
            CallTarget::Direct(func_ref) => fx.bcx.ins().call(func_ref, &call_args),
            CallTarget::Indirect(sig, func_ptr) => {
                fx.bcx.ins().call_indirect(sig, func_ptr, &call_args)
            }
        };

        // FIXME find a cleaner way to support varargs
        if fn_sig.c_variadic {
            if !matches!(fn_sig.abi, Abi::C { .. }) {
                fx.tcx.sess.span_fatal(
                    source_info.span,
                    &format!("Variadic call for non-C abi {:?}", fn_sig.abi),
                );
            }
            let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
            let abi_params = call_args
                .into_iter()
                .map(|arg| {
                    let ty = fx.bcx.func.dfg.value_type(arg);
                    if !ty.is_int() {
                        // FIXME set %al to upperbound on float args once floats are supported
                        fx.tcx.sess.span_fatal(
                            source_info.span,
                            &format!("Non int ty {:?} for variadic call", ty),
                        );
                    }
                    AbiParam::new(ty)
                })
                .collect::<Vec<AbiParam>>();
            fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
        }

        call_inst
    });

    if let Some(dest) = target {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
    }
}

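/// Codegen a call to the drop glue of `drop_place`, dispatching through the vtable for
/// `dyn Trait` and `dyn* Trait` places.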
pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    source_info: mir::SourceInfo,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // we don't actually need to drop anything
    } else {
        match ty.kind() {
            ty::Dynamic(_, _, ty::Dyn) => {
                // IN THIS ARM, WE HAVE:
                // ty = *mut (dyn Trait)
                // which is: exists<T> ( *mut T,    Vtable<T: Trait> )
                //                       args[0]    args[1]
                //
                // args = ( Data, Vtable )
                //                  |
                //                  v
                //                /-------\
                //                | ...   |
                //                \-------/
                //
                let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());

                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            ty::Dynamic(_, _, ty::DynStar) => {
                // IN THIS ARM, WE HAVE:
                // ty = *mut (dyn* Trait)
                // which is: *mut exists<T: sizeof(T) == sizeof(usize)> (T, Vtable<T: Trait>)
                //
                // args = [ * ]
                //          |
                //          v
                //      ( Data, Vtable )
                //                |
                //                v
                //              /-------\
                //              | ...   |
                //              \-------/
                //
                //
                // WE CAN CONVERT THIS INTO THE ABOVE LOGIC BY DOING
                //
                // data = &(*args[0]).0    // gives a pointer to Data above (really the same pointer)
                // vtable = (*args[0]).1   // loads the vtable out
                // (data, vtable)          // an equivalent Rust `*mut dyn Trait`
                //
                // SO THEN WE CAN USE THE ABOVE CODE.
                let (data, vtable) = drop_place.to_cvalue(fx).dyn_star_force_data_on_stack(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable);

                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[data]);
            }
            _ => {
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(drop_instance, ty::List::empty());

                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(fx.tcx.mk_ref(
                        fx.tcx.lifetimes.re_erased,
                        TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0], true);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(source_info);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1], false).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}