compiler/rustc_codegen_cranelift/src/abi/mod.rs
//! Handling of everything related to the calling convention. Also fills `fx.local_map`.

#[cfg(debug_assertions)]
mod comments;
mod pass_mode;
mod returning;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::AbiParam;
use smallvec::smallvec;

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::{can_return_to_ssa_var, codegen_return};

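/// Build a Cranelift [`Signature`] from a rustc [`FnAbi`]: the calling convention is
/// mapped to a Cranelift [`CallConv`], and each argument, as well as the return value,
/// is lowered to zero or more `AbiParam`s.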
fn clif_sig_from_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
    let call_conv = match fn_abi.conv {
        Conv::Rust | Conv::C => CallConv::triple_default(triple),
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,
        Conv::ArmAapcs
        | Conv::CCmseNonSecureCall
        | Conv::Msp430Intr
        | Conv::PtxKernel
        | Conv::X86Fastcall
        | Conv::X86Intr
        | Conv::X86Stdcall
        | Conv::X86ThisCall
        | Conv::X86VectorCall
        | Conv::AmdGpuKernel
        | Conv::AvrInterrupt
        | Conv::AvrNonBlockingInterrupt => {
            todo!("{:?}", fn_abi.conv)
        }
    };
    let inputs = fn_abi
        .args
        .iter()
        .flat_map(|arg_abi| arg_abi.get_abi_param(tcx));

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();

    Signature {
        params,
        returns,
        call_conv,
    }
}

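/// Get the Cranelift [`Signature`] for the given function instance.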
pub(crate) fn get_function_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    inst: Instance<'tcx>,
) -> Signature {
    assert!(!inst.substs.needs_infer());
    clif_sig_from_fn_abi(
        tcx,
        triple,
        &FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]),
    )
}

/// Instance must be monomorphized
pub(crate) fn import_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    module: &mut impl Module,
    inst: Instance<'tcx>,
) -> FuncId {
    let name = tcx.symbol_name(inst).name.to_string();
    let sig = get_function_sig(tcx, module.isa().triple(), inst);
    module
        .declare_function(&name, Linkage::Import, &sig)
        .unwrap()
}

impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, &mut self.cx.module, inst);
        let func_ref = self
            .cx
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);

        #[cfg(debug_assertions)]
        self.add_comment(func_ref, format!("{:?}", inst));

        func_ref
    }

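    /// Call a library function by symbol name with an explicitly provided Cranelift
    /// signature, returning the raw Cranelift result values.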
    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> &[Value] {
        let sig = Signature {
            params,
            returns,
            call_conv: CallConv::triple_default(self.triple()),
        };
        let func_id = self
            .cx
            .module
            .declare_function(&name, Linkage::Import, &sig)
            .unwrap();
        let func_ref = self
            .cx
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);
        let call_inst = self.bcx.ins().call(func_ref, args);
        #[cfg(debug_assertions)]
        {
            self.add_comment(call_inst, format!("easy_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        assert!(results.len() <= 2, "{}", results.len());
        results
    }

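    /// Like [`Self::lib_call`], but derives the signature from the Cranelift types
    /// of the arguments and the given return type, and wraps the result back up as
    /// a [`CValue`]. A tuple return type is split into one return value per element.
    ///
    /// A minimal usage sketch (the callee name and argument values are illustrative only):
    ///
    /// ```ignore
    /// // Call `fmodf(a, b)` from libm and get the result back as a `CValue`:
    /// let res = fx.easy_call("fmodf", &[a, b], fx.tcx.types.f32);
    /// ```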
    pub(crate) fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| {
                (
                    AbiParam::new(self.clif_type(arg.layout().ty).unwrap()),
                    arg.load_scalar(self),
                )
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.types()
                .map(|ty| AbiParam::new(self.clif_type(ty).unwrap()))
                .collect()
        } else {
            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
        };
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
        match *ret_vals {
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
                return_layout,
            ),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
            _ => unreachable!(),
        }
    }
}

/// Make a [`CPlace`] capable of holding a value of the specified type.
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    local: Local,
    layout: TyAndLayout<'tcx>,
    is_ssa: bool,
) -> CPlace<'tcx> {
    let place = if is_ssa {
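        // Scalar pairs need two Cranelift variables, one per component.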
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
        } else {
            CPlace::new_var(fx, local, layout)
        }
    } else {
        CPlace::new_stack_slot(fx, layout)
    };

    #[cfg(debug_assertions)]
    self::comments::add_local_place_comments(fx, place, local);

    place
}

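/// Emit the function prologue: bind the incoming block parameters to the return
/// place and the argument locals, allocate places for all other locals, and jump
/// to the block corresponding to the MIR `START_BLOCK`.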
pub(crate) fn codegen_fn_prelude<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    start_block: Block,
) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);
    fx.bcx.ins().nop();

    let ssa_analyzed = crate::analyze::analyze(fx);

    #[cfg(debug_assertions)]
    self::comments::add_args_header_comment(fx);

    let mut block_params_iter = fx
        .bcx
        .func
        .dfg
        .block_params(start_block)
        .to_vec()
        .into_iter();
    let ret_place =
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

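    // Temporarily move `fn_abi` out of `fx`; it is put back once all arguments
    // have been processed.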
    let fn_abi = fx.fn_abi.take().unwrap();
    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.

                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param =
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param =
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location =
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
    }

    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    #[cfg(debug_assertions)]
    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument
        // is not mutated by the current function, it is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                let local_decl = &fx.mir.local_decls[local];
                // Note the `!`: the argument may only be borrowed directly when its type
                // has no interior mutability.
                let internally_mutable = !val.layout().ty.is_freeze(
                    fx.tcx.at(local_decl.source_info.span),
                    ParamEnv::reveal_all(),
                );
                if local_decl.mutability == mir::Mutability::Not && !internally_mutable {
                    // We won't mutate this argument, so it is fine to borrow its backing
                    // storage instead of making a copy.

                    let place = if let Some(meta) = meta {
                        CPlace::for_ptr_with_extra(addr, meta, val.layout())
                    } else {
                        CPlace::for_ptr(addr, val.layout())
                    };

                    #[cfg(debug_assertions)]
                    self::comments::add_local_place_comments(fx, place, local);

                    assert_eq!(fx.local_map.push(place), local);
                    continue;
                }
            }
        }

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place
                            .place_field(fx, mir::Field::new(i))
                            .write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx
        .ins()
        .jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}

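/// Codegen a `TerminatorKind::Call`: resolve the callee, handle intrinsics and
/// empty drop glue specially, lower the arguments according to the callee's
/// `FnAbi`, emit the (direct, indirect, or virtual) call, and branch to the
/// destination block, if any.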
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    span: Span,
    current_block: Block,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Option<(Place<'tcx>, BasicBlock)>,
) {
    let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
    let fn_sig = fx
        .tcx
        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

    let destination = destination.map(|(place, bb)| (codegen_place(fx, place), bb));

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
        let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
            .unwrap()
            .unwrap()
            .polymorphize(fx.tcx);

        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                destination,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(fx, instance, args, destination, span);
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // empty drop glue - a nop.
                let (_, dest) = destination.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        None
    };

    let extra_args = &args[fn_sig.inputs().len()..];
    let extra_args = extra_args
        .iter()
        .map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
        .collect::<Vec<_>>();
    let fn_abi = if let Some(instance) = instance {
        FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
    } else {
        FnAbi::of_fn_ptr(
            &RevealAllLayoutCx(fx.tcx),
            fn_ty.fn_sig(fx.tcx),
            &extra_args,
        )
    };

    let is_cold = instance
        .map(|inst| {
            fx.tcx
                .codegen_fn_attrs(inst.def_id())
                .flags
                .contains(CodegenFnAttrFlags::COLD)
        })
        .unwrap_or(false);
    if is_cold {
        fx.cold_blocks.insert(current_block);
    }

    // Unpack the argument tuple for closures
    let args = if fn_sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_operand(fx, &args[0]);
        let pack_arg = codegen_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(pack_arg.value_field(fx, mir::Field::new(i)));
        }
        args
    } else {
        args.iter()
            .map(|arg| codegen_operand(fx, arg))
            .collect::<Vec<_>>()
    };

    //   |          indirect call target
    //   |          |          the first argument to be passed
    //   v          v
    let (func_ref, first_arg) = match instance {
        // Trait object call
        Some(Instance {
            def: InstanceDef::Virtual(_, idx),
            ..
        }) => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
                );
            }
            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
            (Some(method), smallvec![ptr])
        }

        // Normal call
        Some(_) => (
            None,
            args.get(0)
                .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                .unwrap_or(smallvec![]),
        ),

        // Indirect call
        None => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }
            let func = codegen_operand(fx, func).load_scalar(fx);
            (
                Some(func),
                args.get(0)
                    .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                    .unwrap_or(smallvec![]),
            )
        }
    };

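    // Emit the actual call. `codegen_with_call_return_arg` provides a pointer to
    // the return place as the hidden first argument when the ABI returns indirectly.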
    let ret_place = destination.map(|(place, _)| place);
    let (call_inst, call_args) = self::returning::codegen_with_call_return_arg(
        fx,
        &fn_abi.ret,
        ret_place,
        |fx, return_ptr| {
            let regular_args_count = args.len();
            let mut call_args: Vec<Value> = return_ptr
                .into_iter()
                .chain(first_arg.into_iter())
                .chain(
                    args.into_iter()
                        .enumerate()
                        .skip(1)
                        .flat_map(|(i, arg)| adjust_arg_for_abi(fx, arg, &fn_abi.args[i])),
                )
                .collect::<Vec<_>>();

            if instance
                .map(|inst| inst.def.requires_caller_location(fx.tcx))
                .unwrap_or(false)
            {
                // Pass the caller location for `#[track_caller]`.
                let caller_location = fx.get_caller_location(span);
                call_args.extend(
                    adjust_arg_for_abi(fx, caller_location, &fn_abi.args[regular_args_count])
                        .into_iter(),
                );
                assert_eq!(fn_abi.args.len(), regular_args_count + 1);
            } else {
                assert_eq!(fn_abi.args.len(), regular_args_count);
            }

            let call_inst = if let Some(func_ref) = func_ref {
                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
            } else {
                let func_ref =
                    fx.get_function_ref(instance.expect("non-indirect call on non-FnDef type"));
                fx.bcx.ins().call(func_ref, &call_args)
            };

            (call_inst, call_args)
        },
    );

    // FIXME find a cleaner way to support varargs
    if fn_sig.c_variadic {
        if fn_sig.abi != Abi::C {
            fx.tcx.sess.span_fatal(
                span,
                &format!("Variadic call for non-C abi {:?}", fn_sig.abi),
            );
        }
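        // Cranelift signatures have a fixed arity, so patch the signature of this
        // call instruction to match the argument types that were actually passed.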
        let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
        let abi_params = call_args
            .into_iter()
            .map(|arg| {
                let ty = fx.bcx.func.dfg.value_type(arg);
                if !ty.is_int() {
                    // FIXME set %al to upperbound on float args once floats are supported
                    fx.tcx
                        .sess
                        .span_fatal(span, &format!("Non int ty {:?} for variadic call", ty));
                }
                AbiParam::new(ty)
            })
            .collect::<Vec<AbiParam>>();
        fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
    }

    if let Some((_, dest)) = destination {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        trap_unreachable(fx, "[corruption] Diverging function returned");
    }
}

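/// Codegen a call to the drop glue for `drop_place`. Empty drop glue is a no-op,
/// and trait objects are dropped through the drop function found in their vtable.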
pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    span: Span,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // we don't actually need to drop anything
    } else {
        match ty.kind() {
            ty::Dynamic(..) => {
                let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());

                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), virtual_drop, &[]);

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            _ => {
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), drop_instance, &[]);

                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(fx.tcx.mk_ref(
                        &ty::RegionKind::ReErased,
                        TypeAndMut {
                            ty,
                            mutbl: crate::rustc_hir::Mutability::Mut,
                        },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0]);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(span);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1]).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}