]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_codegen_cranelift/src/abi/mod.rs
New upstream version 1.57.0+dfsg1
[rustc.git] / compiler / rustc_codegen_cranelift / src / abi / mod.rs
1 //! Handling of everything related to the calling convention. Also fills `fx.local_map`.
2
3 mod comments;
4 mod pass_mode;
5 mod returning;
6
7 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
8 use rustc_middle::ty::layout::FnAbiOf;
9 use rustc_target::abi::call::{Conv, FnAbi};
10 use rustc_target::spec::abi::Abi;
11
12 use cranelift_codegen::ir::{AbiParam, SigRef};
13
14 use self::pass_mode::*;
15 use crate::prelude::*;
16
17 pub(crate) use self::returning::codegen_return;
18
19 fn clif_sig_from_fn_abi<'tcx>(
20 tcx: TyCtxt<'tcx>,
21 triple: &target_lexicon::Triple,
22 fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
23 ) -> Signature {
24 let call_conv = match fn_abi.conv {
25 Conv::Rust | Conv::C => CallConv::triple_default(triple),
26 Conv::X86_64SysV => CallConv::SystemV,
27 Conv::X86_64Win64 => CallConv::WindowsFastcall,
28 Conv::ArmAapcs
29 | Conv::CCmseNonSecureCall
30 | Conv::Msp430Intr
31 | Conv::PtxKernel
32 | Conv::X86Fastcall
33 | Conv::X86Intr
34 | Conv::X86Stdcall
35 | Conv::X86ThisCall
36 | Conv::X86VectorCall
37 | Conv::AmdGpuKernel
38 | Conv::AvrInterrupt
39 | Conv::AvrNonBlockingInterrupt => todo!("{:?}", fn_abi.conv),
40 };
41 let inputs = fn_abi.args.iter().map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter()).flatten();
42
43 let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
44 // Sometimes the first param is an pointer to the place where the return value needs to be stored.
45 let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();
46
47 Signature { params, returns, call_conv }
48 }
49
50 pub(crate) fn get_function_sig<'tcx>(
51 tcx: TyCtxt<'tcx>,
52 triple: &target_lexicon::Triple,
53 inst: Instance<'tcx>,
54 ) -> Signature {
55 assert!(!inst.substs.needs_infer());
56 clif_sig_from_fn_abi(
57 tcx,
58 triple,
59 &RevealAllLayoutCx(tcx).fn_abi_of_instance(inst, ty::List::empty()),
60 )
61 }
62
63 /// Instance must be monomorphized
64 pub(crate) fn import_function<'tcx>(
65 tcx: TyCtxt<'tcx>,
66 module: &mut dyn Module,
67 inst: Instance<'tcx>,
68 ) -> FuncId {
69 let name = tcx.symbol_name(inst).name;
70 let sig = get_function_sig(tcx, module.isa().triple(), inst);
71 module.declare_function(name, Linkage::Import, &sig).unwrap()
72 }
73
impl<'tcx> FunctionCx<'_, '_, 'tcx> {
    /// Get a Cranelift [`FuncRef`] usable inside the current function for the
    /// given instance, declaring it as a module-level import if necessary.
    ///
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, self.module, inst);
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);

        // Attach the instance as a comment to the clif IR for debugging.
        if self.clif_comments.enabled() {
            self.add_comment(func_ref, format!("{:?}", inst));
        }

        func_ref
    }

    /// Emit a call to an external function identified by symbol `name` with an
    /// explicitly specified Cranelift signature, using the target's default
    /// calling convention.
    ///
    /// Returns the raw result values of the call; at most two results (a
    /// scalar pair) are supported.
    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> &[Value] {
        let sig = Signature { params, returns, call_conv: CallConv::triple_default(self.triple()) };
        let func_id = self.module.declare_function(name, Linkage::Import, &sig).unwrap();
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);
        let call_inst = self.bcx.ins().call(func_ref, args);
        if self.clif_comments.enabled() {
            self.add_comment(call_inst, format!("easy_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        // Callers (easy_call below) only handle zero, one or two result values.
        assert!(results.len() <= 2, "{}", results.len());
        results
    }

    /// Convenience wrapper around [`Self::lib_call`]: derives the Cranelift
    /// signature from the `CValue` argument types and `return_ty`, then wraps
    /// the raw results back into a [`CValue`].
    pub(crate) fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        // Split each argument into its ABI param type and its loaded scalar value.
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| {
                (AbiParam::new(self.clif_type(arg.layout().ty).unwrap()), arg.load_scalar(self))
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        // A tuple return type is flattened into one ABI param per element.
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.types().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
        } else {
            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
        };
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
        match *ret_vals {
            // No result values: produce a ZST by-ref value at a dangling but
            // well-aligned address (the pointer size is a sufficient alignment).
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
                return_layout,
            ),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
            // lib_call asserts at most two results.
            _ => unreachable!(),
        }
    }
}
136
137 /// Make a [`CPlace`] capable of holding value of the specified type.
138 fn make_local_place<'tcx>(
139 fx: &mut FunctionCx<'_, '_, 'tcx>,
140 local: Local,
141 layout: TyAndLayout<'tcx>,
142 is_ssa: bool,
143 ) -> CPlace<'tcx> {
144 let place = if is_ssa {
145 if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
146 CPlace::new_var_pair(fx, local, layout)
147 } else {
148 CPlace::new_var(fx, local, layout)
149 }
150 } else {
151 CPlace::new_stack_slot(fx, layout)
152 };
153
154 self::comments::add_local_place_comments(fx, place, local);
155
156 place
157 }
158
/// Lower the incoming function arguments into locals and fill `fx.local_map`,
/// then jump to the first MIR basic block.
///
/// Handles the return place, regular arguments, the `rust-call` spread
/// argument, and the implicit `#[track_caller]` caller-location parameter.
pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_block: Block) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);
    // Dummy instruction so comments can be attached at the very start of the block.
    fx.bcx.ins().nop();

    let ssa_analyzed = crate::analyze::analyze(fx);

    self::comments::add_args_header_comment(fx);

    // Snapshot the block params; this iterator is consumed incrementally by the
    // return-param and per-argument lowering below.
    let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
    let ret_place =
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

    // Temporarily take the FnAbi out of `fx` so it can be iterated while `fx`
    // is mutably borrowed inside the closure; it is restored further down.
    let fn_abi = fx.fn_abi.take().unwrap();
    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.

                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                // One ArgAbi (and thus one or more block params) per tuple element.
                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param =
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param =
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location =
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
    }

    // Both iterators must be fully consumed, otherwise the lowering above
    // disagrees with the computed FnAbi / block signature.
    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                // Ownership of the value at the backing storage for an argument is passed to the
                // callee per the ABI, so it is fine to borrow the backing storage of this argument
                // to prevent a copy.

                let place = if let Some(meta) = meta {
                    CPlace::for_ptr_with_extra(addr, meta, val.layout())
                } else {
                    CPlace::for_ptr(addr, val.layout())
                };

                self::comments::add_local_place_comments(fx, place, local);

                assert_eq!(fx.local_map.push(place), local);
                continue;
            }
        }

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                // Reassemble the spread tuple by writing each piece into its field.
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place.place_field(fx, mir::Field::new(i)).write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    // Allocate places for all remaining (non-argument) locals.
    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx.ins().jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}
290
/// A single argument of an outgoing call, together with the ownership
/// information needed by `adjust_arg_for_abi`.
struct CallArgument<'tcx> {
    /// The lowered argument value.
    value: CValue<'tcx>,
    /// Whether ownership of the backing storage is transferred to the callee
    /// (true for `Operand::Move` arguments).
    is_owned: bool,
}
295
296 // FIXME avoid intermediate `CValue` before calling `adjust_arg_for_abi`
297 fn codegen_call_argument_operand<'tcx>(
298 fx: &mut FunctionCx<'_, '_, 'tcx>,
299 operand: &Operand<'tcx>,
300 ) -> CallArgument<'tcx> {
301 CallArgument {
302 value: codegen_operand(fx, operand),
303 is_owned: matches!(operand, Operand::Move(_)),
304 }
305 }
306
/// Lower a MIR `Call` terminator: resolve the callee, lower the arguments
/// according to the computed [`FnAbi`], emit the call (direct, virtual, or
/// indirect through a function pointer), and branch to the destination block
/// (or trap if the call diverges).
///
/// `mir_dest` is `None` for calls to diverging functions.
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    span: Span,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    mir_dest: Option<(Place<'tcx>, BasicBlock)>,
) {
    let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
    let fn_sig =
        fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

    let destination = mir_dest.map(|(place, bb)| (codegen_place(fx, place), bb));

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
        let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
            .unwrap()
            .unwrap()
            .polymorphize(fx.tcx);

        // LLVM-style intrinsics (from e.g. stdarch) are emulated separately.
        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                destination,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(fx, instance, args, destination, span);
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // empty drop glue - a nop.
                let (_, dest) = destination.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        // Not a `FnDef`: a call through a function pointer.
        None
    };

    // Extra arguments past the declared inputs only occur for C-variadic calls.
    let extra_args = &args[fn_sig.inputs().len()..];
    let extra_args = fx
        .tcx
        .mk_type_list(extra_args.iter().map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx))));
    let fn_abi = if let Some(instance) = instance {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(instance, extra_args)
    } else {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_fn_ptr(fn_ty.fn_sig(fx.tcx), extra_args)
    };

    let is_cold = instance
        .map(|inst| fx.tcx.codegen_fn_attrs(inst.def_id()).flags.contains(CodegenFnAttrFlags::COLD))
        .unwrap_or(false);
    if is_cold {
        // FIXME Mark current_block block as cold once Cranelift supports it
    }

    // Unpack arguments tuple for closures
    let mut args = if fn_sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_call_argument_operand(fx, &args[0]);
        let pack_arg = codegen_call_argument_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.value.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        // `self` plus one argument per tuple element; the tuple elements share
        // the ownership state of the tuple operand itself.
        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(CallArgument {
                value: pack_arg.value.value_field(fx, mir::Field::new(i)),
                is_owned: pack_arg.is_owned,
            });
        }
        args
    } else {
        args.iter().map(|arg| codegen_call_argument_operand(fx, arg)).collect::<Vec<_>>()
    };

    // Pass the caller location for `#[track_caller]`.
    if instance.map(|inst| inst.def.requires_caller_location(fx.tcx)).unwrap_or(false) {
        let caller_location = fx.get_caller_location(span);
        args.push(CallArgument { value: caller_location, is_owned: false });
    }

    // Freeze the argument list; it must now line up 1:1 with fn_abi.args.
    let args = args;
    assert_eq!(fn_abi.args.len(), args.len());

    enum CallTarget {
        Direct(FuncRef),
        Indirect(SigRef, Value),
    }

    let (func_ref, first_arg_override) = match instance {
        // Trait object call
        Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
                );
            }

            // Split the fat pointer: the data pointer replaces the `self`
            // argument and the method is loaded from the vtable.
            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0].value, idx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
            let sig = fx.bcx.import_signature(sig);

            (CallTarget::Indirect(sig, method), Some(ptr))
        }

        // Normal call
        Some(instance) => {
            let func_ref = fx.get_function_ref(instance);
            (CallTarget::Direct(func_ref), None)
        }

        // Indirect call
        None => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }

            let func = codegen_operand(fx, func).load_scalar(fx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
            let sig = fx.bcx.import_signature(sig);

            (CallTarget::Indirect(sig, func), None)
        }
    };

    let ret_place = destination.map(|(place, _)| place);
    self::returning::codegen_with_call_return_arg(fx, &fn_abi.ret, ret_place, |fx, return_ptr| {
        // Build the flat Cranelift argument list: optional return-place
        // pointer, optional overridden `self` pointer (virtual calls), then
        // the ABI-adjusted regular arguments. When `first_arg_override` is
        // set, the original first argument is skipped since it was already
        // consumed to extract the data pointer.
        let call_args = return_ptr
            .into_iter()
            .chain(first_arg_override.into_iter())
            .chain(
                args.into_iter()
                    .enumerate()
                    .skip(if first_arg_override.is_some() { 1 } else { 0 })
                    .map(|(i, arg)| {
                        adjust_arg_for_abi(fx, arg.value, &fn_abi.args[i], arg.is_owned).into_iter()
                    })
                    .flatten(),
            )
            .collect::<Vec<Value>>();

        let call_inst = match func_ref {
            CallTarget::Direct(func_ref) => fx.bcx.ins().call(func_ref, &call_args),
            CallTarget::Indirect(sig, func_ptr) => {
                fx.bcx.ins().call_indirect(sig, func_ptr, &call_args)
            }
        };

        // FIXME find a cleaner way to support varargs
        if fn_sig.c_variadic {
            if !matches!(fn_sig.abi, Abi::C { .. }) {
                fx.tcx
                    .sess
                    .span_fatal(span, &format!("Variadic call for non-C abi {:?}", fn_sig.abi));
            }
            // Patch the call's signature to include the types of the actual
            // variadic arguments, which the precomputed FnAbi doesn't cover.
            let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
            let abi_params = call_args
                .into_iter()
                .map(|arg| {
                    let ty = fx.bcx.func.dfg.value_type(arg);
                    if !ty.is_int() {
                        // FIXME set %al to upperbound on float args once floats are supported
                        fx.tcx
                            .sess
                            .span_fatal(span, &format!("Non int ty {:?} for variadic call", ty));
                    }
                    AbiParam::new(ty)
                })
                .collect::<Vec<AbiParam>>();
            fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
        }

        call_inst
    });

    if let Some((_, dest)) = destination {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        // Diverging call: control must never reach this point at runtime.
        trap_unreachable(fx, "[corruption] Diverging function returned");
    }
}
507
/// Emit a call to the drop glue for `drop_place`.
///
/// Trait objects are dropped through the drop function stored in their
/// vtable; all other types call the resolved `drop_in_place` instance
/// directly. Types with empty drop glue produce no code at all.
pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    span: Span,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // we don't actually need to drop anything
    } else {
        match ty.kind() {
            ty::Dynamic(..) => {
                // Trait object: load the drop function pointer from the vtable
                // and call it indirectly on the data pointer.
                let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());

                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            _ => {
                // Virtual drops are only created above, never resolved directly.
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(drop_instance, ty::List::empty());

                // `drop_in_place` takes `&mut T`, so pass the place by reference.
                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(fx.tcx.mk_ref(
                        &ty::RegionKind::ReErased,
                        TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0], true);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(span);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1], false).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}