1 //! Handling of everything related to the calling convention. Also fills `fx.local_map`.
7 use rustc_middle
::middle
::codegen_fn_attrs
::CodegenFnAttrFlags
;
8 use rustc_middle
::ty
::layout
::FnAbiOf
;
9 use rustc_target
::abi
::call
::{Conv, FnAbi}
;
10 use rustc_target
::spec
::abi
::Abi
;
12 use cranelift_codegen
::ir
::{AbiParam, SigRef}
;
14 use self::pass_mode
::*;
15 use crate::prelude
::*;
17 pub(crate) use self::returning
::codegen_return
;
// NOTE(review): this extraction is garbled — each original source line is split
// across several display lines, and the embedded original line numbers show that
// lines 20, 23, 28, 30-38, 40, 42, 46 and 48-49 are missing (the `tcx` parameter,
// the return type, most `Conv::*` match arms, and closing braces). Do not edit
// this code without first recovering the complete upstream file.
//
// Purpose (from the visible fragments): builds a Cranelift `Signature` from a
// rustc `FnAbi` — maps `fn_abi.conv` to a Cranelift `CallConv`, lowers each
// argument via `ArgAbi::get_abi_param`, and prepends an optional return-place
// pointer parameter produced by `get_abi_return` before the regular inputs.
19 fn clif_sig_from_fn_abi
<'tcx
>(
21 triple
: &target_lexicon
::Triple
,
22 fn_abi
: &FnAbi
<'tcx
, Ty
<'tcx
>>,
// Select the Cranelift calling convention for this FnAbi. Rust and C use the
// target triple's default; unhandled Conv variants fall through to todo!().
24 let call_conv
= match fn_abi
.conv
{
25 Conv
::Rust
| Conv
::C
=> CallConv
::triple_default(triple
),
26 Conv
::X86_64SysV
=> CallConv
::SystemV
,
27 Conv
::X86_64Win64
=> CallConv
::WindowsFastcall
,
29 | Conv
::CCmseNonSecureCall
39 | Conv
::AvrNonBlockingInterrupt
=> todo
!("{:?}", fn_abi
.conv
),
// One rustc-level argument may lower to zero or more Cranelift AbiParams,
// hence the into_iter()/flatten().
41 let inputs
= fn_abi
.args
.iter().map(|arg_abi
| arg_abi
.get_abi_param(tcx
).into_iter()).flatten();
43 let (return_ptr
, returns
) = fn_abi
.ret
.get_abi_return(tcx
);
44 // Sometimes the first param is an pointer to the place where the return value needs to be stored.
45 let params
: Vec
<_
> = return_ptr
.into_iter().chain(inputs
).collect();
47 Signature { params, returns, call_conv }
// NOTE(review): garbled fragment — original lines 51, 53-54, 56-58 and 60-62 are
// missing (presumably the `tcx`/`inst` parameters, the return type, and a call
// wrapping `clif_sig_from_fn_abi` — TODO confirm against the upstream file).
//
// Purpose (from the visible fragments): computes the Cranelift signature for a
// function instance. Asserts the instance is fully monomorphized (no inference
// variables in its substs) and computes its FnAbi with no extra args.
50 pub(crate) fn get_function_sig
<'tcx
>(
52 triple
: &target_lexicon
::Triple
,
// Monomorphization guard: substs must contain no inference variables.
55 assert
!(!inst
.substs
.needs_infer());
59 &RevealAllLayoutCx(tcx
).fn_abi_of_instance(inst
, ty
::List
::empty()),
// NOTE(review): garbled fragment — original lines 65, 67-68 and 72-73 are
// missing (the `tcx`/`inst` parameters, the return type, and the closing brace).
//
// Purpose (from the visible fragments): declares the symbol for a monomorphized
// instance in `module` with `Linkage::Import` and returns the result of
// `declare_function(...).unwrap()` (presumably a `FuncId` — TODO confirm).
63 /// Instance must be monomorphized
64 pub(crate) fn import_function
<'tcx
>(
66 module
: &mut dyn Module
,
// The mangled symbol name comes straight from rustc's symbol_name query.
69 let name
= tcx
.symbol_name(inst
).name
;
70 let sig
= get_function_sig(tcx
, module
.isa().triple(), inst
);
71 module
.declare_function(name
, Linkage
::Import
, &sig
).unwrap()
// NOTE(review): garbled fragment of an `impl FunctionCx` block — many original
// lines are missing throughout (closing braces, parameter lists of `lib_call`
// and `easy_call`, the iterator adapters between lines 112-115, the `else`
// branches, and the final match arms/returns). Do not edit without the
// complete upstream file.
74 impl<'tcx
> FunctionCx
<'_
, '_
, 'tcx
> {
// Resolves `inst` to a FuncRef usable inside the current function: declares
// the symbol as an import, then declares it inside the Cranelift function.
// Optionally annotates the func_ref with the instance for clif-comment output.
75 /// Instance must be monomorphized
76 pub(crate) fn get_function_ref(&mut self, inst
: Instance
<'tcx
>) -> FuncRef
{
77 let func_id
= import_function(self.tcx
, self.module
, inst
);
78 let func_ref
= self.module
.declare_func_in_func(func_id
, &mut self.bcx
.func
);
80 if self.clif_comments
.enabled() {
81 self.add_comment(func_ref
, format
!("{:?}", inst
));
// Calls a named external library function with explicit Cranelift-level
// params/returns, using the target's default calling convention. Asserts the
// callee returns at most two values. (Fragment: the `name`/`args` parameters
// and the return expression are among the missing lines.)
87 pub(crate) fn lib_call(
90 params
: Vec
<AbiParam
>,
91 returns
: Vec
<AbiParam
>,
94 let sig
= Signature { params, returns, call_conv: CallConv::triple_default(self.triple()) }
;
95 let func_id
= self.module
.declare_function(name
, Linkage
::Import
, &sig
).unwrap();
96 let func_ref
= self.module
.declare_func_in_func(func_id
, &mut self.bcx
.func
);
97 let call_inst
= self.bcx
.ins().call(func_ref
, args
);
98 if self.clif_comments
.enabled() {
99 self.add_comment(call_inst
, format
!("easy_call {}", name
));
101 let results
= self.bcx
.inst_results(call_inst
);
102 assert
!(results
.len() <= 2, "{}", results
.len());
// Convenience wrapper over lib_call that takes rustc-level CValue arguments:
// derives each argument's AbiParam from its clif type and loads it as a
// scalar, derives return AbiParams from `return_ty` (one per tuple element,
// or a single param otherwise), and repackages 0/1/2 returned values into a
// CValue of `return_layout`.
106 pub(crate) fn easy_call(
109 args
: &[CValue
<'tcx
>],
112 let (input_tys
, args
): (Vec
<_
>, Vec
<_
>) = args
115 (AbiParam
::new(self.clif_type(arg
.layout().ty
).unwrap()), arg
.load_scalar(self))
118 let return_layout
= self.layout_of(return_ty
);
119 let return_tys
= if let ty
::Tuple(tup
) = return_ty
.kind() {
120 tup
.types().map(|ty
| AbiParam
::new(self.clif_type(ty
).unwrap())).collect()
122 vec
![AbiParam
::new(self.clif_type(return_ty
).unwrap())]
124 let ret_vals
= self.lib_call(name
, input_tys
, return_tys
, &args
);
// Zero results: fabricate a by-ref CValue at a dangling-but-aligned address
// (pointer_type.bytes() is used as the address) — the value is never read.
126 [] => CValue
::by_ref(
127 Pointer
::const_addr(self, i64::from(self.pointer_type
.bytes())),
130 [val
] => CValue
::by_val(val
, return_layout
),
131 [val
, extra
] => CValue
::by_val_pair(val
, extra
, return_layout
),
// NOTE(review): garbled fragment — original lines 140, 142-143, 147, 149-150,
// 152-153 and 155-156 are missing (the `local`/`is_ssa` parameters, the return
// type, `else` keywords/braces, and the final return of `place`).
//
// Purpose (from the visible fragments): creates the CPlace backing a MIR local —
// an SSA variable pair for ScalarPair layouts, a single SSA variable for other
// SSA-eligible locals, and a stack slot otherwise; then emits a debugging
// comment for the place.
137 /// Make a [`CPlace`] capable of holding value of the specified type.
138 fn make_local_place
<'tcx
>(
139 fx
: &mut FunctionCx
<'_
, '_
, 'tcx
>,
141 layout
: TyAndLayout
<'tcx
>,
144 let place
= if is_ssa
{
// ScalarPair values need two Cranelift variables (one per scalar component).
145 if let rustc_target
::abi
::Abi
::ScalarPair(_
, _
) = layout
.abi
{
146 CPlace
::new_var_pair(fx
, local
, layout
)
148 CPlace
::new_var(fx
, local
, layout
)
151 CPlace
::new_stack_slot(fx
, layout
)
154 self::comments
::add_local_place_comments(fx
, place
, local
);
// NOTE(review): garbled fragment — many original lines are missing throughout
// (e.g. 161, 163-164, 166, 168, 170, 173, 175, 178-179, 182-186, 188, 195,
// 199-200, 204-208, 210, 212, 215-216, 218, 223, 225-226, 230, 232, 235, 237,
// 245, 248, 250-251, 253, 255-258, 261-262, 266-267, 272-277, 281, 283,
// 286-287, 289), including the `enum ArgKind` header, several closing braces,
// and the binding of `ret_place`/`func_params`. Do not edit without the
// complete upstream file.
//
// Purpose (from the visible fragments): the function prologue — creates block
// params for the Cranelift entry block, runs SSA analysis, consumes the
// return-place param, maps each MIR argument (including "rust-call" spread
// tuples and the implicit #[track_caller] caller-location argument) into
// fx.local_map, writes argument values into their places, allocates places for
// vars/temps, and jumps to the MIR START_BLOCK.
159 pub(crate) fn codegen_fn_prelude
<'tcx
>(fx
: &mut FunctionCx
<'_
, '_
, 'tcx
>, start_block
: Block
) {
160 fx
.bcx
.append_block_params_for_function_params(start_block
);
162 fx
.bcx
.switch_to_block(start_block
);
165 let ssa_analyzed
= crate::analyze
::analyze(fx
);
167 self::comments
::add_args_header_comment(fx
);
169 let mut block_params_iter
= fx
.bcx
.func
.dfg
.block_params(start_block
).to_vec().into_iter();
// The return place is consumed first; it must land at RETURN_PLACE (local 0).
171 self::returning
::codegen_return_param(fx
, &ssa_analyzed
, &mut block_params_iter
);
172 assert_eq
!(fx
.local_map
.push(ret_place
), RETURN_PLACE
);
174 // None means pass_mode == NoPass
176 Normal(Option
<CValue
<'tcx
>>),
177 Spread(Vec
<Option
<CValue
<'tcx
>>>),
// fn_abi is taken out of fx so arg_abis_iter can borrow it, and restored below.
180 let fn_abi
= fx
.fn_abi
.take().unwrap();
181 let mut arg_abis_iter
= fn_abi
.args
.iter();
187 let arg_ty
= fx
.monomorphize(fx
.mir
.local_decls
[local
].ty
);
189 // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
190 if Some(local
) == fx
.mir
.spread_arg
{
191 // This argument (e.g. the last argument in the "rust-call" ABI)
192 // is a tuple that was spread at the ABI level and now we have
193 // to reconstruct it into a tuple local variable, from multiple
194 // individual function arguments.
196 let tupled_arg_tys
= match arg_ty
.kind() {
197 ty
::Tuple(ref tys
) => tys
,
198 _
=> bug
!("spread argument isn't a tuple?! but {:?}", arg_ty
),
201 let mut params
= Vec
::new();
202 for (i
, _arg_ty
) in tupled_arg_tys
.types().enumerate() {
203 let arg_abi
= arg_abis_iter
.next().unwrap();
205 cvalue_for_param(fx
, Some(local
), Some(i
), arg_abi
, &mut block_params_iter
);
209 (local
, ArgKind
::Spread(params
), arg_ty
)
211 let arg_abi
= arg_abis_iter
.next().unwrap();
213 cvalue_for_param(fx
, Some(local
), None
, arg_abi
, &mut block_params_iter
);
214 (local
, ArgKind
::Normal(param
), arg_ty
)
217 .collect
::<Vec
<(Local
, ArgKind
<'tcx
>, Ty
<'tcx
>)>>();
219 assert
!(fx
.caller_location
.is_none());
220 if fx
.instance
.def
.requires_caller_location(fx
.tcx
) {
221 // Store caller location for `#[track_caller]`.
222 let arg_abi
= arg_abis_iter
.next().unwrap();
224 Some(cvalue_for_param(fx
, None
, None
, arg_abi
, &mut block_params_iter
).unwrap());
// Sanity checks: every ArgAbi and every Cranelift block param must be consumed.
227 assert
!(arg_abis_iter
.next().is_none(), "ArgAbi left behind");
228 fx
.fn_abi
= Some(fn_abi
);
229 assert
!(block_params_iter
.next().is_none(), "arg_value left behind");
231 self::comments
::add_locals_header_comment(fx
);
233 for (local
, arg_kind
, ty
) in func_params
{
234 let layout
= fx
.layout_of(ty
);
236 let is_ssa
= ssa_analyzed
[local
] == crate::analyze
::SsaKind
::Ssa
;
238 // While this is normally an optimization to prevent an unnecessary copy when an argument is
239 // not mutated by the current function, this is necessary to support unsized arguments.
240 if let ArgKind
::Normal(Some(val
)) = arg_kind
{
241 if let Some((addr
, meta
)) = val
.try_to_ptr() {
242 // Ownership of the value at the backing storage for an argument is passed to the
243 // callee per the ABI, so it is fine to borrow the backing storage of this argument
244 // to prevent a copy.
246 let place
= if let Some(meta
) = meta
{
247 CPlace
::for_ptr_with_extra(addr
, meta
, val
.layout())
249 CPlace
::for_ptr(addr
, val
.layout())
252 self::comments
::add_local_place_comments(fx
, place
, local
);
254 assert_eq
!(fx
.local_map
.push(place
), local
);
259 let place
= make_local_place(fx
, local
, layout
, is_ssa
);
260 assert_eq
!(fx
.local_map
.push(place
), local
);
263 ArgKind
::Normal(param
) => {
264 if let Some(param
) = param
{
265 place
.write_cvalue(fx
, param
);
268 ArgKind
::Spread(params
) => {
// Spread tuple: write each element into the corresponding field of the place.
269 for (i
, param
) in params
.into_iter().enumerate() {
270 if let Some(param
) = param
{
271 place
.place_field(fx
, mir
::Field
::new(i
)).write_cvalue(fx
, param
);
278 for local
in fx
.mir
.vars_and_temps_iter() {
279 let ty
= fx
.monomorphize(fx
.mir
.local_decls
[local
].ty
);
280 let layout
= fx
.layout_of(ty
);
282 let is_ssa
= ssa_analyzed
[local
] == crate::analyze
::SsaKind
::Ssa
;
284 let place
= make_local_place(fx
, local
, layout
, is_ssa
);
285 assert_eq
!(fx
.local_map
.push(place
), local
);
288 fx
.bcx
.ins().jump(*fx
.block_map
.get(START_BLOCK
).unwrap(), &[]);
// NOTE(review): garbled fragment — the struct's fields (original lines 292-295)
// are missing. Later code in this file accesses `.value` (a CValue) and
// `.is_owned` (a bool per the `matches!` initializer below) — presumably those
// are the two fields; TODO confirm against the upstream file.
291 struct CallArgument
<'tcx
> {
// NOTE(review): garbled fragment — original lines 301 and 304-305 are missing
// (the `CallArgument {` struct-literal opener and the closing braces).
//
// Purpose (from the visible fragments): lowers a MIR call operand into a
// CallArgument: codegens the operand into a CValue, and records whether the
// operand is moved (`Operand::Move`) so the callee may take ownership of its
// backing storage.
296 // FIXME avoid intermediate `CValue` before calling `adjust_arg_for_abi`
297 fn codegen_call_argument_operand
<'tcx
>(
298 fx
: &mut FunctionCx
<'_
, '_
, 'tcx
>,
299 operand
: &Operand
<'tcx
>,
300 ) -> CallArgument
<'tcx
> {
302 value
: codegen_operand(fx
, operand
),
303 is_owned
: matches
!(operand
, Operand
::Move(_
)),
// NOTE(review): garbled fragment — a large number of original lines are missing
// (e.g. 309, 313, 315, 317, 319, 323-324, 326, 329, 331-338, 341-342, 348-355,
// 357-358, 362, 364-365, 368-369, 371-372, 378, 382-383, 385, 390-393, 395-396,
// 401-403, 405-407, 409-410, 412, 416-417, 419-421, 425, 427-430, 433-436,
// 440-441, 445, 447-449, 453, 455-457, 459, 461-463, 465, 470-472, 476-477,
// 479, 482-483, 485, 487-488, 490-492, 495-499, 503, 505-506), including the
// `span` parameter, the `enum CallTarget` header, the early returns after
// intrinsic/drop-glue handling, and most closing braces. Do not edit without
// the complete upstream file.
//
// Purpose (from the visible fragments): lowers a MIR Call terminator. Resolves
// the callee (special-casing "llvm."-prefixed symbols, rustc intrinsics, and
// empty drop glue), computes the FnAbi (instance or fn-pointer form, with
// C-variadic extra args), unpacks the "rust-call" argument tuple, appends the
// #[track_caller] caller location when required, selects a direct or indirect
// (including virtual/vtable) call target, emits the call via
// codegen_with_call_return_arg, patches the signature for C-variadic calls,
// and finally jumps to the destination block or traps for diverging calls.
307 pub(crate) fn codegen_terminator_call
<'tcx
>(
308 fx
: &mut FunctionCx
<'_
, '_
, 'tcx
>,
310 func
: &Operand
<'tcx
>,
311 args
: &[Operand
<'tcx
>],
312 mir_dest
: Option
<(Place
<'tcx
>, BasicBlock
)>,
314 let fn_ty
= fx
.monomorphize(func
.ty(fx
.mir
, fx
.tcx
));
316 fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv
::reveal_all(), fn_ty
.fn_sig(fx
.tcx
));
318 let destination
= mir_dest
.map(|(place
, bb
)| (codegen_place(fx
, place
), bb
));
320 // Handle special calls like instrinsics and empty drop glue.
321 let instance
= if let ty
::FnDef(def_id
, substs
) = *fn_ty
.kind() {
322 let instance
= ty
::Instance
::resolve(fx
.tcx
, ty
::ParamEnv
::reveal_all(), def_id
, substs
)
325 .polymorphize(fx
.tcx
);
// Symbols starting with "llvm." are emulated LLVM intrinsics.
327 if fx
.tcx
.symbol_name(instance
).name
.starts_with("llvm.") {
328 crate::intrinsics
::codegen_llvm_intrinsic_call(
330 &fx
.tcx
.symbol_name(instance
).name
,
339 InstanceDef
::Intrinsic(_
) => {
340 crate::intrinsics
::codegen_intrinsic_call(fx
, instance
, args
, destination
, span
);
343 InstanceDef
::DropGlue(_
, None
) => {
344 // empty drop glue - a nop.
345 let (_
, dest
) = destination
.expect("Non terminating drop_in_place_real???");
346 let ret_block
= fx
.get_block(dest
);
347 fx
.bcx
.ins().jump(ret_block
, &[]);
// Arguments past the declared inputs are C-variadic extra args; their
// monomorphized types feed into the FnAbi computation below.
356 let extra_args
= &args
[fn_sig
.inputs().len()..];
359 .mk_type_list(extra_args
.iter().map(|op_arg
| fx
.monomorphize(op_arg
.ty(fx
.mir
, fx
.tcx
))));
360 let fn_abi
= if let Some(instance
) = instance
{
361 RevealAllLayoutCx(fx
.tcx
).fn_abi_of_instance(instance
, extra_args
)
363 RevealAllLayoutCx(fx
.tcx
).fn_abi_of_fn_ptr(fn_ty
.fn_sig(fx
.tcx
), extra_args
)
366 let is_cold
= instance
367 .map(|inst
| fx
.tcx
.codegen_fn_attrs(inst
.def_id()).flags
.contains(CodegenFnAttrFlags
::COLD
))
370 // FIXME Mark current_block block as cold once Cranelift supports it
373 // Unpack arguments tuple for closures
374 let mut args
= if fn_sig
.abi
== Abi
::RustCall
{
375 assert_eq
!(args
.len(), 2, "rust-call abi requires two arguments");
376 let self_arg
= codegen_call_argument_operand(fx
, &args
[0]);
377 let pack_arg
= codegen_call_argument_operand(fx
, &args
[1]);
379 let tupled_arguments
= match pack_arg
.value
.layout().ty
.kind() {
380 ty
::Tuple(ref tupled_arguments
) => tupled_arguments
,
381 _
=> bug
!("argument to function with \"rust-call\" ABI is not a tuple"),
384 let mut args
= Vec
::with_capacity(1 + tupled_arguments
.len());
386 for i
in 0..tupled_arguments
.len() {
387 args
.push(CallArgument
{
388 value
: pack_arg
.value
.value_field(fx
, mir
::Field
::new(i
)),
389 is_owned
: pack_arg
.is_owned
,
394 args
.iter().map(|arg
| codegen_call_argument_operand(fx
, arg
)).collect
::<Vec
<_
>>()
397 // Pass the caller location for `#[track_caller]`.
398 if instance
.map(|inst
| inst
.def
.requires_caller_location(fx
.tcx
)).unwrap_or(false) {
399 let caller_location
= fx
.get_caller_location(span
);
400 args
.push(CallArgument { value: caller_location, is_owned: false }
);
404 assert_eq
!(fn_abi
.args
.len(), args
.len());
408 Indirect(SigRef
, Value
),
// Virtual calls load the fn pointer out of the vtable; `first_arg_override`
// replaces the self argument with the adjusted receiver pointer.
411 let (func_ref
, first_arg_override
) = match instance
{
413 Some(Instance { def: InstanceDef::Virtual(_, idx), .. }
) => {
414 if fx
.clif_comments
.enabled() {
415 let nop_inst
= fx
.bcx
.ins().nop();
418 format
!("virtual call; self arg pass mode: {:?}", &fn_abi
.args
[0]),
422 let (ptr
, method
) = crate::vtable
::get_ptr_and_method_ref(fx
, args
[0].value
, idx
);
423 let sig
= clif_sig_from_fn_abi(fx
.tcx
, fx
.triple(), &fn_abi
);
424 let sig
= fx
.bcx
.import_signature(sig
);
426 (CallTarget
::Indirect(sig
, method
), Some(ptr
))
431 let func_ref
= fx
.get_function_ref(instance
);
432 (CallTarget
::Direct(func_ref
), None
)
437 if fx
.clif_comments
.enabled() {
438 let nop_inst
= fx
.bcx
.ins().nop();
439 fx
.add_comment(nop_inst
, "indirect call");
442 let func
= codegen_operand(fx
, func
).load_scalar(fx
);
443 let sig
= clif_sig_from_fn_abi(fx
.tcx
, fx
.triple(), &fn_abi
);
444 let sig
= fx
.bcx
.import_signature(sig
);
446 (CallTarget
::Indirect(sig
, func
), None
)
450 let ret_place
= destination
.map(|(place
, _
)| place
);
451 self::returning
::codegen_with_call_return_arg(fx
, &fn_abi
.ret
, ret_place
, |fx
, return_ptr
| {
452 let call_args
= return_ptr
454 .chain(first_arg_override
.into_iter())
458 .skip(if first_arg_override
.is_some() { 1 }
else { 0 }
)
460 adjust_arg_for_abi(fx
, arg
.value
, &fn_abi
.args
[i
], arg
.is_owned
).into_iter()
464 .collect
::<Vec
<Value
>>();
466 let call_inst
= match func_ref
{
467 CallTarget
::Direct(func_ref
) => fx
.bcx
.ins().call(func_ref
, &call_args
),
468 CallTarget
::Indirect(sig
, func_ptr
) => {
469 fx
.bcx
.ins().call_indirect(sig
, func_ptr
, &call_args
)
473 // FIXME find a cleaner way to support varargs
474 if fn_sig
.c_variadic
{
475 if !matches
!(fn_sig
.abi
, Abi
::C { .. }
) {
478 .span_fatal(span
, &format
!("Variadic call for non-C abi {:?}", fn_sig
.abi
));
// Rewrite the call's signature so its params match the actual argument types.
480 let sig_ref
= fx
.bcx
.func
.dfg
.call_signature(call_inst
).unwrap();
481 let abi_params
= call_args
484 let ty
= fx
.bcx
.func
.dfg
.value_type(arg
);
486 // FIXME set %al to upperbound on float args once floats are supported
489 .span_fatal(span
, &format
!("Non int ty {:?} for variadic call", ty
));
493 .collect
::<Vec
<AbiParam
>>();
494 fx
.bcx
.func
.dfg
.signatures
[sig_ref
].params
= abi_params
;
500 if let Some((_
, dest
)) = destination
{
501 let ret_block
= fx
.get_block(dest
);
502 fx
.bcx
.ins().jump(ret_block
, &[]);
504 trap_unreachable(fx
, "[corruption] Diverging function returned");
// NOTE(review): garbled fragment — many original lines are missing (e.g. 510,
// 512, 515, 518-520, 524, 530-531, 533, 537-538, 540-541, 543, 545, 549-550,
// 552, 554, 558, 560-562), including the `span` parameter, closing braces,
// and the `else` structure between the dyn and static drop paths; the function
// also continues past the end of this extract. Do not edit without the
// complete upstream file.
//
// Purpose (from the visible fragments): emits a drop of `drop_place`. Empty
// drop glue is a no-op. For `dyn Trait` places the drop fn is loaded from the
// vtable (slot 0) and called indirectly on the data pointer; otherwise the
// resolved drop_in_place instance is called directly with a &mut reference to
// the place, appending the caller location when #[track_caller] requires it.
508 pub(crate) fn codegen_drop
<'tcx
>(
509 fx
: &mut FunctionCx
<'_
, '_
, 'tcx
>,
511 drop_place
: CPlace
<'tcx
>,
513 let ty
= drop_place
.layout().ty
;
514 let drop_instance
= Instance
::resolve_drop_in_place(fx
.tcx
, ty
).polymorphize(fx
.tcx
);
516 if let ty
::InstanceDef
::DropGlue(_
, None
) = drop_instance
.def
{
517 // we don't actually need to drop anything
// Dyn-trait path: fetch data pointer + vtable, load the drop fn from the vtable.
521 let (ptr
, vtable
) = drop_place
.to_ptr_maybe_unsized();
522 let ptr
= ptr
.get_addr(fx
);
523 let drop_fn
= crate::vtable
::drop_fn_of_obj(fx
, vtable
.unwrap());
525 // FIXME(eddyb) perhaps move some of this logic into
526 // `Instance::resolve_drop_in_place`?
527 let virtual_drop
= Instance
{
528 def
: ty
::InstanceDef
::Virtual(drop_instance
.def_id(), 0),
529 substs
: drop_instance
.substs
,
532 RevealAllLayoutCx(fx
.tcx
).fn_abi_of_instance(virtual_drop
, ty
::List
::empty());
534 let sig
= clif_sig_from_fn_abi(fx
.tcx
, fx
.triple(), &fn_abi
);
535 let sig
= fx
.bcx
.import_signature(sig
);
536 fx
.bcx
.ins().call_indirect(sig
, drop_fn
, &[ptr
]);
// Static path: a virtual drop instance must not reach here.
539 assert
!(!matches
!(drop_instance
.def
, InstanceDef
::Virtual(_
, _
)));
542 RevealAllLayoutCx(fx
.tcx
).fn_abi_of_instance(drop_instance
, ty
::List
::empty());
// drop_in_place takes the place by &mut reference (ReErased region).
544 let arg_value
= drop_place
.place_ref(
546 fx
.layout_of(fx
.tcx
.mk_ref(
547 &ty
::RegionKind
::ReErased
,
548 TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut }
,
551 let arg_value
= adjust_arg_for_abi(fx
, arg_value
, &fn_abi
.args
[0], true);
553 let mut call_args
: Vec
<Value
> = arg_value
.into_iter().collect
::<Vec
<_
>>();
555 if drop_instance
.def
.requires_caller_location(fx
.tcx
) {
556 // Pass the caller location for `#[track_caller]`.
557 let caller_location
= fx
.get_caller_location(span
);
559 adjust_arg_for_abi(fx
, caller_location
, &fn_abi
.args
[1], false).into_iter(),
563 let func_ref
= fx
.get_function_ref(drop_instance
);
564 fx
.bcx
.ins().call(func_ref
, &call_args
);