1 //! Handling of everything related to the calling convention. Also fills `fx.local_map`.
7 use cranelift_module
::ModuleError
;
8 use rustc_middle
::middle
::codegen_fn_attrs
::CodegenFnAttrFlags
;
9 use rustc_middle
::ty
::layout
::FnAbiOf
;
10 use rustc_target
::abi
::call
::{Conv, FnAbi}
;
11 use rustc_target
::spec
::abi
::Abi
;
13 use cranelift_codegen
::ir
::{AbiParam, SigRef}
;
15 use self::pass_mode
::*;
16 use crate::prelude
::*;
18 pub(crate) use self::returning
::codegen_return
;
/// Builds a Cranelift [`Signature`] from a rustc [`FnAbi`].
///
/// NOTE(review): several source lines are missing from this excerpt (among
/// them the `tcx` parameter referenced below, the return type, most of the
/// `Conv` match arms, and closing delimiters); the visible code is
/// reproduced as-is with only comments added.
fn clif_sig_from_fn_abi<'tcx>(
    default_call_conv: CallConv,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    // Translate the rustc-level calling convention into a Cranelift `CallConv`.
    let call_conv = match fn_abi.conv {
        // Rust and C calls both use the target's default calling convention.
        Conv::Rust | Conv::C => default_call_conv,
        Conv::RustCold => CallConv::Cold,
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,
        // Conventions not yet supported by this backend.
        | Conv::CCmseNonSecureCall
        | Conv::AvrNonBlockingInterrupt => todo!("{:?}", fn_abi.conv),
    // Each argument may lower to zero or more Cranelift `AbiParam`s.
    let inputs = fn_abi.args.iter().map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter()).flatten();

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();

    Signature { params, returns, call_conv }
/// Computes the Cranelift signature for a function instance, using the
/// target triple's default calling convention as the fallback.
///
/// NOTE(review): this excerpt is missing several source lines (e.g. the
/// `tcx`/`inst` parameters, the return type, and the call that wraps the
/// last two argument lines); the visible code is reproduced as-is.
pub(crate) fn get_function_sig<'tcx>(
    triple: &target_lexicon::Triple,
    // Signatures can only be computed for fully monomorphized instances.
    assert!(!inst.substs.needs_infer());
        CallConv::triple_default(triple),
        // `RevealAllLayoutCx` computes the fn ABI with opaque types revealed.
        &RevealAllLayoutCx(tcx).fn_abi_of_instance(inst, ty::List::empty()),
/// Declares the given instance as an imported function in `module` and
/// returns its id, reporting a fatal error on conflicting declarations.
///
/// Instance must be monomorphized
///
/// NOTE(review): some source lines are missing from this excerpt (e.g. the
/// `tcx`/`inst` parameters, the return type, and several closing
/// delimiters); the visible code is reproduced as-is.
pub(crate) fn import_function<'tcx>(
    module: &mut dyn Module,
    let name = tcx.symbol_name(inst).name;
    let sig = get_function_sig(tcx, module.isa().triple(), inst);
    match module.declare_function(name, Linkage::Import, &sig) {
        Ok(func_id) => func_id,
        // A static with the same symbol name was already declared.
        Err(ModuleError::IncompatibleDeclaration(_)) => tcx.sess.fatal(&format!(
            "attempt to declare `{name}` as function, but it was already declared as static"
        // The same symbol was already declared with a different signature.
        Err(ModuleError::IncompatibleSignature(_, prev_sig, new_sig)) => tcx.sess.fatal(&format!(
            "attempt to declare `{name}` with signature {new_sig:?}, \
            but it was already declared with signature {prev_sig:?}"
        // Any other declaration error is unexpected; surface it via the panic
        // message produced by `unwrap`.
        Err(err) => Err::<_, _>(err).unwrap(),
impl<'tcx> FunctionCx<'_, '_, 'tcx> {
    // NOTE(review): several source lines are missing from this excerpt
    // (parameter lists, return types, `else` branches, and closing braces);
    // the visible code is reproduced as-is with only comments added.

    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        // Declare (or reuse) the import in the module, then make it
        // referencable from the function currently being built.
        let func_id = import_function(self.tcx, self.module, inst);
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);

        if self.clif_comments.enabled() {
            // Annotate the func ref with the instance it resolves to.
            self.add_comment(func_ref, format!("{:?}", inst));

    /// Emits a call to a library function by symbol name, with explicit
    /// Cranelift-level parameter and return types.
    pub(crate) fn lib_call(
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        // Library calls use the target's default calling convention.
        let sig = Signature { params, returns, call_conv: self.target_config.default_call_conv };
        let func_id = self.module.declare_function(name, Linkage::Import, &sig).unwrap();
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);
        if self.clif_comments.enabled() {
            self.add_comment(func_ref, format!("{:?}", name));
        let call_inst = self.bcx.ins().call(func_ref, args);
        if self.clif_comments.enabled() {
            self.add_comment(call_inst, format!("easy_call {}", name));
        let results = self.bcx.inst_results(call_inst);
        // At most two scalar results are handled (see `easy_call` below).
        assert!(results.len() <= 2, "{}", results.len());

    /// Convenience wrapper around `lib_call` taking and returning `CValue`s.
    pub(crate) fn easy_call(
        args: &[CValue<'tcx>],
        // Split each argument into its ABI param type and its scalar value.
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            (AbiParam::new(self.clif_type(arg.layout().ty).unwrap()), arg.load_scalar(self))
        let return_layout = self.layout_of(return_ty);
        // A tuple return type is lowered to one ABI return per element.
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.iter().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
            // No scalar results: build a by-ref value at a fixed aligned
            // address — presumably a dummy place for a ZST result; TODO confirm.
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
/// Make a [`CPlace`] capable of holding value of the specified type.
///
/// NOTE(review): this excerpt is missing several source lines (e.g. the
/// `local`/`is_ssa` parameters, `else` branches, and the final return of
/// `place`); the visible code is reproduced as-is.
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    layout: TyAndLayout<'tcx>,
    // SSA-eligible locals become Cranelift variables; everything else gets
    // a stack slot.
    let place = if is_ssa {
        // A scalar pair needs two Cranelift variables, one per component.
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
            CPlace::new_var(fx, local, layout)
        CPlace::new_stack_slot(fx, layout)
    self::comments::add_local_place_comments(fx, place, local);
/// Fills `fx.local_map` with a place for every local and copies the incoming
/// function arguments into their places.
///
/// NOTE(review): many source lines are missing from this excerpt (e.g. the
/// `ArgKind` enum header, several match/closure delimiters, and closing
/// braces); the visible code is reproduced as-is with only comments added.
pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_block: Block) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);

    // Determine which locals may live in SSA variables instead of stack slots.
    let ssa_analyzed = crate::analyze::analyze(fx);

    self::comments::add_args_header_comment(fx);

    let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    // The return place must be local 0 (`RETURN_PLACE`).
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),

    let fn_abi = fx.fn_abi.take().unwrap();

    // FIXME implement variadics in cranelift
    if fn_abi.c_variadic {
        fx.tcx.sess.span_fatal(
            "Defining variadic functions is not yet supported by Cranelift",

    let mut arg_abis_iter = fn_abi.args.iter();

            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.
                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.iter().enumerate() {
                    // One ArgAbi per spread tuple element.
                    let arg_abi = arg_abis_iter.next().unwrap();
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);

                (local, ArgKind::Spread(params), arg_ty)
                let arg_abi = arg_abis_iter.next().unwrap();
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());

    // Every ArgAbi and every block param must have been consumed by now.
    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                // Ownership of the value at the backing storage for an argument is passed to the
                // callee per the ABI, so it is fine to borrow the backing storage of this argument
                // to prevent a copy.
                let place = if let Some(meta) = meta {
                    CPlace::for_ptr_with_extra(addr, meta, val.layout())
                    CPlace::for_ptr(addr, val.layout())

                self::comments::add_local_place_comments(fx, place, local);

                assert_eq!(fx.local_map.push(place), local);

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
            ArgKind::Spread(params) => {
                // Write each unpacked element into its tuple field.
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place.place_field(fx, mir::Field::new(i)).write_cvalue(fx, param);

    // Allocate places for all remaining (non-argument) locals.
    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

    // Hand off to the first real MIR basic block.
    fx.bcx.ins().jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
/// An argument to an outgoing call, paired with ownership information.
///
/// NOTE(review): the struct fields are missing from this excerpt; the call
/// sites visible below construct it with `value` and `is_owned` fields.
struct CallArgument<'tcx> {
// FIXME avoid intermediate `CValue` before calling `adjust_arg_for_abi`
/// Lowers a call operand to a [`CallArgument`], recording whether the
/// operand moves ownership (`Operand::Move`) to the callee.
///
/// NOTE(review): the `CallArgument { ... }` constructor delimiters are
/// missing from this excerpt; the visible code is reproduced as-is.
fn codegen_call_argument_operand<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    operand: &Operand<'tcx>,
) -> CallArgument<'tcx> {
        value: codegen_operand(fx, operand),
        // `Operand::Move` transfers ownership of the value to the callee.
        is_owned: matches!(operand, Operand::Move(_)),
/// Codegen a MIR `Call` terminator: resolves the callee, handles intrinsics
/// and empty drop glue, lowers arguments per the fn ABI, and emits either a
/// direct or indirect call.
///
/// NOTE(review): many source lines are missing from this excerpt (e.g. the
/// `CallTarget` enum header, several match arms and argument lists, and
/// closing delimiters); the visible code is reproduced as-is with comments.
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    source_info: mir::SourceInfo,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Place<'tcx>,
    target: Option<BasicBlock>,
    let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
        fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

    let ret_place = codegen_place(fx, destination);

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
        let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
            .polymorphize(fx.tcx);

        // `llvm.*`-named intrinsics get their own dedicated lowering.
        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                &fx.tcx.symbol_name(instance).name,

            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(

            InstanceDef::DropGlue(_, None) => {
                // empty drop glue - a nop.
                let dest = target.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);

    // Arguments past the declared inputs (C varargs).
    let extra_args = &args[fn_sig.inputs().len()..];
        .mk_type_list(extra_args.iter().map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx))));
    let fn_abi = if let Some(instance) = instance {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(instance, extra_args)
        RevealAllLayoutCx(fx.tcx).fn_abi_of_fn_ptr(fn_ty.fn_sig(fx.tcx), extra_args)

    // A call is cold when the rust-cold ABI is used or the callee is `#[cold]`.
    let is_cold = if fn_sig.abi == Abi::RustCold {
        fx.tcx.codegen_fn_attrs(inst.def_id()).flags.contains(CodegenFnAttrFlags::COLD)
        fx.bcx.set_cold_block(fx.bcx.current_block().unwrap());
        if let Some(destination_block) = target {
            fx.bcx.set_cold_block(fx.get_block(destination_block));

    // Unpack arguments tuple for closures
    let mut args = if fn_sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_call_argument_operand(fx, &args[0]);
        let pack_arg = codegen_call_argument_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.value.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());

        for i in 0..tupled_arguments.len() {
            args.push(CallArgument {
                value: pack_arg.value.value_field(fx, mir::Field::new(i)),
                // Unpacked fields inherit ownership from the packed tuple.
                is_owned: pack_arg.is_owned,

        args.iter().map(|arg| codegen_call_argument_operand(fx, arg)).collect::<Vec<_>>()

    // Pass the caller location for `#[track_caller]`.
    if instance.map(|inst| inst.def.requires_caller_location(fx.tcx)).unwrap_or(false) {
        let caller_location = fx.get_caller_location(source_info);
        args.push(CallArgument { value: caller_location, is_owned: false });

    assert_eq!(fn_abi.args.len(), args.len());

        Indirect(SigRef, Value),

    let (func_ref, first_arg_override) = match instance {
        // Virtual (trait object) call: fetch the fn pointer from the vtable.
        Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),

            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0].value, idx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
            let sig = fx.bcx.import_signature(sig);
            (CallTarget::Indirect(sig, method), Some(ptr.get_addr(fx)))

            // Statically known callee: emit a direct call.
            let func_ref = fx.get_function_ref(instance);
            (CallTarget::Direct(func_ref), None)

            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");

            // Function pointer: load the pointer and call indirectly.
            let func = codegen_operand(fx, func).load_scalar(fx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
            let sig = fx.bcx.import_signature(sig);
            (CallTarget::Indirect(sig, func), None)

    self::returning::codegen_with_call_return_arg(fx, &fn_abi.ret, ret_place, |fx, return_ptr| {
        let call_args = return_ptr
            .chain(first_arg_override.into_iter())
            .skip(if first_arg_override.is_some() { 1 } else { 0 })
                adjust_arg_for_abi(fx, arg.value, &fn_abi.args[i], arg.is_owned).into_iter()
            .collect::<Vec<Value>>();

        let call_inst = match func_ref {
            CallTarget::Direct(func_ref) => fx.bcx.ins().call(func_ref, &call_args),
            CallTarget::Indirect(sig, func_ptr) => {
                fx.bcx.ins().call_indirect(sig, func_ptr, &call_args)

        // FIXME find a cleaner way to support varargs
        if fn_sig.c_variadic {
            if !matches!(fn_sig.abi, Abi::C { .. }) {
                fx.tcx.sess.span_fatal(
                    &format!("Variadic call for non-C abi {:?}", fn_sig.abi),

            // Patch the call's signature to match the actual argument types.
            let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
            let abi_params = call_args
                    let ty = fx.bcx.func.dfg.value_type(arg);
                        // FIXME set %al to upperbound on float args once floats are supported
                        fx.tcx.sess.span_fatal(
                            &format!("Non int ty {:?} for variadic call", ty),
                .collect::<Vec<AbiParam>>();
            fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;

    if let Some(dest) = target {
        // Returning call: jump to the destination block.
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
        // Diverging call: everything after it is unreachable.
        fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
551 pub(crate) fn codegen_drop
<'tcx
>(
552 fx
: &mut FunctionCx
<'_
, '_
, 'tcx
>,
553 source_info
: mir
::SourceInfo
,
554 drop_place
: CPlace
<'tcx
>,
556 let ty
= drop_place
.layout().ty
;
557 let drop_instance
= Instance
::resolve_drop_in_place(fx
.tcx
, ty
).polymorphize(fx
.tcx
);
559 if let ty
::InstanceDef
::DropGlue(_
, None
) = drop_instance
.def
{
560 // we don't actually need to drop anything
563 ty
::Dynamic(_
, _
, ty
::Dyn
) => {
564 // IN THIS ARM, WE HAVE:
565 // ty = *mut (dyn Trait)
566 // which is: exists<T> ( *mut T, Vtable<T: Trait> )
569 // args = ( Data, Vtable )
576 let (ptr
, vtable
) = drop_place
.to_ptr_maybe_unsized();
577 let ptr
= ptr
.get_addr(fx
);
578 let drop_fn
= crate::vtable
::drop_fn_of_obj(fx
, vtable
.unwrap());
580 // FIXME(eddyb) perhaps move some of this logic into
581 // `Instance::resolve_drop_in_place`?
582 let virtual_drop
= Instance
{
583 def
: ty
::InstanceDef
::Virtual(drop_instance
.def_id(), 0),
584 substs
: drop_instance
.substs
,
587 RevealAllLayoutCx(fx
.tcx
).fn_abi_of_instance(virtual_drop
, ty
::List
::empty());
589 let sig
= clif_sig_from_fn_abi(fx
.tcx
, fx
.target_config
.default_call_conv
, &fn_abi
);
590 let sig
= fx
.bcx
.import_signature(sig
);
591 fx
.bcx
.ins().call_indirect(sig
, drop_fn
, &[ptr
]);
593 ty
::Dynamic(_
, _
, ty
::DynStar
) => {
594 // IN THIS ARM, WE HAVE:
595 // ty = *mut (dyn* Trait)
596 // which is: *mut exists<T: sizeof(T) == sizeof(usize)> (T, Vtable<T: Trait>)
609 // WE CAN CONVERT THIS INTO THE ABOVE LOGIC BY DOING
611 // data = &(*args[0]).0 // gives a pointer to Data above (really the same pointer)
612 // vtable = (*args[0]).1 // loads the vtable out
613 // (data, vtable) // an equivalent Rust `*mut dyn Trait`
615 // SO THEN WE CAN USE THE ABOVE CODE.
616 let (data
, vtable
) = drop_place
.to_cvalue(fx
).dyn_star_force_data_on_stack(fx
);
617 let drop_fn
= crate::vtable
::drop_fn_of_obj(fx
, vtable
);
619 let virtual_drop
= Instance
{
620 def
: ty
::InstanceDef
::Virtual(drop_instance
.def_id(), 0),
621 substs
: drop_instance
.substs
,
624 RevealAllLayoutCx(fx
.tcx
).fn_abi_of_instance(virtual_drop
, ty
::List
::empty());
626 let sig
= clif_sig_from_fn_abi(fx
.tcx
, fx
.target_config
.default_call_conv
, &fn_abi
);
627 let sig
= fx
.bcx
.import_signature(sig
);
628 fx
.bcx
.ins().call_indirect(sig
, drop_fn
, &[data
]);
631 assert
!(!matches
!(drop_instance
.def
, InstanceDef
::Virtual(_
, _
)));
634 RevealAllLayoutCx(fx
.tcx
).fn_abi_of_instance(drop_instance
, ty
::List
::empty());
636 let arg_value
= drop_place
.place_ref(
638 fx
.layout_of(fx
.tcx
.mk_ref(
639 fx
.tcx
.lifetimes
.re_erased
,
640 TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut }
,
643 let arg_value
= adjust_arg_for_abi(fx
, arg_value
, &fn_abi
.args
[0], true);
645 let mut call_args
: Vec
<Value
> = arg_value
.into_iter().collect
::<Vec
<_
>>();
647 if drop_instance
.def
.requires_caller_location(fx
.tcx
) {
648 // Pass the caller location for `#[track_caller]`.
649 let caller_location
= fx
.get_caller_location(source_info
);
651 adjust_arg_for_abi(fx
, caller_location
, &fn_abi
.args
[1], false).into_iter(),
655 let func_ref
= fx
.get_function_ref(drop_instance
);
656 fx
.bcx
.ins().call(func_ref
, &call_args
);