//! Handling of everything related to the calling convention. Also fills `fx.local_map`.
#[cfg(debug_assertions)]
mod comments;
mod pass_mode;
mod returning;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::AbiParam;
use smallvec::smallvec;

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::{can_return_to_ssa_var, codegen_return};

fn clif_sig_from_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
    let call_conv = match fn_abi.conv {
        Conv::Rust | Conv::C => CallConv::triple_default(triple),
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,
        | Conv::CCmseNonSecureCall
        | Conv::AvrNonBlockingInterrupt => {
            todo!("{:?}", fn_abi.conv)
        }
    };

    let inputs = fn_abi
        .args
        .iter()
        .map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter())
        .flatten();

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();
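    // The Cranelift signature therefore starts with the optional return-place pointer,
    // followed by the flattened input parameters.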

    Signature {
        params,
        returns,
        call_conv,
    }
}

pub(crate) fn get_function_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    inst: Instance<'tcx>,
) -> Signature {
    assert!(!inst.substs.needs_infer());
    clif_sig_from_fn_abi(
        tcx,
        triple,
        &FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]),
    )
}

/// Instance must be monomorphized
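/// Declares the function in the given module with `Linkage::Import` and returns its `FuncId`.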
pub(crate) fn import_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    module: &mut impl Module,
    inst: Instance<'tcx>,
) -> FuncId {
    let name = tcx.symbol_name(inst).name.to_string();
    let sig = get_function_sig(tcx, module.isa().triple(), inst);
    module
        .declare_function(&name, Linkage::Import, &sig)
        .unwrap()
}

impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, &mut self.cx.module, inst);
        let func_ref = self
            .cx
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);

        #[cfg(debug_assertions)]
        self.add_comment(func_ref, format!("{:?}", inst));

        func_ref
    }

    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> &[Value] {
        let sig = Signature {
            params,
            returns,
            call_conv: CallConv::triple_default(self.triple()),
        };
        let func_id = self
            .cx
            .module
            .declare_function(&name, Linkage::Import, &sig)
            .unwrap();
        let func_ref = self
            .cx
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);
        let call_inst = self.bcx.ins().call(func_ref, args);
        #[cfg(debug_assertions)]
        {
            self.add_comment(call_inst, format!("easy_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        assert!(results.len() <= 2, "{}", results.len());
        results
    }
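
    /// Call a library function, building the Cranelift signature from the Rust types of
    /// `args` and `return_ty` and wrapping the returned scalars back into a `CValue`.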
    pub(crate) fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| {
                (
                    AbiParam::new(self.clif_type(arg.layout().ty).unwrap()),
                    arg.load_scalar(self),
                )
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.types()
                .map(|ty| AbiParam::new(self.clif_type(ty).unwrap()))
                .collect()
        } else {
            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
        };
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
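        // Zero return values means a zero-sized return; one is a scalar and two form
        // a scalar pair.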
        match *ret_vals {
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
                return_layout,
            ),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
            _ => unreachable!(),
        }
    }
}

/// Make a [`CPlace`] capable of holding a value of the specified type.
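/// SSA-eligible locals become Cranelift variables; all other locals are backed by a stack slot.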
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    local: Local,
    layout: TyAndLayout<'tcx>,
    is_ssa: bool,
) -> CPlace<'tcx> {
    let place = if is_ssa {
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
        } else {
            CPlace::new_var(fx, local, layout)
        }
    } else {
        CPlace::new_stack_slot(fx, layout)
    };

    #[cfg(debug_assertions)]
    self::comments::add_local_place_comments(fx, place, local);

    place
}

pub(crate) fn codegen_fn_prelude<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    start_block: Block,
) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);

    let ssa_analyzed = crate::analyze::analyze(fx);

    #[cfg(debug_assertions)]
    self::comments::add_args_header_comment(fx);

    let mut block_params_iter = fx
        .bcx
        .func
        .dfg
        .block_params(start_block)
        .to_vec()
        .into_iter();
    let ret_place =
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }
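
    // `Spread` carries the unpacked components of a "rust-call" tuple argument; see the
    // `spread_arg` handling below.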
    let fn_abi = fx.fn_abi.take().unwrap();
    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.
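                // For example, a closure body invoked via `extern "rust-call"` with
                // `args: (u32, u32)` receives two separate `u32` ABI parameters, which
                // are reassembled into a single tuple local below.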
                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param =
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param =
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location =
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
    }

    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    #[cfg(debug_assertions)]
    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                let local_decl = &fx.mir.local_decls[local];
                // Note the `!`: a type that is not `Freeze` is internally mutable.
                let internally_mutable = !val.layout().ty.is_freeze(
                    fx.tcx.at(local_decl.source_info.span),
                    ParamEnv::reveal_all(),
                );
                if local_decl.mutability == mir::Mutability::Not && !internally_mutable {
                    // We won't mutate this argument, so it is fine to borrow the backing storage
                    // of this argument, to prevent a copy.

                    let place = if let Some(meta) = meta {
                        CPlace::for_ptr_with_extra(addr, meta, val.layout())
                    } else {
                        CPlace::for_ptr(addr, val.layout())
                    };

                    #[cfg(debug_assertions)]
                    self::comments::add_local_place_comments(fx, place, local);

                    assert_eq!(fx.local_map.push(place), local);
                    continue;
                }
            }
        }

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place
                            .place_field(fx, mir::Field::new(i))
                            .write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx
        .ins()
        .jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}

pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    span: Span,
    current_block: Block,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Option<(Place<'tcx>, BasicBlock)>,
) {
    let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
    let fn_sig = fx
        .tcx
        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

    let destination = destination.map(|(place, bb)| (codegen_place(fx, place), bb));

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
        let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
            .unwrap()
            .unwrap()
            .polymorphize(fx.tcx);

        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                destination,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(fx, instance, args, destination, span);
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // empty drop glue - a nop.
                let (_, dest) = destination.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        None
    };

    let extra_args = &args[fn_sig.inputs().len()..];
    let extra_args = extra_args
        .iter()
        .map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
        .collect::<Vec<_>>();
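    // Only C-variadic calls pass more operands than the declared inputs; the extra
    // argument types feed into the `FnAbi` computed below.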
    let fn_abi = if let Some(instance) = instance {
        FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
    } else {
        FnAbi::of_fn_ptr(
            &RevealAllLayoutCx(fx.tcx),
            fn_ty.fn_sig(fx.tcx),
            &extra_args,
        )
    };

    let is_cold = instance
        .map(|inst| {
            fx.tcx
                .codegen_fn_attrs(inst.def_id())
                .flags
                .contains(CodegenFnAttrFlags::COLD)
        })
        .unwrap_or(false);
    if is_cold {
        fx.cold_blocks.insert(current_block);
    }

    // Unpack arguments tuple for closures
    let args = if fn_sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_operand(fx, &args[0]);
        let pack_arg = codegen_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(pack_arg.value_field(fx, mir::Field::new(i)));
        }
        args
    } else {
        args.iter()
            .map(|arg| codegen_operand(fx, arg))
            .collect::<Vec<_>>()
    };

    //   | indirect call target
    //   |         | the first argument to be passed
    //   v         v
    let (func_ref, first_arg) = match instance {
        // Trait object call
        Some(Instance {
            def: InstanceDef::Virtual(_, idx),
            ..
        }) => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0],),
                );
            }
            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
            (Some(method), smallvec![ptr])
        }
        // Normal call
        Some(_) => (
            None,
            args.get(0)
                .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                .unwrap_or(smallvec![]),
        ),
        // Indirect call
        None => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }
            let func = codegen_operand(fx, func).load_scalar(fx);
            (
                Some(func),
                args.get(0)
                    .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                    .unwrap_or(smallvec![]),
            )
        }
    };

    let ret_place = destination.map(|(place, _)| place);
    let (call_inst, call_args) = self::returning::codegen_with_call_return_arg(
        fx,
        &fn_abi.ret,
        ret_place,
        |fx, return_ptr| {
            let regular_args_count = args.len();
            let mut call_args: Vec<Value> = return_ptr
                .into_iter()
                .chain(first_arg.into_iter())
                .chain(
                    args.into_iter()
                        .enumerate()
                        .skip(1)
                        .map(|(i, arg)| adjust_arg_for_abi(fx, arg, &fn_abi.args[i]).into_iter())
                        .flatten(),
                )
                .collect::<Vec<_>>();

            if instance
                .map(|inst| inst.def.requires_caller_location(fx.tcx))
                .unwrap_or(false)
            {
                // Pass the caller location for `#[track_caller]`.
                let caller_location = fx.get_caller_location(span);
                call_args.extend(
                    adjust_arg_for_abi(fx, caller_location, &fn_abi.args[regular_args_count])
                        .into_iter(),
                );
                assert_eq!(fn_abi.args.len(), regular_args_count + 1);
            } else {
                assert_eq!(fn_abi.args.len(), regular_args_count);
            }

            let call_inst = if let Some(func_ref) = func_ref {
                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
            } else {
                let func_ref =
                    fx.get_function_ref(instance.expect("non-indirect call on non-FnDef type"));
                fx.bcx.ins().call(func_ref, &call_args)
            };

            (call_inst, call_args)
        },
    );
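
    // `call_args` is returned alongside the call instruction so the C-variadic fixup
    // below can recompute the signature's params from the actual argument types.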

    // FIXME find a cleaner way to support varargs
    if fn_sig.c_variadic {
        if fn_sig.abi != Abi::C {
            fx.tcx.sess.span_fatal(
                span,
                &format!("Variadic call for non-C abi {:?}", fn_sig.abi),
            );
        }
        let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
        let abi_params = call_args
            .into_iter()
            .map(|arg| {
                let ty = fx.bcx.func.dfg.value_type(arg);
                if !ty.is_int() {
                    // FIXME set %al to upperbound on float args once floats are supported
                    fx.tcx
                        .sess
                        .span_fatal(span, &format!("Non int ty {:?} for variadic call", ty));
                }
                AbiParam::new(ty)
            })
            .collect::<Vec<AbiParam>>();
        fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
    }

    if let Some((_, dest)) = destination {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        trap_unreachable(fx, "[corruption] Diverging function returned");
    }
}

pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    span: Span,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // we don't actually need to drop anything
    } else {
        match ty.kind() {
            ty::Dynamic(..) => {
                let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());
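
                // `drop_fn` was just loaded out of the vtable; below we build a matching
                // `Virtual` instance so its `FnAbi` can be computed and the function
                // called indirectly.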
                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), virtual_drop, &[]);

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            _ => {
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), drop_instance, &[]);

                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(fx.tcx.mk_ref(
                        &ty::RegionKind::ReErased,
                        TypeAndMut {
                            ty,
                            mutbl: crate::rustc_hir::Mutability::Mut,
                        },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0]);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(span);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1]).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}