1 // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Handles translation of callees as well as other call-related
12 //! things. Callees are a superset of normal rust values and sometimes
13 //! have different representations. In particular, top-level fn items
14 //! and methods are represented as just a fn ptr and not a full
17 pub use self::CalleeData
::*;
18 pub use self::CallArgs
::*;
20 use arena
::TypedArena
;
21 use back
::symbol_names
;
22 use llvm
::{self, ValueRef, get_params}
;
23 use middle
::cstore
::LOCAL_CRATE
;
24 use rustc
::hir
::def_id
::DefId
;
27 use rustc
::hir
::map
as hir_map
;
28 use abi
::{Abi, FnType}
;
35 use cleanup
::CleanupMethods
;
37 use common
::{self, Block, Result, CrateContext, FunctionContext, C_undef}
;
40 use debuginfo
::DebugLoc
;
46 use machine
::llalign_of_min
;
48 use monomorphize
::{self, Instance}
;
53 use rustc
::ty
::{self, Ty, TyCtxt, TypeFoldable}
;
56 use syntax_pos
::DUMMY_SP
;
62 /// Constructor for enum variant/tuple-like-struct.
63 NamedTupleConstructor(Disr
),
70 /// Trait object found in the vtable at that index.
// NOTE(review): struct header only — the field list (original lines 76-79)
// and closing brace are elided in this extract. From usage elsewhere in the
// file it carries at least `data: CalleeData` and `ty: Ty<'tcx>` fields
// (see the struct literals around original lines 406-407 and 140) — confirm
// against the full source.
75 pub struct Callee
<'tcx
> {
80 impl<'tcx
> Callee
<'tcx
> {
// Builds a `Callee` directly from an rvalue fn-pointer datum.
// NOTE(review): the constructor body (original lines 83-87) is elided in
// this extract; only the signature is visible here.
82 pub fn ptr(datum
: Datum
<'tcx
, Rvalue
>) -> Callee
<'tcx
> {
89 /// Trait or impl method call.
90 pub fn method_call
<'blk
>(bcx
: Block
<'blk
, 'tcx
>,
91 method_call
: ty
::MethodCall
)
// Look up the typeck-resolved MethodCallee for this call in the
// type tables (panics if the call was never recorded there).
93 let method
= bcx
.tcx().tables
.borrow().method_map
[&method_call
];
// Delegate to `method`, which monomorphizes and resolves the callee.
94 Callee
::method(bcx
, method
)
97 /// Trait or impl method.
98 pub fn method
<'blk
>(bcx
: Block
<'blk
, 'tcx
>,
99 method
: ty
::MethodCallee
<'tcx
>) -> Callee
<'tcx
> {
// Substitute the enclosing function's type parameters into the
// method's substs so the callee is fully monomorphic.
100 let substs
= bcx
.fcx
.monomorphize(&method
.substs
);
// Resolve the (possibly trait-) method definition to a concrete callee.
101 Callee
::def(bcx
.ccx(), method
.def_id
, substs
)
104 /// Function or method definition.
// NOTE(review): several original lines (106, 108-110, 114-115, 120-121,
// 125, 129, 133-134, 136-139, 141-143) are elided in this extract, so the
// full parameter list, intermediate bindings (e.g. `tcx`, `def_id`) and
// some match arms are not visible here.
105 pub fn def
<'a
>(ccx
: &CrateContext
<'a
, 'tcx
>,
107 substs
: &'tcx subst
::Substs
<'tcx
>)
111 if substs
.self_ty().is_some() {
112 // Only trait methods can have a Self parameter.
113 return Callee
::trait_method(ccx
, def_id
, substs
);
// If the item has an inlined local copy, find its local NodeId so we
// can inspect the AST node below.
116 let maybe_node_id
= inline
::get_local_instance(ccx
, def_id
)
.and_then(|def_id
| tcx
.map
.as_local_node_id(def_id
));
118 let maybe_ast_node
= maybe_node_id
.and_then(|node_id
| {
119 tcx
.map
.find(node_id
)
// Classify the callee: tuple-struct/variant constructors and intrinsics
// get special CalleeData; everything else becomes a plain fn pointer.
122 let data
= match maybe_ast_node
{
123 Some(hir_map
::NodeStructCtor(_
)) => {
124 NamedTupleConstructor(Disr(0))
126 Some(hir_map
::NodeVariant(_
)) => {
127 let vinfo
= common
::inlined_variant_def(ccx
, maybe_node_id
.unwrap());
128 NamedTupleConstructor(Disr
::from(vinfo
.disr_val
))
130 Some(hir_map
::NodeForeignItem(fi
)) if {
131 let abi
= tcx
.map
.get_foreign_abi(fi
.id
);
132 abi
== Abi
::RustIntrinsic
|| abi
== Abi
::PlatformIntrinsic
135 _
=> return Callee
::ptr(get_fn(ccx
, def_id
, substs
))
140 ty
: def_ty(tcx
, def_id
, substs
)
144 /// Trait method, which has to be resolved to an impl method.
// NOTE(review): interior lines are elided throughout (e.g. 146, 148-150,
// 164, 170, 178-179, 184-187, 196-199, 201, 204-206); closing braces and
// some arguments of the match arms below are therefore not visible.
145 pub fn trait_method
<'a
>(ccx
: &CrateContext
<'a
, 'tcx
>,
147 substs
: &'tcx subst
::Substs
<'tcx
>)
// Build and normalize the trait reference for this method's Self/substs.
151 let method_item
= tcx
.impl_or_trait_item(def_id
)
;
152 let trait_id
= method_item
.container().id();
153 let trait_ref
= ty
::Binder(substs
.to_trait_ref(tcx
, trait_id
));
154 let trait_ref
= tcx
.normalize_associated_type(&trait_ref
);
// Trait selection decides which kind of impl satisfies the obligation;
// each vtable kind is translated differently below.
155 match common
::fulfill_obligation(ccx
.shared(), DUMMY_SP
, trait_ref
) {
156 traits
::VtableImpl(vtable_impl
) => {
157 let impl_did
= vtable_impl
.impl_def_id
;
158 let mname
= tcx
.item_name(def_id
);
159 // create a concatenated set of substitutions which includes
160 // those from the impl and those from the method:
161 let impl_substs
= vtable_impl
.substs
.with_method_from(&substs
);
162 let substs
= tcx
.mk_substs(impl_substs
);
163 let mth
= meth
::get_impl_method(tcx
, impl_did
, substs
, mname
);
165 // Translate the function, bypassing Callee::def.
166 // That is because default methods have the same ID as the
167 // trait method used to look up the impl method that ended
168 // up here, so calling Callee::def would infinitely recurse.
169 Callee
::ptr(get_fn(ccx
, mth
.method
.def_id
, mth
.substs
))
// A closure implementing Fn/FnMut/FnOnce: translate its call method.
171 traits
::VtableClosure(vtable_closure
) => {
172 // The substitutions should have no type parameters remaining
173 // after passing through fulfill_obligation
174 let trait_closure_kind
= tcx
.lang_items
.fn_trait_kind(trait_id
).unwrap();
175 let llfn
= closure
::trans_closure_method(ccx
,
176 vtable_closure
.closure_def_id
,
177 vtable_closure
.substs
,
180 let method_ty
= def_ty(tcx
, def_id
, substs
);
181 let fn_ptr_ty
= match method_ty
.sty
{
182 ty
::TyFnDef(_
, _
, fty
) => tcx
.mk_fn_ptr(fty
),
183 _
=> bug
!("expected fn item type, found {}",
186 Callee
::ptr(immediate_rvalue(llfn
, fn_ptr_ty
))
// A bare fn pointer implementing Fn/FnMut/FnOnce: use the adapter shim.
188 traits
::VtableFnPointer(vtable_fn_pointer
) => {
189 let trait_closure_kind
= tcx
.lang_items
.fn_trait_kind(trait_id
).unwrap();
190 let llfn
= trans_fn_pointer_shim(ccx
, trait_closure_kind
, vtable_fn_pointer
.fn_ty
);
192 let method_ty
= def_ty(tcx
, def_id
, substs
);
193 let fn_ptr_ty
= match method_ty
.sty
{
194 ty
::TyFnDef(_
, _
, fty
) => tcx
.mk_fn_ptr(fty
),
195 _
=> bug
!("expected fn item type, found {}",
198 Callee
::ptr(immediate_rvalue(llfn
, fn_ptr_ty
))
// Dynamic dispatch: record the vtable slot index; the actual fn
// pointer is loaded from the vtable at the call site.
200 traits
::VtableObject(ref data
) => {
202 data
: Virtual(tcx
.get_vtable_index_of_object_method(data
, def_id
)),
203 ty
: def_ty(tcx
, def_id
, substs
)
// Any other vtable kind is a selection bug at this stage.
207 bug
!("resolved vtable bad vtable {:?} in trans", vtable
);
212 /// Get the abi::FnType for a direct call. Mainly deals with the fact
213 /// that a Virtual call doesn't take the vtable, like its shim does.
214 /// The extra argument types are for variadic (extern "C") functions.
215 pub fn direct_fn_type
<'a
>(&self, ccx
: &CrateContext
<'a
, 'tcx
>,
216 extra_args
: &[Ty
<'tcx
>]) -> FnType
{
// Erase late-bound regions and normalize so the signature is concrete
// before computing the ABI-level FnType.
217 let abi
= self.ty
.fn_abi();
218 let sig
= ccx
.tcx().erase_late_bound_regions(self.ty
.fn_sig());
219 let sig
= ccx
.tcx().normalize_associated_type(&sig
);
220 let mut fn_ty
= FnType
::unadjusted(ccx
, abi
, &sig
, extra_args
);
221 if let Virtual(_
) = self.data
{
222 // Don't pass the vtable, it's not an argument of the virtual fn.
223 fn_ty
.args
[1].ignore();
// Apply ABI-specific adjustments (indirect returns, casts, attributes).
225 fn_ty
.adjust_for_abi(ccx
, abi
, &sig
);
229 /// This behemoth of a function translates function calls. Unfortunately, in
230 /// order to generate more efficient LLVM output at -O0, it has quite a complex
231 /// signature (refactoring this into two functions seems like a good idea).
233 /// In particular, for lang items, it is invoked with a dest of None, and in
234 /// that case the return value contains the result of the fn. The lang item must
235 /// not return a structural type or else all heck breaks loose.
237 /// For non-lang items, `dest` is always Some, and hence the result is written
238 /// into memory somewhere. Nonetheless we return the actual return value of the
// NOTE(review): original line 241 (the `debug_loc` parameter, referenced in
// the body below) is elided in this extract.
240 pub fn call
<'a
, 'blk
>(self, bcx
: Block
<'blk
, 'tcx
>,
242 args
: CallArgs
<'a
, 'tcx
>,
243 dest
: Option
<expr
::Dest
>)
244 -> Result
<'blk
, 'tcx
> {
// Thin wrapper: all the real work happens in the free fn below.
245 trans_call_inner(bcx
, debug_loc
, self, args
, dest
)
248 /// Turn the callee into a function pointer.
// NOTE(review): lines 253-256, 258-259, 262, 266, 268 are elided, so the
// match arms for the `Fn(..)` and `Virtual(..)` cases are only partially
// visible here.
249 pub fn reify
<'a
>(self, ccx
: &CrateContext
<'a
, 'tcx
>)
250 -> Datum
<'tcx
, Rvalue
> {
// A fn item type is "reified" to the corresponding fn pointer type.
251 let fn_ptr_ty
= match self.ty
.sty
{
252 ty
::TyFnDef(_
, _
, f
) => ccx
.tcx().mk_fn_ptr(f
),
257 immediate_rvalue(llfn
, fn_ptr_ty
)
// Virtual (trait-object) callees get a shim that loads the method
// out of the vtable, so a plain fn pointer can stand in for them.
260 let llfn
= meth
::trans_object_shim(ccx
, self.ty
, idx
);
261 immediate_rvalue(llfn
, fn_ptr_ty
)
263 NamedTupleConstructor(_
) => match self.ty
.sty
{
264 ty
::TyFnDef(def_id
, substs
, _
) => {
265 return get_fn(ccx
, def_id
, substs
);
267 _
=> bug
!("expected fn item type, found {}", self.ty
)
// Intrinsics have no callable address; reifying one is a compiler bug.
269 Intrinsic
=> bug
!("intrinsic {} getting reified", self.ty
)
274 /// Given a DefId and some Substs, produces the monomorphic item type.
// NOTE(review): original line 276 (the `def_id: DefId` parameter) and the
// closing brace are elided in this extract.
275 fn def_ty
<'a
, 'tcx
>(tcx
: TyCtxt
<'a
, 'tcx
, 'tcx
>,
277 substs
: &'tcx subst
::Substs
<'tcx
>)
// Look up the generic item type, then substitute the concrete type
// parameters in.
279 let ty
= tcx
.lookup_item_type(def_id
).ty
;
280 monomorphize
::apply_param_substs(tcx
, substs
, &ty
)
283 /// Translates an adapter that implements the `Fn` trait for a fn
284 /// pointer. This is basically the equivalent of something like:
287 /// impl<'a> Fn(&'a int) -> &'a int for fn(&int) -> &int {
288 /// extern "rust-abi" fn call(&self, args: (&'a int,)) -> &'a int {
294 /// but for the bare function type given.
// NOTE(review): many interior lines are elided in this extract (e.g.
// 299-300, 302-303, 311-312, 320-326, 329-332, 336-338, 345-357,
// 363-366, 370-372, 374-379, 387, 393-399, 404-405, 408-409, 411,
// 415-418), including the return type, several match arms/closing braces,
// and the final return of `llfn`.
295 pub fn trans_fn_pointer_shim
<'a
, 'tcx
>(
296 ccx
: &'a CrateContext
<'a
, 'tcx
>,
297 closure_kind
: ty
::ClosureKind
,
298 bare_fn_ty
: Ty
<'tcx
>)
301 let _icx
= push_ctxt("trans_fn_pointer_shim");
304 // Normalize the type for better caching.
305 let bare_fn_ty
= tcx
.erase_regions(&bare_fn_ty
);
307 // If this is an impl of `Fn` or `FnMut` trait, the receiver is `&self`.
308 let is_by_ref
= match closure_kind
{
309 ty
::ClosureKind
::Fn
| ty
::ClosureKind
::FnMut
=> true,
310 ty
::ClosureKind
::FnOnce
=> false,
// If shimming a fn item (as opposed to a fn pointer), resolve it to an
// actual pointer now; `None` means the pointer arrives as `self` at runtime.
313 let llfnpointer
= match bare_fn_ty
.sty
{
314 ty
::TyFnDef(def_id
, substs
, _
) => {
315 // Function definitions have to be turned into a pointer.
316 let llfn
= Callee
::def(ccx
, def_id
, substs
).reify(ccx
).val
;
318 // A by-value fn item is ignored, so the shim has
319 // the same signature as the original function.
// For Fn/FnMut the cache key is `&bare_fn_ty` (receiver is a reference).
327 let bare_fn_ty_maybe_ref
= if is_by_ref
{
328 tcx
.mk_imm_ref(tcx
.mk_region(ty
::ReErased
), bare_fn_ty
)
333 // Check if we already trans'd this shim.
334 match ccx
.fn_pointer_shims().borrow().get(&bare_fn_ty_maybe_ref
) {
335 Some(&llval
) => { return llval; }
339 debug
!("trans_fn_pointer_shim(bare_fn_ty={:?})",
342 // Construct the "tuply" version of `bare_fn_ty`. It takes two arguments: `self`,
343 // which is the fn pointer, and `args`, which is the arguments tuple.
344 let sig
= match bare_fn_ty
.sty
{
346 &ty
::BareFnTy
{ unsafety
: hir
::Unsafety
::Normal
,
349 ty
::TyFnPtr(&ty
::BareFnTy
{ unsafety
: hir
::Unsafety
::Normal
,
354 bug
!("trans_fn_pointer_shim invoked on invalid type: {}",
358 let sig
= tcx
.erase_late_bound_regions(sig
);
359 let sig
= ccx
.tcx().normalize_associated_type(&sig
);
360 let tuple_input_ty
= tcx
.mk_tup(sig
.inputs
.to_vec());
361 let sig
= ty
::FnSig
{
362 inputs
: vec
![bare_fn_ty_maybe_ref
,
// The shim itself uses the rust-call ABI: (self, args-tuple).
367 let fn_ty
= FnType
::new(ccx
, Abi
::RustCall
, &sig
, &[]);
368 let tuple_fn_ty
= tcx
.mk_fn_ptr(tcx
.mk_bare_fn(ty
::BareFnTy
{
369 unsafety
: hir
::Unsafety
::Normal
,
373 debug
!("tuple_fn_ty: {:?}", tuple_fn_ty
);
// Declare the shim as an internal LLVM function with a mangled name.
377 symbol_names
::internal_name_from_type_and_suffix(ccx
,
380 let llfn
= declare
::define_internal_fn(ccx
, &function_name
, tuple_fn_ty
);
381 attributes
::set_frame_pointer_elimination(ccx
, llfn
);
// Build a minimal FunctionContext / entry block for the shim body.
383 let (block_arena
, fcx
): (TypedArena
<_
>, FunctionContext
);
384 block_arena
= TypedArena
::new();
385 fcx
= FunctionContext
::new(ccx
, llfn
, fn_ty
, None
, &block_arena
);
386 let mut bcx
= fcx
.init(false, None
);
388 let llargs
= get_params(fcx
.llfn
);
// If the return is indirect, arg 0 is the out-pointer and `self` shifts
// to index 1.
390 let self_idx
= fcx
.fn_ty
.ret
.is_indirect() as usize;
391 let llfnpointer
= llfnpointer
.unwrap_or_else(|| {
392 // the first argument (`self`) will be ptr to the fn pointer
394 Load(bcx
, llargs
[self_idx
])
400 assert
!(!fcx
.needs_ret_allocas
);
402 let dest
= fcx
.llretslotptr
.get().map(|_
|
403 expr
::SaveIn(fcx
.get_ret_slot(bcx
, "ret_slot"))
// Forward the remaining LLVM params to the target fn pointer.
406 let callee
= Callee
{
407 data
: Fn(llfnpointer
),
410 bcx
= callee
.call(bcx
, DebugLoc
::None
, ArgVals(&llargs
[(self_idx
+ 1)..]), dest
).bcx
;
412 fcx
.finish(bcx
, DebugLoc
::None
);
// Memoize the shim so repeated requests for the same type reuse it.
414 ccx
.fn_pointer_shims().borrow_mut().insert(bare_fn_ty_maybe_ref
, llfn
);
419 /// Translates a reference to a fn/method item, monomorphizing and
420 /// inlining as it goes.
424 /// - `ccx`: the crate context
425 /// - `def_id`: def id of the fn or method item being referenced
426 /// - `substs`: values for each of the fn/method's parameters
// NOTE(review): interior lines are elided in this extract (e.g. 428, 431-432,
// 434, 437, 441, 444, 446, 451-452, 455, 457-459, 462, 465, 470, 476-477,
// 479, 482-483, 490, 492-493, 497-498, 503, 506, 509-514, 538, 546-548,
// 553, 556, 558-560, 564, 570-574, 576), including the `def_id` parameter,
// a `tcx` binding, and several match arms and closing braces.
427 fn get_fn
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>,
429 substs
: &'tcx subst
::Substs
<'tcx
>)
430 -> Datum
<'tcx
, Rvalue
> {
433 debug
!("get_fn(def_id={:?}, substs={:?})", def_id
, substs
);
// Callers must hand us fully-inferred, region-closed substitutions.
435 assert
!(!substs
.types
.needs_infer());
436 assert
!(!substs
.types
.has_escaping_regions());
438 // Check whether this fn has an inlined copy and, if so, redirect
439 // def_id to the local id of the inlined copy.
440 let def_id
= inline
::maybe_instantiate_inline(ccx
, def_id
);
// Local helper: does this def refer to a tuple-struct or tuple-variant
// constructor? Those must be monomorphized even with empty substs.
442 fn is_named_tuple_constructor(tcx
: TyCtxt
, def_id
: DefId
) -> bool
{
443 let node_id
= match tcx
.map
.as_local_node_id(def_id
) {
445 None
=> { return false; }
447 let map_node
= errors
::expect(
448 &tcx
.sess
.diagnostic(),
449 tcx
.map
.find(node_id
),
450 || "local item should be in ast map".to_string());
453 hir_map
::NodeVariant(v
) => {
454 v
.node
.data
.is_tuple()
456 hir_map
::NodeStructCtor(_
) => true,
460 let must_monomorphise
=
461 !substs
.types
.is_empty() || is_named_tuple_constructor(tcx
, def_id
);
463 debug
!("get_fn({:?}) must_monomorphise: {}",
464 def_id
, must_monomorphise
);
466 // Create a monomorphic version of generic functions
467 if must_monomorphise
{
468 // Should be either intra-crate or inlined.
469 assert_eq
!(def_id
.krate
, LOCAL_CRATE
);
471 let substs
= tcx
.mk_substs(substs
.clone().erase_regions());
472 let (val
, fn_ty
) = monomorphize
::monomorphic_fn(ccx
, def_id
, substs
);
473 let fn_ptr_ty
= match fn_ty
.sty
{
474 ty
::TyFnDef(_
, _
, fty
) => {
475 // Create a fn pointer with the substituted signature.
478 _
=> bug
!("expected fn item type, found {}", fn_ty
)
480 assert_eq
!(type_of
::type_of(ccx
, fn_ptr_ty
), common
::val_ty(val
));
481 return immediate_rvalue(val
, fn_ptr_ty
);
484 // Find the actual function pointer.
485 let ty
= ccx
.tcx().lookup_item_type(def_id
).ty
;
486 let fn_ptr_ty
= match ty
.sty
{
487 ty
::TyFnDef(_
, _
, ref fty
) => {
488 // Create a fn pointer with the normalized signature.
489 tcx
.mk_fn_ptr(tcx
.normalize_associated_type(fty
))
491 _
=> bug
!("expected fn item type, found {}", ty
)
// Fast path: reuse a previously-translated instance if we have one.
494 let instance
= Instance
::mono(ccx
.shared(), def_id
);
495 if let Some(&llfn
) = ccx
.instances().borrow().get(&instance
) {
496 return immediate_rvalue(llfn
, fn_ptr_ty
);
// Determine whether the item is defined locally (with a body); the span
// is kept for the duplicate-symbol diagnostic below.
499 let local_id
= ccx
.tcx().map
.as_local_node_id(def_id
);
500 let local_item
= match local_id
.and_then(|id
| tcx
.map
.find(id
)) {
501 Some(hir_map
::NodeItem(&hir
::Item
{
502 span
, node
: hir
::ItemFn(..), ..
504 Some(hir_map
::NodeTraitItem(&hir
::TraitItem
{
505 span
, node
: hir
::MethodTraitItem(_
, Some(_
)), ..
507 Some(hir_map
::NodeImplItem(&hir
::ImplItem
{
508 span
, node
: hir
::ImplItemKind
::Method(..), ..
515 // This is subtle and surprising, but sometimes we have to bitcast
516 // the resulting fn pointer. The reason has to do with external
517 // functions. If you have two crates that both bind the same C
518 // library, they may not use precisely the same types: for
519 // example, they will probably each declare their own structs,
520 // which are distinct types from LLVM's point of view (nominal
523 // Now, if those two crates are linked into an application, and
524 // they contain inlined code, you can wind up with a situation
525 // where both of those functions wind up being loaded into this
526 // application simultaneously. In that case, the same function
527 // (from LLVM's point of view) requires two types. But of course
528 // LLVM won't allow one function to have two types.
530 // What we currently do, therefore, is declare the function with
531 // one of the two types (whichever happens to come first) and then
532 // bitcast as needed when the function is referenced to make sure
533 // it has the type we expect.
535 // This can occur on either a crate-local or crate-external
536 // reference. It also occurs when testing libcore and in some
537 // other weird situations. Annoying.
539 let sym
= instance
.symbol_name(ccx
.shared());
540 let llptrty
= type_of
::type_of(ccx
, fn_ptr_ty
);
541 let llfn
= if let Some(llfn
) = declare
::get_declared_value(ccx
, &sym
) {
542 if let Some(span
) = local_item
{
543 if declare
::get_defined_value(ccx
, &sym
).is_some() {
544 ccx
.sess().span_fatal(span
,
545 &format
!("symbol `{}` is already defined", sym
));
549 if common
::val_ty(llfn
) != llptrty
{
550 if local_item
.is_some() {
551 bug
!("symbol `{}` previously declared as {:?}, now wanted as {:?}",
552 sym
, Value(llfn
), llptrty
);
554 debug
!("get_fn: casting {:?} to {:?}", llfn
, llptrty
);
555 consts
::ptrcast(llfn
, llptrty
)
557 debug
!("get_fn: not casting pointer!");
// Not yet declared: declare it fresh with the item type.
561 let llfn
= declare
::declare_fn(ccx
, &sym
, ty
);
562 assert_eq
!(common
::val_ty(llfn
), llptrty
);
563 debug
!("get_fn: not casting pointer!");
// Apply #[attr]-derived LLVM attributes from the item definition.
565 let attrs
= ccx
.tcx().get_attrs(def_id
);
566 attributes
::from_fn_attrs(ccx
, &attrs
, llfn
);
567 if local_item
.is_some() {
568 // FIXME(eddyb) Doubt all extern fn should allow unwinding.
569 attributes
::unwind(llfn
, true);
// Cache the declared value for future lookups.
575 ccx
.instances().borrow_mut().insert(instance
, llfn
);
577 immediate_rvalue(llfn
, fn_ptr_ty
)
580 // ______________________________________________________________________
// Workhorse behind `Callee::call`: evaluates arguments, sets up the return
// slot, emits the LLVM call/invoke, and runs cleanups.
// NOTE(review): many interior lines are elided in this extract (e.g. 584,
// 596-598, 603, 608-611, 613, 615, 618, 620-622, 625, 627-635, 640, 644,
// 650, 653-660, 664-665, 667, 672, 674-675, 680, 682, 684-685, 689, 694,
// 698-701, 703, 711-714, 716-718), including the `debug_loc` parameter,
// the `fcx`/`ccx` bindings, and several match arms and closing braces.
583 fn trans_call_inner
<'a
, 'blk
, 'tcx
>(mut bcx
: Block
<'blk
, 'tcx
>,
585 callee
: Callee
<'tcx
>,
586 args
: CallArgs
<'a
, 'tcx
>,
587 dest
: Option
<expr
::Dest
>)
588 -> Result
<'blk
, 'tcx
> {
589 // Introduce a temporary cleanup scope that will contain cleanups
590 // for the arguments while they are being evaluated. The purpose
591 // this cleanup is to ensure that, should a panic occur while
592 // evaluating argument N, the values for arguments 0...N-1 are all
593 // cleaned up. If no panic occurs, the values are handed off to
594 // the callee, and hence none of the cleanups in this temporary
595 // scope will ever execute.
599 let abi
= callee
.ty
.fn_abi();
600 let sig
= callee
.ty
.fn_sig();
601 let output
= bcx
.tcx().erase_late_bound_regions(&sig
.output());
602 let output
= bcx
.tcx().normalize_associated_type(&output
);
// Variadic (non rust-call) calls can pass more args than the signature
// declares; collect the extra argument types for ABI computation.
604 let extra_args
= match args
{
605 ArgExprs(args
) if abi
!= Abi
::RustCall
=> {
606 args
[sig
.0.inputs
.len()..].iter().map(|expr
| {
607 common
::expr_ty_adjusted(bcx
, expr
)
612 let fn_ty
= callee
.direct_fn_type(ccx
, &extra_args
);
// Intrinsics and tuple constructors short-circuit: they never become
// real LLVM calls.
614 let mut callee
= match callee
.data
{
616 assert
!(abi
== Abi
::RustIntrinsic
|| abi
== Abi
::PlatformIntrinsic
);
617 assert
!(dest
.is_some());
619 return intrinsic
::trans_intrinsic_call(bcx
, callee
.ty
, &fn_ty
,
623 NamedTupleConstructor(disr
) => {
624 assert
!(dest
.is_some());
626 return base
::trans_named_tuple_constructor(bcx
,
636 // Generate a location to store the result. If the user does
637 // not care about the result, just make a stack slot.
638 let opt_llretslot
= dest
.and_then(|dest
| match dest
{
639 expr
::SaveIn(dst
) => Some(dst
),
641 let needs_drop
= || match output
{
642 ty
::FnConverging(ret_ty
) => bcx
.fcx
.type_needs_drop(ret_ty
),
643 ty
::FnDiverging
=> false
645 if fn_ty
.ret
.is_indirect() || fn_ty
.ret
.cast
.is_some() || needs_drop() {
646 // Push the out-pointer if we use an out-pointer for this
647 // return type, otherwise push "undef".
648 if fn_ty
.ret
.is_ignore() {
649 Some(C_undef(fn_ty
.ret
.original_ty
.ptr_to()))
651 let llresult
= alloca(bcx
, fn_ty
.ret
.original_ty
, "__llret");
652 call_lifetime_start(bcx
, llresult
);
661 // If there no destination, return must be direct, with no cast.
662 if opt_llretslot
.is_none() {
663 assert
!(!fn_ty
.ret
.is_indirect() && fn_ty
.ret
.cast
.is_none());
666 let mut llargs
= Vec
::new();
// Indirect returns pass the (possibly casted) return slot as arg 0.
668 if fn_ty
.ret
.is_indirect() {
669 let mut llretslot
= opt_llretslot
.unwrap();
670 if let Some(ty
) = fn_ty
.ret
.cast
{
671 llretslot
= PointerCast(bcx
, llretslot
, ty
.ptr_to());
673 llargs
.push(llretslot
);
// Evaluate the arguments inside the temporary cleanup scope.
676 let arg_cleanup_scope
= fcx
.push_custom_cleanup_scope();
677 bcx
= trans_args(bcx
, abi
, &fn_ty
, &mut callee
, args
, &mut llargs
,
678 cleanup
::CustomScope(arg_cleanup_scope
));
679 fcx
.scopes
.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean();
// At this point the callee must have been resolved to a concrete fn
// pointer (virtual callees are patched in during argument translation).
681 let llfn
= match callee
{
683 _
=> bug
!("expected fn pointer callee, found {:?}", callee
)
686 let (llret
, mut bcx
) = base
::invoke(bcx
, llfn
, &llargs
, debug_loc
);
687 if !bcx
.unreachable
.get() {
688 fn_ty
.apply_attrs_callsite(llret
);
690 // If the function we just called does not use an outpointer,
691 // store the result into the rust outpointer. Cast the outpointer
692 // type to match because some ABIs will use a different type than
693 // the Rust type. e.g., a {u32,u32} struct could be returned as
695 if !fn_ty
.ret
.is_indirect() {
696 if let Some(llretslot
) = opt_llretslot
{
697 fn_ty
.ret
.store(&bcx
.build(), llret
, llretslot
);
702 fcx
.pop_and_trans_custom_cleanup_scope(bcx
, arg_cleanup_scope
);
704 // If the caller doesn't care about the result of this fn call,
705 // drop the temporary slot we made.
706 match (dest
, opt_llretslot
, output
) {
707 (Some(expr
::Ignore
), Some(llretslot
), ty
::FnConverging(ret_ty
)) => {
708 // drop the value if it is not being saved.
709 bcx
= glue
::drop_ty(bcx
, llretslot
, ret_ty
, debug_loc
);
710 call_lifetime_end(bcx
, llretslot
);
// Diverging calls never return; presumably an Unreachable is emitted
// in the elided lines here — confirm against the full source.
715 if output
== ty
::FnDiverging
{
719 Result
::new(bcx
, llret
)
// The different ways a caller can hand arguments to `trans_args`.
// NOTE(review): lines 726, 729, 731, 736 and the enum's closing brace are
// elided in this extract.
722 pub enum CallArgs
<'a
, 'tcx
> {
723 /// Supply value of arguments as a list of expressions that must be
724 /// translated. This is used in the common case of `foo(bar, qux)`.
725 ArgExprs(&'a
[P
<hir
::Expr
>]),
727 /// Supply value of arguments as a list of LLVM value refs; frequently
728 /// used with lang items and so forth, when the argument is an internal
730 ArgVals(&'a
[ValueRef
]),
732 /// For overloaded operators: `(lhs, Option(rhs))`.
733 /// `lhs` is the left-hand-side and `rhs` is the datum
734 /// of the right-hand-side argument (if any).
735 ArgOverloadedOp(Datum
<'tcx
, Expr
>, Option
<Datum
<'tcx
, Expr
>>),
737 /// Supply value of arguments as a list of expressions that must be
738 /// translated, for overloaded call operators.
739 ArgOverloadedCall(Vec
<&'a hir
::Expr
>),
// Translates arguments for a rust-call ABI invocation: the receiver is
// passed first, then the final tuple argument is flattened into its
// individual fields.
// NOTE(review): interior lines are elided (e.g. 746, 749-752, 756,
// 758-760, 764, 770, 772-773, 778-780, 782, 784, 786-790, 793-798),
// including the `fn_ty` parameter, the return type, `arg_idx` binding and
// several trailing call arguments / closing braces.
742 fn trans_args_under_call_abi
<'blk
, 'tcx
>(
743 mut bcx
: Block
<'blk
, 'tcx
>,
744 arg_exprs
: &[P
<hir
::Expr
>],
745 callee
: &mut CalleeData
,
747 llargs
: &mut Vec
<ValueRef
>,
748 arg_cleanup_scope
: cleanup
::ScopeId
)
753 // Translate the `self` argument first.
754 let arg_datum
= unpack_datum
!(bcx
, expr
::trans(bcx
, &arg_exprs
[0]));
755 bcx
= trans_arg_datum(bcx
,
757 callee
, fn_ty
, &mut arg_idx
,
761 // Now untuple the rest of the arguments.
762 let tuple_expr
= &arg_exprs
[1];
763 let tuple_type
= common
::node_id_type(bcx
, tuple_expr
.id
);
765 match tuple_type
.sty
{
766 ty
::TyTuple(ref field_types
) => {
767 let tuple_datum
= unpack_datum
!(bcx
,
768 expr
::trans(bcx
, &tuple_expr
));
769 let tuple_lvalue_datum
=
771 tuple_datum
.to_lvalue_datum(bcx
,
774 let repr
= adt
::represent_type(bcx
.ccx(), tuple_type
);
775 let repr_ptr
= &repr
;
// Pass each tuple field as its own argument, in order.
776 for (i
, field_type
) in field_types
.iter().enumerate() {
777 let arg_datum
= tuple_lvalue_datum
.get_element(
781 adt
::trans_field_ptr(bcx
, repr_ptr
, srcval
, Disr(0), i
)
783 bcx
= trans_arg_datum(bcx
,
785 callee
, fn_ty
, &mut arg_idx
,
// Typeck guarantees the last rust-call argument is a tuple.
791 span_bug
!(tuple_expr
.span
,
792 "argument to `.call()` wasn't a tuple?!")
// Translates the argument list for a call, dispatching on how the caller
// supplied the arguments (expressions, raw LLVM values, overloaded-op
// datums, or overloaded-call expressions) and appending to `llargs`.
// NOTE(review): interior lines are elided (e.g. 800-801, 808, 810-813,
// 817, 824-827, 831, 833-836, 839, 842, 844-847, 851-853, 857-865, 870,
// 872-878), including the `abi`/`fn_ty` parameters, the `arg_idx` binding,
// the outer `match args` opener, the `ArgVals` arm header, and several
// trailing call arguments / closing braces.
799 pub fn trans_args
<'a
, 'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
802 callee
: &mut CalleeData
,
803 args
: CallArgs
<'a
, 'tcx
>,
804 llargs
: &mut Vec
<ValueRef
>,
805 arg_cleanup_scope
: cleanup
::ScopeId
)
806 -> Block
<'blk
, 'tcx
> {
807 debug
!("trans_args(abi={})", abi
);
809 let _icx
= push_ctxt("trans_args");
814 // First we figure out the caller's view of the types of the arguments.
815 // This will be needed if this is a generic call, because the callee has
816 // to cast her view of the arguments to the caller's view.
818 ArgExprs(arg_exprs
) => {
819 if abi
== Abi
::RustCall
{
820 // This is only used for direct calls to the `call`,
821 // `call_mut` or `call_once` functions.
822 return trans_args_under_call_abi(bcx
,
823 arg_exprs
, callee
, fn_ty
,
// Non rust-call: translate each argument expression in order.
828 for arg_expr
in arg_exprs
{
829 let arg_datum
= unpack_datum
!(bcx
, expr
::trans(bcx
, &arg_expr
));
830 bcx
= trans_arg_datum(bcx
,
832 callee
, fn_ty
, &mut arg_idx
,
837 ArgOverloadedCall(arg_exprs
) => {
838 for expr
in arg_exprs
{
840 unpack_datum
!(bcx
, expr
::trans(bcx
, expr
));
841 bcx
= trans_arg_datum(bcx
,
843 callee
, fn_ty
, &mut arg_idx
,
848 ArgOverloadedOp(lhs
, rhs
) => {
849 bcx
= trans_arg_datum(bcx
, lhs
,
850 callee
, fn_ty
, &mut arg_idx
,
854 if let Some(rhs
) = rhs
{
855 bcx
= trans_arg_datum(bcx
, rhs
,
856 callee
, fn_ty
, &mut arg_idx
,
// ArgVals + Virtual callee: the fn pointer comes from the vtable
// (vs[1]); patch `callee` to that pointer and skip the vtable args.
866 let fn_ptr
= meth
::get_virtual_method(bcx
, vs
[1], idx
);
867 let llty
= fn_ty
.llvm_type(bcx
.ccx()).ptr_to();
868 *callee
= Fn(PointerCast(bcx
, fn_ptr
, llty
));
869 llargs
.extend_from_slice(&vs
[2..]);
871 _
=> llargs
.extend_from_slice(vs
)
879 fn trans_arg_datum
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
880 arg_datum
: Datum
<'tcx
, Expr
>,
881 callee
: &mut CalleeData
,
883 next_idx
: &mut usize,
884 arg_cleanup_scope
: cleanup
::ScopeId
,
885 llargs
: &mut Vec
<ValueRef
>)
886 -> Block
<'blk
, 'tcx
> {
887 let _icx
= push_ctxt("trans_arg_datum");
890 debug
!("trans_arg_datum({:?})", arg_datum
);
892 let arg
= &fn_ty
.args
[*next_idx
];
895 // Fill padding with undef value, where applicable.
896 if let Some(ty
) = arg
.pad
{
897 llargs
.push(C_undef(ty
));
900 // Determine whether we want a by-ref datum even if not appropriate.
901 let want_by_ref
= arg
.is_indirect() || arg
.cast
.is_some();
903 let fat_ptr
= common
::type_is_fat_ptr(bcx
.tcx(), arg_datum
.ty
);
904 let (by_ref
, val
) = if fat_ptr
&& !bcx
.fcx
.type_needs_drop(arg_datum
.ty
) {
905 (true, arg_datum
.val
)
907 // Make this an rvalue, since we are going to be
908 // passing ownership.
909 let arg_datum
= unpack_datum
!(
910 bcx
, arg_datum
.to_rvalue_datum(bcx
, "arg"));
912 // Now that arg_datum is owned, get it into the appropriate
913 // mode (ref vs value).
914 let arg_datum
= unpack_datum
!(bcx
, if want_by_ref
{
915 arg_datum
.to_ref_datum(bcx
)
917 arg_datum
.to_appropriate_datum(bcx
)
920 // Technically, ownership of val passes to the callee.
921 // However, we must cleanup should we panic before the
922 // callee is actually invoked.
923 (arg_datum
.kind
.is_by_ref(),
924 arg_datum
.add_clean(bcx
.fcx
, arg_cleanup_scope
))
931 debug
!("--- trans_arg_datum passing {:?}", Value(val
));
934 // Fat pointers should be passed without any transformations.
935 assert
!(!arg
.is_indirect() && arg
.cast
.is_none());
936 llargs
.push(Load(bcx
, expr
::get_dataptr(bcx
, val
)));
938 let info_arg
= &fn_ty
.args
[*next_idx
];
940 assert
!(!info_arg
.is_indirect() && info_arg
.cast
.is_none());
941 let info
= Load(bcx
, expr
::get_meta(bcx
, val
));
943 if let Virtual(idx
) = *callee
{
944 // We have to grab the fn pointer from the vtable when
945 // handling the first argument, ensure that here.
946 assert_eq
!(*next_idx
, 2);
947 assert
!(info_arg
.is_ignore());
948 let fn_ptr
= meth
::get_virtual_method(bcx
, info
, idx
);
949 let llty
= fn_ty
.llvm_type(bcx
.ccx()).ptr_to();
950 *callee
= Fn(PointerCast(bcx
, fn_ptr
, llty
));
952 assert
!(!info_arg
.is_ignore());
959 if by_ref
&& !arg
.is_indirect() {
960 // Have to load the argument, maybe while casting it.
961 if arg
.original_ty
== Type
::i1(bcx
.ccx()) {
962 // We store bools as i8 so we need to truncate to i1.
963 val
= LoadRangeAssert(bcx
, val
, 0, 2, llvm
::False
);
964 val
= Trunc(bcx
, val
, arg
.original_ty
);
965 } else if let Some(ty
) = arg
.cast
{
966 val
= Load(bcx
, PointerCast(bcx
, val
, ty
.ptr_to()));
967 if !bcx
.unreachable
.get() {
968 let llalign
= llalign_of_min(bcx
.ccx(), arg
.ty
);
970 llvm
::LLVMSetAlignment(val
, llalign
);
974 val
= Load(bcx
, val
);