// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Handles translation of callees as well as other call-related
//! things. Callees are a superset of normal rust values and sometimes
//! have different representations. In particular, top-level fn items
//! and methods are represented as just a fn ptr and not a full
//! closure.
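//!
//! For instance (an illustrative, user-level sketch, not code from this
//! module): a top-level fn item coerces to a plain function pointer with no
//! captured environment, unlike a capturing closure.
//!
//! ```
//! fn double(x: i32) -> i32 { x * 2 }
//!
//! fn main() {
//!     // A fn item is representable as a bare fn pointer...
//!     let f: fn(i32) -> i32 = double;
//!     assert_eq!(f(21), 42);
//!
//!     // ...while a capturing closure is more than a code pointer.
//!     let offset = 1;
//!     let g = move |x: i32| x + offset;
//!     assert_eq!(g(41), 42);
//! }
//! ```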
pub use self::CalleeData::*;
pub use self::CallArgs::*;

use arena::TypedArena;
use back::symbol_names;
use llvm::{self, ValueRef, get_params};
use middle::cstore::LOCAL_CRATE;
use rustc::hir::def_id::DefId;
use rustc::ty::subst::{Substs};
use rustc::hir::map as hir_map;
use abi::{Abi, FnType};
use cleanup::CleanupMethods;
use common::{self, Block, Result, CrateContext, FunctionContext};
use common::{C_uint, C_undef};
use debuginfo::DebugLoc;
use machine::{llalign_of_min, llsize_of_store};
use monomorphize::{self, Instance};
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use syntax::codemap::DUMMY_SP;

#[derive(Debug)]
pub enum CalleeData {
    /// Constructor for enum variant/tuple-like-struct.
    NamedTupleConstructor(Disr),

    /// Function pointer.
    Fn(ValueRef),

    Intrinsic,

    /// Trait object found in the vtable at that index.
    Virtual(usize)
}

#[derive(Debug)]
pub struct Callee<'tcx> {
    pub data: CalleeData,
    pub ty: Ty<'tcx>
}

impl<'tcx> Callee<'tcx> {
    /// Function pointer.
    pub fn ptr(datum: Datum<'tcx, Rvalue>) -> Callee<'tcx> {
        Callee {
            data: Fn(datum.val),
            ty: datum.ty
        }
    }
    /// Trait or impl method call.
    pub fn method_call<'blk>(bcx: Block<'blk, 'tcx>,
                             method_call: ty::MethodCall)
                             -> Callee<'tcx> {
        let method = bcx.tcx().tables.borrow().method_map[&method_call];
        Callee::method(bcx, method)
    }
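
    // Illustrative sketch (user-level, not compiler code): a method call
    // expression and its fully-qualified form resolve to the same method,
    // which is what the method map consulted above records per
    // `ty::MethodCall`.
    //
    //     fn main() {
    //         let s = "hello";
    //         assert_eq!(s.len(), str::len(s));
    //         assert_eq!(s.len(), 5);
    //     }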
    /// Trait or impl method.
    pub fn method<'blk>(bcx: Block<'blk, 'tcx>,
                        method: ty::MethodCallee<'tcx>) -> Callee<'tcx> {
        let substs = bcx.tcx().mk_substs(bcx.fcx.monomorphize(&method.substs));
        Callee::def(bcx.ccx(), method.def_id, substs)
    }
    /// Function or method definition.
    pub fn def<'a>(ccx: &CrateContext<'a, 'tcx>,
                   def_id: DefId,
                   substs: &'tcx subst::Substs<'tcx>)
                   -> Callee<'tcx> {
        let tcx = ccx.tcx();

        if substs.self_ty().is_some() {
            // Only trait methods can have a Self parameter.
            return Callee::trait_method(ccx, def_id, substs);
        }

        let maybe_node_id = inline::get_local_instance(ccx, def_id)
            .and_then(|def_id| tcx.map.as_local_node_id(def_id));
        let maybe_ast_node = maybe_node_id.and_then(|node_id| {
            tcx.map.find(node_id)
        });

        let data = match maybe_ast_node {
            Some(hir_map::NodeStructCtor(_)) => {
                NamedTupleConstructor(Disr(0))
            }
            Some(hir_map::NodeVariant(_)) => {
                let vinfo = common::inlined_variant_def(ccx, maybe_node_id.unwrap());
                NamedTupleConstructor(Disr::from(vinfo.disr_val))
            }
            Some(hir_map::NodeForeignItem(fi)) if {
                let abi = tcx.map.get_foreign_abi(fi.id);
                abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic
            } => Intrinsic,

            _ => return Callee::ptr(get_fn(ccx, def_id, substs))
        };

        Callee {
            data: data,
            ty: def_ty(tcx, def_id, substs)
        }
    }
    /// Trait method, which has to be resolved to an impl method.
    pub fn trait_method<'a>(ccx: &CrateContext<'a, 'tcx>,
                            def_id: DefId,
                            substs: &'tcx subst::Substs<'tcx>)
                            -> Callee<'tcx> {
        let tcx = ccx.tcx();

        let method_item = tcx.impl_or_trait_item(def_id);
        let trait_id = method_item.container().id();
        let trait_ref = ty::Binder(substs.to_trait_ref(tcx, trait_id));
        let trait_ref = infer::normalize_associated_type(tcx, &trait_ref);
        match common::fulfill_obligation(ccx, DUMMY_SP, trait_ref) {
            traits::VtableImpl(vtable_impl) => {
                let impl_did = vtable_impl.impl_def_id;
                let mname = tcx.item_name(def_id);
                // create a concatenated set of substitutions which includes
                // those from the impl and those from the method:
                let impl_substs = vtable_impl.substs.with_method_from(&substs);
                let substs = tcx.mk_substs(impl_substs);
                let mth = meth::get_impl_method(tcx, impl_did, substs, mname);

                // Translate the function, bypassing Callee::def.
                // That is because default methods have the same ID as the
                // trait method used to look up the impl method that ended
                // up here, so calling Callee::def would infinitely recurse.
                Callee::ptr(get_fn(ccx, mth.method.def_id, mth.substs))
            }
            traits::VtableClosure(vtable_closure) => {
                // The substitutions should have no type parameters remaining
                // after passing through fulfill_obligation
                let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
                let llfn = closure::trans_closure_method(ccx,
                                                         vtable_closure.closure_def_id,
                                                         vtable_closure.substs,
                                                         trait_closure_kind);

                let method_ty = def_ty(tcx, def_id, substs);
                let fn_ptr_ty = match method_ty.sty {
                    ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)),
                    _ => bug!("expected fn item type, found {}",
                              method_ty)
                };
                Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty))
            }
            traits::VtableFnPointer(fn_ty) => {
                let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
                let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, fn_ty);

                let method_ty = def_ty(tcx, def_id, substs);
                let fn_ptr_ty = match method_ty.sty {
                    ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)),
                    _ => bug!("expected fn item type, found {}",
                              method_ty)
                };
                Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty))
            }
            traits::VtableObject(ref data) => {
                Callee {
                    data: Virtual(traits::get_vtable_index_of_object_method(
                        tcx, data, def_id)),
                    ty: def_ty(tcx, def_id, substs)
                }
            }
            vtable => {
                bug!("resolved vtable bad vtable {:?} in trans", vtable);
            }
        }
    }
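
    // Illustrative sketch (user-level, not compiler code): when a trait
    // method is called through a generic bound, monomorphization resolves it
    // to the concrete impl method, which mirrors the `VtableImpl` case above.
    //
    //     trait Greet { fn hi(&self) -> &'static str; }
    //     impl Greet for u8 { fn hi(&self) -> &'static str { "u8" } }
    //     impl Greet for bool { fn hi(&self) -> &'static str { "bool" } }
    //
    //     fn greet<T: Greet>(x: T) -> &'static str { x.hi() }
    //
    //     fn main() {
    //         // greet::<u8> and greet::<bool> each call their impl directly.
    //         assert_eq!(greet(0u8), "u8");
    //         assert_eq!(greet(true), "bool");
    //     }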
    /// Get the abi::FnType for a direct call. Mainly deals with the fact
    /// that a Virtual call doesn't take the vtable, like its shim does.
    /// The extra argument types are for variadic (extern "C") functions.
    pub fn direct_fn_type<'a>(&self, ccx: &CrateContext<'a, 'tcx>,
                              extra_args: &[Ty<'tcx>]) -> FnType {
        let abi = self.ty.fn_abi();
        let sig = ccx.tcx().erase_late_bound_regions(self.ty.fn_sig());
        let sig = infer::normalize_associated_type(ccx.tcx(), &sig);
        let mut fn_ty = FnType::unadjusted(ccx, abi, &sig, extra_args);
        if let Virtual(_) = self.data {
            // Don't pass the vtable, it's not an argument of the virtual fn.
            fn_ty.args[1].ignore();
        }
        fn_ty.adjust_for_abi(ccx, abi, &sig);
        fn_ty
    }
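
    // A hedged, user-level illustration of the Virtual case above (not
    // compiler code): for a trait-object call the vtable travels inside the
    // fat pointer itself, so it is not passed as a separate argument.
    //
    //     trait Area { fn area(&self) -> f64; }
    //     struct Square(f64);
    //     impl Area for Square { fn area(&self) -> f64 { self.0 * self.0 } }
    //
    //     fn main() {
    //         let sq = Square(3.0);
    //         // `shape` is a fat pointer: (data pointer, vtable pointer).
    //         let shape: &Area = &sq;
    //         assert_eq!(shape.area(), 9.0);
    //     }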
    /// This behemoth of a function translates function calls. Unfortunately, in
    /// order to generate more efficient LLVM output at -O0, it has quite a complex
    /// signature (refactoring this into two functions seems like a good idea).
    ///
    /// In particular, for lang items, it is invoked with a dest of None, and in
    /// that case the return value contains the result of the fn. The lang item must
    /// not return a structural type or else all heck breaks loose.
    ///
    /// For non-lang items, `dest` is always Some, and hence the result is written
    /// into memory somewhere. Nonetheless we return the actual return value of the
    /// function.
    pub fn call<'a, 'blk>(self, bcx: Block<'blk, 'tcx>,
                          debug_loc: DebugLoc,
                          args: CallArgs<'a, 'tcx>,
                          dest: Option<expr::Dest>)
                          -> Result<'blk, 'tcx> {
        trans_call_inner(bcx, debug_loc, self, args, dest)
    }
    /// Turn the callee into a function pointer.
    pub fn reify<'a>(self, ccx: &CrateContext<'a, 'tcx>)
                     -> Datum<'tcx, Rvalue> {
        let fn_ptr_ty = match self.ty.sty {
            ty::TyFnDef(_, _, f) => ccx.tcx().mk_ty(ty::TyFnPtr(f)),
            _ => self.ty
        };
        match self.data {
            Fn(llfn) => {
                immediate_rvalue(llfn, fn_ptr_ty)
            }
            Virtual(idx) => {
                let llfn = meth::trans_object_shim(ccx, self.ty, idx);
                immediate_rvalue(llfn, fn_ptr_ty)
            }
            NamedTupleConstructor(_) => match self.ty.sty {
                ty::TyFnDef(def_id, substs, _) => {
                    return get_fn(ccx, def_id, substs);
                }
                _ => bug!("expected fn item type, found {}", self.ty)
            },
            Intrinsic => bug!("intrinsic {} getting reified", self.ty)
        }
    }
}
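
// Illustrative sketch (not compiler code) of what reification means at the
// source level: fn items and tuple-like constructors can all be turned into
// plain fn pointers.
//
//     struct Celsius(f64);
//
//     fn main() {
//         let make: fn(f64) -> Celsius = Celsius;    // tuple-struct constructor
//         let wrap: fn(i32) -> Option<i32> = Some;   // enum variant constructor
//         assert_eq!(wrap(7), Some(7));
//         assert_eq!(make(36.6).0, 36.6);
//     }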

/// Given a DefId and some Substs, produces the monomorphic item type.
fn def_ty<'tcx>(tcx: &TyCtxt<'tcx>,
                def_id: DefId,
                substs: &'tcx subst::Substs<'tcx>)
                -> Ty<'tcx> {
    let ty = tcx.lookup_item_type(def_id).ty;
    monomorphize::apply_param_substs(tcx, substs, &ty)
}
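
// Illustrative sketch (not compiler code): `def_ty` above corresponds to the
// user-visible fact that a generic item, once its substitutions are known,
// has an ordinary monomorphic type.
//
//     fn first<T>(v: &[T]) -> &T { &v[0] }
//
//     fn main() {
//         // `first::<u32>` is a concrete instantiation whose type is an
//         // ordinary fn pointer type.
//         let f: fn(&[u32]) -> &u32 = first::<u32>;
//         assert_eq!(*f(&[10, 20]), 10);
//     }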

/// Translates an adapter that implements the `Fn` trait for a fn
/// pointer. This is basically the equivalent of something like:
///
/// ```
/// impl<'a> Fn(&'a i32) -> &'a i32 for fn(&i32) -> &i32 {
///     extern "rust-call" fn call(&self, args: (&'a i32,)) -> &'a i32 {
///         self(args.0)
///     }
/// }
/// ```
///
/// but for the bare function type given.
pub fn trans_fn_pointer_shim<'a, 'tcx>(
    ccx: &'a CrateContext<'a, 'tcx>,
    closure_kind: ty::ClosureKind,
    bare_fn_ty: Ty<'tcx>)
    -> ValueRef
{
    let _icx = push_ctxt("trans_fn_pointer_shim");
    let tcx = ccx.tcx();

    // Normalize the type for better caching.
    let bare_fn_ty = tcx.erase_regions(&bare_fn_ty);

    // If this is an impl of `Fn` or `FnMut` trait, the receiver is `&self`.
    let is_by_ref = match closure_kind {
        ty::ClosureKind::Fn | ty::ClosureKind::FnMut => true,
        ty::ClosureKind::FnOnce => false,
    };

    let llfnpointer = match bare_fn_ty.sty {
        ty::TyFnDef(def_id, substs, _) => {
            // Function definitions have to be turned into a pointer.
            let llfn = Callee::def(ccx, def_id, substs).reify(ccx).val;

            // A by-value fn item is ignored, so the shim has
            // the same signature as the original function.
            Some(llfn)
        }
        _ => None
    };

    let bare_fn_ty_maybe_ref = if is_by_ref {
        tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), bare_fn_ty)
    } else {
        bare_fn_ty
    };

    // Check if we already trans'd this shim.
    match ccx.fn_pointer_shims().borrow().get(&bare_fn_ty_maybe_ref) {
        Some(&llval) => { return llval; }
        None => { }
    }

    debug!("trans_fn_pointer_shim(bare_fn_ty={:?})",
           bare_fn_ty);

    // Construct the "tuply" version of `bare_fn_ty`. It takes two arguments: `self`,
    // which is the fn pointer, and `args`, which is the arguments tuple.
    let sig = match bare_fn_ty.sty {
        ty::TyFnDef(_, _,
                    &ty::BareFnTy { unsafety: hir::Unsafety::Normal,
                                    abi: Abi::Rust,
                                    ref sig }) |
        ty::TyFnPtr(&ty::BareFnTy { unsafety: hir::Unsafety::Normal,
                                    abi: Abi::Rust,
                                    ref sig }) => sig,

        _ => {
            bug!("trans_fn_pointer_shim invoked on invalid type: {}",
                 bare_fn_ty);
        }
    };
    let sig = tcx.erase_late_bound_regions(sig);
    let sig = infer::normalize_associated_type(ccx.tcx(), &sig);
    let tuple_input_ty = tcx.mk_tup(sig.inputs.to_vec());
    let sig = ty::FnSig {
        inputs: vec![bare_fn_ty_maybe_ref,
                     tuple_input_ty],
        output: sig.output,
        variadic: false
    };
    let fn_ty = FnType::new(ccx, Abi::RustCall, &sig, &[]);
    let tuple_fn_ty = tcx.mk_fn_ptr(ty::BareFnTy {
        unsafety: hir::Unsafety::Normal,
        abi: Abi::RustCall,
        sig: ty::Binder(sig)
    });
    debug!("tuple_fn_ty: {:?}", tuple_fn_ty);

    let function_name =
        symbol_names::internal_name_from_type_and_suffix(ccx,
                                                         bare_fn_ty,
                                                         "fn_pointer_shim");
    let llfn = declare::define_internal_fn(ccx, &function_name, tuple_fn_ty);

    let empty_substs = tcx.mk_substs(Substs::empty());
    let (block_arena, fcx): (TypedArena<_>, FunctionContext);
    block_arena = TypedArena::new();
    fcx = FunctionContext::new(ccx, llfn, fn_ty, None, empty_substs, &block_arena);
    let mut bcx = fcx.init(false, None);

    let llargs = get_params(fcx.llfn);

    let self_idx = fcx.fn_ty.ret.is_indirect() as usize;
    let llfnpointer = llfnpointer.unwrap_or_else(|| {
        // the first argument (`self`) will be ptr to the fn pointer
        if is_by_ref {
            Load(bcx, llargs[self_idx])
        } else {
            llargs[self_idx]
        }
    });

    assert!(!fcx.needs_ret_allocas);

    let dest = fcx.llretslotptr.get().map(|_|
        expr::SaveIn(fcx.get_ret_slot(bcx, "ret_slot"))
    );

    let callee = Callee {
        data: Fn(llfnpointer),
        ty: bare_fn_ty
    };
    bcx = callee.call(bcx, DebugLoc::None, ArgVals(&llargs[(self_idx + 1)..]), dest).bcx;

    fcx.finish(bcx, DebugLoc::None);

    ccx.fn_pointer_shims().borrow_mut().insert(bare_fn_ty_maybe_ref, llfn);

    llfn
}
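
// Illustrative sketch (user-level, not compiler code) of why the shim above
// exists: a bare fn pointer can be used wherever an `Fn` bound is expected,
// so an adapter with the tupled "rust-call" signature has to be generated.
//
//     fn double(x: i32) -> i32 { x * 2 }
//
//     fn apply<F: Fn(i32) -> i32>(f: F, x: i32) -> i32 { f(x) }
//
//     fn main() {
//         let f: fn(i32) -> i32 = double;
//         assert_eq!(apply(f, 21), 42);
//     }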

/// Translates a reference to a fn/method item, monomorphizing and
/// inlining as it goes.
///
/// # Parameters
///
/// - `ccx`: the crate context
/// - `def_id`: def id of the fn or method item being referenced
/// - `substs`: values for each of the fn/method's parameters
fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                    def_id: DefId,
                    substs: &'tcx subst::Substs<'tcx>)
                    -> Datum<'tcx, Rvalue> {
    let tcx = ccx.tcx();

    debug!("get_fn(def_id={:?}, substs={:?})", def_id, substs);

    assert!(!substs.types.needs_infer());
    assert!(!substs.types.has_escaping_regions());

    // Check whether this fn has an inlined copy and, if so, redirect
    // def_id to the local id of the inlined copy.
    let def_id = inline::maybe_instantiate_inline(ccx, def_id);

    fn is_named_tuple_constructor(tcx: &TyCtxt, def_id: DefId) -> bool {
        let node_id = match tcx.map.as_local_node_id(def_id) {
            Some(n) => n,
            None => { return false; }
        };
        let map_node = errors::expect(
            &tcx.sess.diagnostic(),
            tcx.map.find(node_id),
            || "local item should be in ast map".to_string());

        match map_node {
            hir_map::NodeVariant(v) => {
                v.node.data.is_tuple()
            }
            hir_map::NodeStructCtor(_) => true,
            _ => false
        }
    }
    let must_monomorphise =
        !substs.types.is_empty() || is_named_tuple_constructor(tcx, def_id);

    debug!("get_fn({:?}) must_monomorphise: {}",
           def_id, must_monomorphise);

    // Create a monomorphic version of generic functions
    if must_monomorphise {
        // Should be either intra-crate or inlined.
        assert_eq!(def_id.krate, LOCAL_CRATE);

        let substs = tcx.mk_substs(substs.clone().erase_regions());
        let (val, fn_ty) = monomorphize::monomorphic_fn(ccx, def_id, substs);
        let fn_ptr_ty = match fn_ty.sty {
            ty::TyFnDef(_, _, fty) => {
                // Create a fn pointer with the substituted signature.
                tcx.mk_ty(ty::TyFnPtr(fty))
            }
            _ => bug!("expected fn item type, found {}", fn_ty)
        };
        assert_eq!(type_of::type_of(ccx, fn_ptr_ty), common::val_ty(val));
        return immediate_rvalue(val, fn_ptr_ty);
    }

    // Find the actual function pointer.
    let ty = ccx.tcx().lookup_item_type(def_id).ty;
    let fn_ptr_ty = match ty.sty {
        ty::TyFnDef(_, _, fty) => {
            // Create a fn pointer with the normalized signature.
            tcx.mk_fn_ptr(infer::normalize_associated_type(tcx, fty))
        }
        _ => bug!("expected fn item type, found {}", ty)
    };

    let instance = Instance::mono(ccx.tcx(), def_id);
    if let Some(&llfn) = ccx.instances().borrow().get(&instance) {
        return immediate_rvalue(llfn, fn_ptr_ty);
    }

    let attrs;
    let local_id = ccx.tcx().map.as_local_node_id(def_id);
    let maybe_node = local_id.and_then(|id| tcx.map.find(id));
    let (sym, attrs, local_item) = match maybe_node {
        Some(hir_map::NodeItem(&hir::Item {
            ref attrs, id, span, node: hir::ItemFn(..), ..
        })) |
        Some(hir_map::NodeTraitItem(&hir::TraitItem {
            ref attrs, id, span, node: hir::MethodTraitItem(_, Some(_)), ..
        })) |
        Some(hir_map::NodeImplItem(&hir::ImplItem {
            ref attrs, id, span, node: hir::ImplItemKind::Method(..), ..
        })) => {
            let sym = exported_name(ccx, instance, attrs);

            if declare::get_defined_value(ccx, &sym).is_some() {
                ccx.sess().span_fatal(span,
                    &format!("symbol `{}` is already defined", sym));
            }

            (sym, &attrs[..], Some(id))
        }

        Some(hir_map::NodeForeignItem(&hir::ForeignItem {
            ref attrs, name, node: hir::ForeignItemFn(..), ..
        })) => {
            (imported_name(name, attrs).to_string(), &attrs[..], None)
        }

        None => {
            attrs = ccx.sess().cstore.item_attrs(def_id);
            (ccx.sess().cstore.item_symbol(def_id), &attrs[..], None)
        }

        ref variant => {
            bug!("get_fn: unexpected variant: {:?}", variant)
        }
    };

    // This is subtle and surprising, but sometimes we have to bitcast
    // the resulting fn pointer. The reason has to do with external
    // functions. If you have two crates that both bind the same C
    // library, they may not use precisely the same types: for
    // example, they will probably each declare their own structs,
    // which are distinct types from LLVM's point of view (nominal
    // types).
    //
    // Now, if those two crates are linked into an application, and
    // they contain inlined code, you can wind up with a situation
    // where both of those functions wind up being loaded into this
    // application simultaneously. In that case, the same function
    // (from LLVM's point of view) requires two types. But of course
    // LLVM won't allow one function to have two types.
    //
    // What we currently do, therefore, is declare the function with
    // one of the two types (whichever happens to come first) and then
    // bitcast as needed when the function is referenced to make sure
    // it has the type we expect.
    //
    // This can occur on either a crate-local or crate-external
    // reference. It also occurs when testing libcore and in some
    // other weird situations. Annoying.
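    //
    // A hedged sketch of that situation (names are made up, not from any
    // real library). Two crates might each declare their own binding for the
    // same C function:
    //
    //     // crate_a/src/lib.rs
    //     #[repr(C)] pub struct Event { pub code: u32 }
    //     extern "C" { pub fn poll_event(e: *mut Event) -> i32; }
    //
    //     // crate_b/src/lib.rs
    //     #[repr(C)] pub struct Event { pub code: u32 }
    //     extern "C" { pub fn poll_event(e: *mut Event) -> i32; }
    //
    // Both `Event` structs have the same layout, but they are distinct
    // nominal types, so the two declarations of `poll_event` get different
    // LLVM types and one of them must be bitcast when referenced.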

    let llptrty = type_of::type_of(ccx, fn_ptr_ty);
    let llfn = if let Some(llfn) = declare::get_declared_value(ccx, &sym) {
        if common::val_ty(llfn) != llptrty {
            if local_item.is_some() {
                bug!("symbol `{}` previously declared as {:?}, now wanted as {:?}",
                     sym, Value(llfn), llptrty);
            }
            debug!("get_fn: casting {:?} to {:?}", llfn, llptrty);
            consts::ptrcast(llfn, llptrty)
        } else {
            debug!("get_fn: not casting pointer!");
            llfn
        }
    } else {
        let llfn = declare::declare_fn(ccx, &sym, ty);
        assert_eq!(common::val_ty(llfn), llptrty);
        debug!("get_fn: not casting pointer!");

        attributes::from_fn_attrs(ccx, attrs, llfn);
        if local_item.is_some() {
            // FIXME(eddyb) Doubt all extern fn should allow unwinding.
            attributes::unwind(llfn, true);
        }

        llfn
    };

    // Always insert into item_symbols, in case this item is exported.
    if let Some(id) = local_item {
        ccx.item_symbols().borrow_mut().insert(id, sym);
    }

    ccx.instances().borrow_mut().insert(instance, llfn);

    immediate_rvalue(llfn, fn_ptr_ty)
}

// ______________________________________________________________________
// Translating calls

fn trans_call_inner<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                    debug_loc: DebugLoc,
                                    callee: Callee<'tcx>,
                                    args: CallArgs<'a, 'tcx>,
                                    dest: Option<expr::Dest>)
                                    -> Result<'blk, 'tcx> {
    // Introduce a temporary cleanup scope that will contain cleanups
    // for the arguments while they are being evaluated. The purpose of
    // this cleanup is to ensure that, should a panic occur while
    // evaluating argument N, the values for arguments 0...N-1 are all
    // cleaned up. If no panic occurs, the values are handed off to
    // the callee, and hence none of the cleanups in this temporary
    // scope will ever execute.
    let fcx = bcx.fcx;
    let ccx = fcx.ccx;

    let abi = callee.ty.fn_abi();
    let sig = callee.ty.fn_sig();
    let output = bcx.tcx().erase_late_bound_regions(&sig.output());
    let output = infer::normalize_associated_type(bcx.tcx(), &output);

    let extra_args = match args {
        ArgExprs(args) if abi != Abi::RustCall => {
            args[sig.0.inputs.len()..].iter().map(|expr| {
                common::expr_ty_adjusted(bcx, expr)
            }).collect()
        }
        _ => vec![]
    };
    let fn_ty = callee.direct_fn_type(ccx, &extra_args);

    let mut callee = match callee.data {
        Intrinsic => {
            assert!(abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic);
            assert!(dest.is_some());

            return intrinsic::trans_intrinsic_call(bcx, callee.ty, &fn_ty,
                                                   args, dest.unwrap(),
                                                   debug_loc);
        }
        NamedTupleConstructor(disr) => {
            assert!(dest.is_some());

            return base::trans_named_tuple_constructor(bcx,
                                                       callee.ty,
                                                       disr,
                                                       args,
                                                       dest.unwrap(),
                                                       debug_loc);
        }
        f => f
    };

    // Generate a location to store the result. If the user does
    // not care about the result, just make a stack slot.
    let opt_llretslot = dest.and_then(|dest| match dest {
        expr::SaveIn(dst) => Some(dst),
        expr::Ignore => {
            let needs_drop = || match output {
                ty::FnConverging(ret_ty) => bcx.fcx.type_needs_drop(ret_ty),
                ty::FnDiverging => false
            };
            if fn_ty.ret.is_indirect() || fn_ty.ret.cast.is_some() || needs_drop() {
                // Push the out-pointer if we use an out-pointer for this
                // return type, otherwise push "undef".
                if fn_ty.ret.is_ignore() {
                    Some(C_undef(fn_ty.ret.original_ty.ptr_to()))
                } else {
                    let llresult = alloca(bcx, fn_ty.ret.original_ty, "__llret");
                    call_lifetime_start(bcx, llresult);
                    Some(llresult)
                }
            } else {
                None
            }
        }
    });

    // If there is no destination, the return must be direct, with no cast.
    if opt_llretslot.is_none() {
        assert!(!fn_ty.ret.is_indirect() && fn_ty.ret.cast.is_none());
    }

    let mut llargs = Vec::new();

    if fn_ty.ret.is_indirect() {
        let mut llretslot = opt_llretslot.unwrap();
        if let Some(ty) = fn_ty.ret.cast {
            llretslot = PointerCast(bcx, llretslot, ty.ptr_to());
        }
        llargs.push(llretslot);
    }

    let arg_cleanup_scope = fcx.push_custom_cleanup_scope();
    bcx = trans_args(bcx, abi, &fn_ty, &mut callee, args, &mut llargs,
                     cleanup::CustomScope(arg_cleanup_scope));
    fcx.scopes.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean();

    let llfn = match callee {
        Fn(f) => f,
        _ => bug!("expected fn pointer callee, found {:?}", callee)
    };

    let (llret, mut bcx) = base::invoke(bcx, llfn, &llargs, debug_loc);
    if !bcx.unreachable.get() {
        fn_ty.apply_attrs_callsite(llret);
    }

    // If the function we just called does not use an outpointer,
    // store the result into the rust outpointer. Cast the outpointer
    // type to match because some ABIs will use a different type than
    // the Rust type. e.g., a {u32,u32} struct could be returned as
    // u64.
    if !fn_ty.ret.is_ignore() && !fn_ty.ret.is_indirect() {
        if let Some(llforeign_ret_ty) = fn_ty.ret.cast {
            let llrust_ret_ty = fn_ty.ret.original_ty;
            let llretslot = opt_llretslot.unwrap();

            // The actual return type is a struct, but the ABI
            // adaptation code has cast it into some scalar type. The
            // code that follows is the only reliable way I have
            // found to do a transform like i64 -> {i32,i32}.
            // Basically we dump the data onto the stack then memcpy it.
            //
            // Other approaches I tried:
            // - Casting rust ret pointer to the foreign type and using Store
            //   is (a) unsafe if size of foreign type > size of rust type and
            //   (b) runs afoul of strict aliasing rules, yielding invalid
            //   assembly under -O (specifically, the store gets removed).
            // - Truncating foreign type to correct integral type and then
            //   bitcasting to the struct type yields invalid cast errors.
            let llscratch = base::alloca(bcx, llforeign_ret_ty, "__cast");
            base::call_lifetime_start(bcx, llscratch);
            Store(bcx, llret, llscratch);
            let llscratch_i8 = PointerCast(bcx, llscratch, Type::i8(ccx).ptr_to());
            let llretptr_i8 = PointerCast(bcx, llretslot, Type::i8(ccx).ptr_to());
            let llrust_size = llsize_of_store(ccx, llrust_ret_ty);
            let llforeign_align = llalign_of_min(ccx, llforeign_ret_ty);
            let llrust_align = llalign_of_min(ccx, llrust_ret_ty);
            let llalign = cmp::min(llforeign_align, llrust_align);
            debug!("llrust_size={}", llrust_size);

            if !bcx.unreachable.get() {
                base::call_memcpy(&B(bcx), llretptr_i8, llscratch_i8,
                                  C_uint(ccx, llrust_size), llalign as u32);
            }
            base::call_lifetime_end(bcx, llscratch);
        } else if let Some(llretslot) = opt_llretslot {
            base::store_ty(bcx, llret, llretslot, output.unwrap());
        }
    }

    fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope);

    // If the caller doesn't care about the result of this fn call,
    // drop the temporary slot we made.
    match (dest, opt_llretslot, output) {
        (Some(expr::Ignore), Some(llretslot), ty::FnConverging(ret_ty)) => {
            // drop the value if it is not being saved.
            bcx = glue::drop_ty(bcx, llretslot, ret_ty, debug_loc);
            call_lifetime_end(bcx, llretslot);
        }
        _ => {}
    }

    if output == ty::FnDiverging {
        Unreachable(bcx);
    }

    Result::new(bcx, llret)
}

pub enum CallArgs<'a, 'tcx> {
    /// Supply value of arguments as a list of expressions that must be
    /// translated. This is used in the common case of `foo(bar, qux)`.
    ArgExprs(&'a [P<hir::Expr>]),

    /// Supply value of arguments as a list of LLVM value refs; frequently
    /// used with lang items and so forth, when the argument is an internal
    /// value.
    ArgVals(&'a [ValueRef]),

    /// For overloaded operators: `(lhs, Option(rhs))`.
    /// `lhs` is the left-hand-side and `rhs` is the datum
    /// of the right-hand-side argument (if any).
    ArgOverloadedOp(Datum<'tcx, Expr>, Option<Datum<'tcx, Expr>>),

    /// Supply value of arguments as a list of expressions that must be
    /// translated, for overloaded call operators.
    ArgOverloadedCall(Vec<&'a hir::Expr>),
}

fn trans_args_under_call_abi<'blk, 'tcx>(
                             mut bcx: Block<'blk, 'tcx>,
                             arg_exprs: &[P<hir::Expr>],
                             callee: &mut CalleeData,
                             fn_ty: &FnType,
                             llargs: &mut Vec<ValueRef>,
                             arg_cleanup_scope: cleanup::ScopeId)
                             -> Block<'blk, 'tcx>
{
    let mut arg_idx = 0;

    // Translate the `self` argument first.
    let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &arg_exprs[0]));
    bcx = trans_arg_datum(bcx,
                          arg_datum,
                          callee, fn_ty, &mut arg_idx,
                          arg_cleanup_scope,
                          llargs);

    // Now untuple the rest of the arguments.
    let tuple_expr = &arg_exprs[1];
    let tuple_type = common::node_id_type(bcx, tuple_expr.id);

    match tuple_type.sty {
        ty::TyTuple(ref field_types) => {
            let tuple_datum = unpack_datum!(bcx,
                                            expr::trans(bcx, &tuple_expr));
            let tuple_lvalue_datum =
                unpack_datum!(bcx,
                              tuple_datum.to_lvalue_datum(bcx,
                                                          "args",
                                                          tuple_expr.id));
            let repr = adt::represent_type(bcx.ccx(), tuple_type);
            let repr_ptr = &repr;
            for (i, field_type) in field_types.iter().enumerate() {
                let arg_datum = tuple_lvalue_datum.get_element(
                    bcx,
                    field_type,
                    |srcval| {
                        adt::trans_field_ptr(bcx, repr_ptr, srcval, Disr(0), i)
                    });
                bcx = trans_arg_datum(bcx,
                                      arg_datum,
                                      callee, fn_ty, &mut arg_idx,
                                      arg_cleanup_scope,
                                      llargs);
            }
        }
        _ => {
            span_bug!(tuple_expr.span,
                      "argument to `.call()` wasn't a tuple?!")
        }
    };

    bcx
}

pub fn trans_args<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                  abi: Abi,
                                  fn_ty: &FnType,
                                  callee: &mut CalleeData,
                                  args: CallArgs<'a, 'tcx>,
                                  llargs: &mut Vec<ValueRef>,
                                  arg_cleanup_scope: cleanup::ScopeId)
                                  -> Block<'blk, 'tcx> {
    debug!("trans_args(abi={})", abi);

    let _icx = push_ctxt("trans_args");

    let mut bcx = bcx;
    let mut arg_idx = 0;

    // First we figure out the caller's view of the types of the arguments.
    // This will be needed if this is a generic call, because the callee has
    // to cast her view of the arguments to the caller's view.
    match args {
        ArgExprs(arg_exprs) => {
            if abi == Abi::RustCall {
                // This is only used for direct calls to the `call`,
                // `call_mut` or `call_once` functions.
                return trans_args_under_call_abi(bcx,
                                                 arg_exprs, callee, fn_ty,
                                                 llargs,
                                                 arg_cleanup_scope);
            }

            for arg_expr in arg_exprs {
                let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &arg_expr));
                bcx = trans_arg_datum(bcx,
                                      arg_datum,
                                      callee, fn_ty, &mut arg_idx,
                                      arg_cleanup_scope,
                                      llargs);
            }
        }
        ArgOverloadedCall(arg_exprs) => {
            for expr in arg_exprs {
                let arg_datum =
                    unpack_datum!(bcx, expr::trans(bcx, expr));
                bcx = trans_arg_datum(bcx,
                                      arg_datum,
                                      callee, fn_ty, &mut arg_idx,
                                      arg_cleanup_scope,
                                      llargs);
            }
        }
        ArgOverloadedOp(lhs, rhs) => {
            bcx = trans_arg_datum(bcx, lhs,
                                  callee, fn_ty, &mut arg_idx,
                                  arg_cleanup_scope,
                                  llargs);

            if let Some(rhs) = rhs {
                bcx = trans_arg_datum(bcx, rhs,
                                      callee, fn_ty, &mut arg_idx,
                                      arg_cleanup_scope,
                                      llargs);
            }
        }
        ArgVals(vs) => {
            match *callee {
                Virtual(idx) => {
                    llargs.push(vs[0]);

                    let fn_ptr = meth::get_virtual_method(bcx, vs[1], idx);
                    let llty = fn_ty.llvm_type(bcx.ccx()).ptr_to();
                    *callee = Fn(PointerCast(bcx, fn_ptr, llty));
                    llargs.extend_from_slice(&vs[2..]);
                }
                _ => llargs.extend_from_slice(vs)
            }
        }
    }

    bcx
}

fn trans_arg_datum<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               arg_datum: Datum<'tcx, Expr>,
                               callee: &mut CalleeData,
                               fn_ty: &FnType,
                               next_idx: &mut usize,
                               arg_cleanup_scope: cleanup::ScopeId,
                               llargs: &mut Vec<ValueRef>)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_arg_datum");
    let mut bcx = bcx;

    debug!("trans_arg_datum({:?})", arg_datum);

    let arg = &fn_ty.args[*next_idx];
    *next_idx += 1;

    // Fill padding with undef value, where applicable.
    if let Some(ty) = arg.pad {
        llargs.push(C_undef(ty));
    }

    // Determine whether we want a by-ref datum even if not appropriate.
    let want_by_ref = arg.is_indirect() || arg.cast.is_some();

    let fat_ptr = common::type_is_fat_ptr(bcx.tcx(), arg_datum.ty);
    let (by_ref, val) = if fat_ptr && !bcx.fcx.type_needs_drop(arg_datum.ty) {
        (true, arg_datum.val)
    } else {
        // Make this an rvalue, since we are going to be
        // passing ownership.
        let arg_datum = unpack_datum!(
            bcx, arg_datum.to_rvalue_datum(bcx, "arg"));

        // Now that arg_datum is owned, get it into the appropriate
        // mode (ref vs value).
        let arg_datum = unpack_datum!(bcx, if want_by_ref {
            arg_datum.to_ref_datum(bcx)
        } else {
            arg_datum.to_appropriate_datum(bcx)
        });

        // Technically, ownership of val passes to the callee.
        // However, we must cleanup should we panic before the
        // callee is actually invoked.
        (arg_datum.kind.is_by_ref(),
         arg_datum.add_clean(bcx.fcx, arg_cleanup_scope))
    };

    debug!("--- trans_arg_datum passing {:?}", Value(val));

    if fat_ptr {
        // Fat pointers should be passed without any transformations.
        assert!(!arg.is_indirect() && arg.cast.is_none());
        llargs.push(Load(bcx, expr::get_dataptr(bcx, val)));

        let info_arg = &fn_ty.args[*next_idx];
        *next_idx += 1;
        assert!(!info_arg.is_indirect() && info_arg.cast.is_none());
        let info = Load(bcx, expr::get_meta(bcx, val));
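
        // Illustrative sketch (not compiler code): a fat pointer really is a
        // (data, metadata) pair, which is why it is split into two LLVM
        // arguments here. For a slice the metadata is the length; for a
        // trait object it is the vtable pointer consulted below.
        //
        //     use std::mem::size_of;
        //
        //     fn main() {
        //         assert_eq!(size_of::<&[u8]>(), 2 * size_of::<&u8>());
        //         assert_eq!(size_of::<&str>(), 2 * size_of::<usize>());
        //     }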

        if let Virtual(idx) = *callee {
            // We have to grab the fn pointer from the vtable when
            // handling the first argument, ensure that here.
            assert_eq!(*next_idx, 2);
            assert!(info_arg.is_ignore());
            let fn_ptr = meth::get_virtual_method(bcx, info, idx);
            let llty = fn_ty.llvm_type(bcx.ccx()).ptr_to();
            *callee = Fn(PointerCast(bcx, fn_ptr, llty));
        } else {
            assert!(!info_arg.is_ignore());
            llargs.push(info);
        }
        return bcx;
    }

    let mut val = val;
    if by_ref && !arg.is_indirect() {
        // Have to load the argument, maybe while casting it.
        if arg.original_ty == Type::i1(bcx.ccx()) {
            // We store bools as i8 so we need to truncate to i1.
            val = LoadRangeAssert(bcx, val, 0, 2, llvm::False);
            val = Trunc(bcx, val, arg.original_ty);
        } else if let Some(ty) = arg.cast {
            val = Load(bcx, PointerCast(bcx, val, ty.ptr_to()));
            if !bcx.unreachable.get() {
                let llalign = llalign_of_min(bcx.ccx(), arg.ty);
                unsafe {
                    llvm::LLVMSetAlignment(val, llalign);
                }
            }
        } else {
            val = Load(bcx, val);