// Source: git.proxmox.com rustc.git blob — src/librustc_trans/callee.rs
// Imported Upstream version 1.9.0+dfsg1
1 // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! Handles translation of callees as well as other call-related
12 //! things. Callees are a superset of normal rust values and sometimes
13 //! have different representations. In particular, top-level fn items
14 //! and methods are represented as just a fn ptr and not a full
15 //! closure.
16
17 pub use self::CalleeData::*;
18 pub use self::CallArgs::*;
19
20 use arena::TypedArena;
21 use back::symbol_names;
22 use llvm::{self, ValueRef, get_params};
23 use middle::cstore::LOCAL_CRATE;
24 use rustc::hir::def_id::DefId;
25 use rustc::infer;
26 use rustc::ty::subst;
27 use rustc::ty::subst::{Substs};
28 use rustc::traits;
29 use rustc::hir::map as hir_map;
30 use abi::{Abi, FnType};
31 use adt;
32 use attributes;
33 use base;
34 use base::*;
35 use build::*;
36 use cleanup;
37 use cleanup::CleanupMethods;
38 use closure;
39 use common::{self, Block, Result, CrateContext, FunctionContext};
40 use common::{C_uint, C_undef};
41 use consts;
42 use datum::*;
43 use debuginfo::DebugLoc;
44 use declare;
45 use expr;
46 use glue;
47 use inline;
48 use intrinsic;
49 use machine::{llalign_of_min, llsize_of_store};
50 use meth;
51 use monomorphize::{self, Instance};
52 use type_::Type;
53 use type_of;
54 use value::Value;
55 use Disr;
56 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
57 use rustc::hir;
58
59 use syntax::codemap::DUMMY_SP;
60 use syntax::errors;
61 use syntax::ptr::P;
62
63 use std::cmp;
64
/// How a resolved callee is invoked. Paired with the callee's
/// monomorphized type in `Callee`; `trans_call_inner` dispatches on it.
#[derive(Debug)]
pub enum CalleeData {
    /// Constructor for enum variant/tuple-like-struct.
    NamedTupleConstructor(Disr),

    /// Function pointer.
    Fn(ValueRef),

    /// Rust or platform intrinsic; translated specially by
    /// `intrinsic::trans_intrinsic_call` rather than as a real call.
    Intrinsic,

    /// Trait object found in the vtable at that index.
    Virtual(usize)
}
78
/// A resolved callee: the invocation strategy (`data`) together with the
/// callee's monomorphized function type (`ty`).
#[derive(Debug)]
pub struct Callee<'tcx> {
    pub data: CalleeData,
    pub ty: Ty<'tcx>
}
84
impl<'tcx> Callee<'tcx> {
    /// Function pointer.
    ///
    /// Wraps an already-translated fn-pointer rvalue as a `Callee`.
    pub fn ptr(datum: Datum<'tcx, Rvalue>) -> Callee<'tcx> {
        Callee {
            data: Fn(datum.val),
            ty: datum.ty
        }
    }

    /// Trait or impl method call.
    ///
    /// Looks the `MethodCall` up in the typeck tables, then defers to
    /// `Callee::method`.
    pub fn method_call<'blk>(bcx: Block<'blk, 'tcx>,
                             method_call: ty::MethodCall)
                             -> Callee<'tcx> {
        let method = bcx.tcx().tables.borrow().method_map[&method_call];
        Callee::method(bcx, method)
    }

    /// Trait or impl method.
    ///
    /// Monomorphizes the method's substitutions against the enclosing
    /// function context before resolving the definition.
    pub fn method<'blk>(bcx: Block<'blk, 'tcx>,
                        method: ty::MethodCallee<'tcx>) -> Callee<'tcx> {
        let substs = bcx.tcx().mk_substs(bcx.fcx.monomorphize(&method.substs));
        Callee::def(bcx.ccx(), method.def_id, substs)
    }

    /// Function or method definition.
    ///
    /// Classifies `def_id` (given fully-monomorphic `substs`) into one of
    /// the `CalleeData` variants: trait methods go through trait selection,
    /// tuple-struct/variant constructors and intrinsics are recognized from
    /// the HIR map, and everything else becomes a plain fn pointer.
    pub fn def<'a>(ccx: &CrateContext<'a, 'tcx>,
                   def_id: DefId,
                   substs: &'tcx subst::Substs<'tcx>)
                   -> Callee<'tcx> {
        let tcx = ccx.tcx();

        if substs.self_ty().is_some() {
            // Only trait methods can have a Self parameter.
            return Callee::trait_method(ccx, def_id, substs);
        }

        // Redirect to the local (possibly inlined) copy, if any, so the
        // HIR map lookups below can see the item's AST node.
        let maybe_node_id = inline::get_local_instance(ccx, def_id)
            .and_then(|def_id| tcx.map.as_local_node_id(def_id));
        let maybe_ast_node = maybe_node_id.and_then(|node_id| {
            tcx.map.find(node_id)
        });

        let data = match maybe_ast_node {
            Some(hir_map::NodeStructCtor(_)) => {
                // Tuple structs have a single "variant" with discriminant 0.
                NamedTupleConstructor(Disr(0))
            }
            Some(hir_map::NodeVariant(_)) => {
                let vinfo = common::inlined_variant_def(ccx, maybe_node_id.unwrap());
                NamedTupleConstructor(Disr::from(vinfo.disr_val))
            }
            Some(hir_map::NodeForeignItem(fi)) if {
                let abi = tcx.map.get_foreign_abi(fi.id);
                abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic
            } => Intrinsic,

            _ => return Callee::ptr(get_fn(ccx, def_id, substs))
        };

        Callee {
            data: data,
            ty: def_ty(tcx, def_id, substs)
        }
    }

    /// Trait method, which has to be resolved to an impl method.
    ///
    /// Runs trait selection (`fulfill_obligation`) on the trait ref built
    /// from `substs` and translates whichever vtable we get back: a concrete
    /// impl method, a closure, a fn-pointer shim, or a virtual-call index.
    pub fn trait_method<'a>(ccx: &CrateContext<'a, 'tcx>,
                            def_id: DefId,
                            substs: &'tcx subst::Substs<'tcx>)
                            -> Callee<'tcx> {
        let tcx = ccx.tcx();

        let method_item = tcx.impl_or_trait_item(def_id);
        let trait_id = method_item.container().id();
        let trait_ref = ty::Binder(substs.to_trait_ref(tcx, trait_id));
        let trait_ref = infer::normalize_associated_type(tcx, &trait_ref);
        match common::fulfill_obligation(ccx, DUMMY_SP, trait_ref) {
            traits::VtableImpl(vtable_impl) => {
                let impl_did = vtable_impl.impl_def_id;
                let mname = tcx.item_name(def_id);
                // create a concatenated set of substitutions which includes
                // those from the impl and those from the method:
                let impl_substs = vtable_impl.substs.with_method_from(&substs);
                let substs = tcx.mk_substs(impl_substs);
                let mth = meth::get_impl_method(tcx, impl_did, substs, mname);

                // Translate the function, bypassing Callee::def.
                // That is because default methods have the same ID as the
                // trait method used to look up the impl method that ended
                // up here, so calling Callee::def would infinitely recurse.
                Callee::ptr(get_fn(ccx, mth.method.def_id, mth.substs))
            }
            traits::VtableClosure(vtable_closure) => {
                // The substitutions should have no type parameters remaining
                // after passing through fulfill_obligation
                let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
                let llfn = closure::trans_closure_method(ccx,
                                                         vtable_closure.closure_def_id,
                                                         vtable_closure.substs,
                                                         trait_closure_kind);

                // The closure method's item type is a TyFnDef; the actual
                // value we hand back is a pointer, so switch to TyFnPtr.
                let method_ty = def_ty(tcx, def_id, substs);
                let fn_ptr_ty = match method_ty.sty {
                    ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)),
                    _ => bug!("expected fn item type, found {}",
                              method_ty)
                };
                Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty))
            }
            traits::VtableFnPointer(fn_ty) => {
                // `Fn*` implemented directly by a fn pointer: build (or
                // reuse) the adapter shim that forwards the tupled args.
                let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
                let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, fn_ty);

                let method_ty = def_ty(tcx, def_id, substs);
                let fn_ptr_ty = match method_ty.sty {
                    ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)),
                    _ => bug!("expected fn item type, found {}",
                              method_ty)
                };
                Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty))
            }
            traits::VtableObject(ref data) => {
                // Dynamic dispatch: record only the vtable slot; the fn
                // pointer is fetched at the call site (see trans_args).
                Callee {
                    data: Virtual(traits::get_vtable_index_of_object_method(
                        tcx, data, def_id)),
                    ty: def_ty(tcx, def_id, substs)
                }
            }
            vtable => {
                bug!("resolved vtable bad vtable {:?} in trans", vtable);
            }
        }
    }

    /// Get the abi::FnType for a direct call. Mainly deals with the fact
    /// that a Virtual call doesn't take the vtable, like its shim does.
    /// The extra argument types are for variadic (extern "C") functions.
    pub fn direct_fn_type<'a>(&self, ccx: &CrateContext<'a, 'tcx>,
                              extra_args: &[Ty<'tcx>]) -> FnType {
        let abi = self.ty.fn_abi();
        let sig = ccx.tcx().erase_late_bound_regions(self.ty.fn_sig());
        let sig = infer::normalize_associated_type(ccx.tcx(), &sig);
        let mut fn_ty = FnType::unadjusted(ccx, abi, &sig, extra_args);
        if let Virtual(_) = self.data {
            // Don't pass the vtable, it's not an argument of the virtual fn.
            fn_ty.args[1].ignore();
        }
        fn_ty.adjust_for_abi(ccx, abi, &sig);
        fn_ty
    }

    /// This behemoth of a function translates function calls. Unfortunately, in
    /// order to generate more efficient LLVM output at -O0, it has quite a complex
    /// signature (refactoring this into two functions seems like a good idea).
    ///
    /// In particular, for lang items, it is invoked with a dest of None, and in
    /// that case the return value contains the result of the fn. The lang item must
    /// not return a structural type or else all heck breaks loose.
    ///
    /// For non-lang items, `dest` is always Some, and hence the result is written
    /// into memory somewhere. Nonetheless we return the actual return value of the
    /// function.
    pub fn call<'a, 'blk>(self, bcx: Block<'blk, 'tcx>,
                          debug_loc: DebugLoc,
                          args: CallArgs<'a, 'tcx>,
                          dest: Option<expr::Dest>)
                          -> Result<'blk, 'tcx> {
        trans_call_inner(bcx, debug_loc, self, args, dest)
    }

    /// Turn the callee into a function pointer.
    ///
    /// Fn items are reified to a TyFnPtr value; Virtual callees get an
    /// object shim; intrinsics cannot be reified.
    pub fn reify<'a>(self, ccx: &CrateContext<'a, 'tcx>)
                     -> Datum<'tcx, Rvalue> {
        let fn_ptr_ty = match self.ty.sty {
            ty::TyFnDef(_, _, f) => ccx.tcx().mk_ty(ty::TyFnPtr(f)),
            _ => self.ty
        };
        match self.data {
            Fn(llfn) => {
                immediate_rvalue(llfn, fn_ptr_ty)
            }
            Virtual(idx) => {
                // Build a shim that does the vtable lookup, so the result
                // can be used as a plain fn pointer.
                let llfn = meth::trans_object_shim(ccx, self.ty, idx);
                immediate_rvalue(llfn, fn_ptr_ty)
            }
            NamedTupleConstructor(_) => match self.ty.sty {
                ty::TyFnDef(def_id, substs, _) => {
                    return get_fn(ccx, def_id, substs);
                }
                _ => bug!("expected fn item type, found {}", self.ty)
            },
            Intrinsic => bug!("intrinsic {} getting reified", self.ty)
        }
    }
}
279
280 /// Given a DefId and some Substs, produces the monomorphic item type.
281 fn def_ty<'tcx>(tcx: &TyCtxt<'tcx>,
282 def_id: DefId,
283 substs: &'tcx subst::Substs<'tcx>)
284 -> Ty<'tcx> {
285 let ty = tcx.lookup_item_type(def_id).ty;
286 monomorphize::apply_param_substs(tcx, substs, &ty)
287 }
288
/// Translates an adapter that implements the `Fn` trait for a fn
/// pointer. This is basically the equivalent of something like:
///
/// ```
/// impl<'a> Fn(&'a int) -> &'a int for fn(&int) -> &int {
///     extern "rust-abi" fn call(&self, args: (&'a int,)) -> &'a int {
///         (*self)(args.0)
///     }
/// }
/// ```
///
/// but for the bare function type given.
pub fn trans_fn_pointer_shim<'a, 'tcx>(
    ccx: &'a CrateContext<'a, 'tcx>,
    closure_kind: ty::ClosureKind,
    bare_fn_ty: Ty<'tcx>)
    -> ValueRef
{
    let _icx = push_ctxt("trans_fn_pointer_shim");
    let tcx = ccx.tcx();

    // Normalize the type for better caching.
    let bare_fn_ty = tcx.erase_regions(&bare_fn_ty);

    // If this is an impl of `Fn` or `FnMut` trait, the receiver is `&self`.
    let is_by_ref = match closure_kind {
        ty::ClosureKind::Fn | ty::ClosureKind::FnMut => true,
        ty::ClosureKind::FnOnce => false,
    };

    let llfnpointer = match bare_fn_ty.sty {
        ty::TyFnDef(def_id, substs, _) => {
            // Function definitions have to be turned into a pointer.
            let llfn = Callee::def(ccx, def_id, substs).reify(ccx).val;
            if !is_by_ref {
                // A by-value fn item is ignored, so the shim has
                // the same signature as the original function.
                return llfn;
            }
            Some(llfn)
        }
        _ => None
    };

    // The cache key: the self type is `&fn(..)` for Fn/FnMut, the bare
    // fn type itself for FnOnce.
    let bare_fn_ty_maybe_ref = if is_by_ref {
        tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), bare_fn_ty)
    } else {
        bare_fn_ty
    };

    // Check if we already trans'd this shim.
    match ccx.fn_pointer_shims().borrow().get(&bare_fn_ty_maybe_ref) {
        Some(&llval) => { return llval; }
        None => { }
    }

    debug!("trans_fn_pointer_shim(bare_fn_ty={:?})",
           bare_fn_ty);

    // Construct the "tuply" version of `bare_fn_ty`. It takes two arguments: `self`,
    // which is the fn pointer, and `args`, which is the arguments tuple.
    let sig = match bare_fn_ty.sty {
        ty::TyFnDef(_, _,
                    &ty::BareFnTy { unsafety: hir::Unsafety::Normal,
                                    abi: Abi::Rust,
                                    ref sig }) |
        ty::TyFnPtr(&ty::BareFnTy { unsafety: hir::Unsafety::Normal,
                                    abi: Abi::Rust,
                                    ref sig }) => sig,

        _ => {
            bug!("trans_fn_pointer_shim invoked on invalid type: {}",
                 bare_fn_ty);
        }
    };
    let sig = tcx.erase_late_bound_regions(sig);
    let sig = infer::normalize_associated_type(ccx.tcx(), &sig);
    let tuple_input_ty = tcx.mk_tup(sig.inputs.to_vec());
    let sig = ty::FnSig {
        inputs: vec![bare_fn_ty_maybe_ref,
                     tuple_input_ty],
        output: sig.output,
        variadic: false
    };
    let fn_ty = FnType::new(ccx, Abi::RustCall, &sig, &[]);
    let tuple_fn_ty = tcx.mk_fn_ptr(ty::BareFnTy {
        unsafety: hir::Unsafety::Normal,
        abi: Abi::RustCall,
        sig: ty::Binder(sig)
    });
    debug!("tuple_fn_ty: {:?}", tuple_fn_ty);

    // Declare the shim under an internal (mangled) symbol name.
    let function_name =
        symbol_names::internal_name_from_type_and_suffix(ccx,
                                                         bare_fn_ty,
                                                         "fn_pointer_shim");
    let llfn = declare::define_internal_fn(ccx, &function_name, tuple_fn_ty);

    // Build the shim body: load the fn pointer out of `self` if
    // necessary, then forward the untupled arguments to it.
    let empty_substs = tcx.mk_substs(Substs::empty());
    let (block_arena, fcx): (TypedArena<_>, FunctionContext);
    block_arena = TypedArena::new();
    fcx = FunctionContext::new(ccx, llfn, fn_ty, None, empty_substs, &block_arena);
    let mut bcx = fcx.init(false, None);

    let llargs = get_params(fcx.llfn);

    // If the return is indirect, argument 0 is the out-pointer and
    // `self` is shifted to index 1.
    let self_idx = fcx.fn_ty.ret.is_indirect() as usize;
    let llfnpointer = llfnpointer.unwrap_or_else(|| {
        // the first argument (`self`) will be ptr to the fn pointer
        if is_by_ref {
            Load(bcx, llargs[self_idx])
        } else {
            llargs[self_idx]
        }
    });

    assert!(!fcx.needs_ret_allocas);

    let dest = fcx.llretslotptr.get().map(|_|
        expr::SaveIn(fcx.get_ret_slot(bcx, "ret_slot"))
    );

    let callee = Callee {
        data: Fn(llfnpointer),
        ty: bare_fn_ty
    };
    bcx = callee.call(bcx, DebugLoc::None, ArgVals(&llargs[(self_idx + 1)..]), dest).bcx;

    fcx.finish(bcx, DebugLoc::None);

    // Cache the shim so later requests for the same type reuse it.
    ccx.fn_pointer_shims().borrow_mut().insert(bare_fn_ty_maybe_ref, llfn);

    llfn
}
425
/// Translates a reference to a fn/method item, monomorphizing and
/// inlining as it goes.
///
/// # Parameters
///
/// - `ccx`: the crate context
/// - `def_id`: def id of the fn or method item being referenced
/// - `substs`: values for each of the fn/method's parameters
fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                    def_id: DefId,
                    substs: &'tcx subst::Substs<'tcx>)
                    -> Datum<'tcx, Rvalue> {
    let tcx = ccx.tcx();

    debug!("get_fn(def_id={:?}, substs={:?})", def_id, substs);

    // Substitutions must be fully monomorphic at this point.
    assert!(!substs.types.needs_infer());
    assert!(!substs.types.has_escaping_regions());

    // Check whether this fn has an inlined copy and, if so, redirect
    // def_id to the local id of the inlined copy.
    let def_id = inline::maybe_instantiate_inline(ccx, def_id);

    // Tuple-struct/variant constructors are always monomorphized, even
    // when they have no type parameters of their own.
    fn is_named_tuple_constructor(tcx: &TyCtxt, def_id: DefId) -> bool {
        let node_id = match tcx.map.as_local_node_id(def_id) {
            Some(n) => n,
            None => { return false; }
        };
        let map_node = errors::expect(
            &tcx.sess.diagnostic(),
            tcx.map.find(node_id),
            || "local item should be in ast map".to_string());

        match map_node {
            hir_map::NodeVariant(v) => {
                v.node.data.is_tuple()
            }
            hir_map::NodeStructCtor(_) => true,
            _ => false
        }
    }
    let must_monomorphise =
        !substs.types.is_empty() || is_named_tuple_constructor(tcx, def_id);

    debug!("get_fn({:?}) must_monomorphise: {}",
           def_id, must_monomorphise);

    // Create a monomorphic version of generic functions
    if must_monomorphise {
        // Should be either intra-crate or inlined.
        assert_eq!(def_id.krate, LOCAL_CRATE);

        let substs = tcx.mk_substs(substs.clone().erase_regions());
        let (val, fn_ty) = monomorphize::monomorphic_fn(ccx, def_id, substs);
        let fn_ptr_ty = match fn_ty.sty {
            ty::TyFnDef(_, _, fty) => {
                // Create a fn pointer with the substituted signature.
                tcx.mk_ty(ty::TyFnPtr(fty))
            }
            _ => bug!("expected fn item type, found {}", fn_ty)
        };
        assert_eq!(type_of::type_of(ccx, fn_ptr_ty), common::val_ty(val));
        return immediate_rvalue(val, fn_ptr_ty);
    }

    // Find the actual function pointer.
    let ty = ccx.tcx().lookup_item_type(def_id).ty;
    let fn_ptr_ty = match ty.sty {
        ty::TyFnDef(_, _, fty) => {
            // Create a fn pointer with the normalized signature.
            tcx.mk_fn_ptr(infer::normalize_associated_type(tcx, fty))
        }
        _ => bug!("expected fn item type, found {}", ty)
    };

    // Reuse the declaration if we've already translated a reference
    // to this instance.
    let instance = Instance::mono(ccx.tcx(), def_id);
    if let Some(&llfn) = ccx.instances().borrow().get(&instance) {
        return immediate_rvalue(llfn, fn_ptr_ty);
    }

    // Determine the symbol name and attributes: local fn/method items
    // get an exported name, foreign items use their import name, and
    // non-local items come from the crate store's metadata.
    let attrs;
    let local_id = ccx.tcx().map.as_local_node_id(def_id);
    let maybe_node = local_id.and_then(|id| tcx.map.find(id));
    let (sym, attrs, local_item) = match maybe_node {
        Some(hir_map::NodeItem(&hir::Item {
            ref attrs, id, span, node: hir::ItemFn(..), ..
        })) |
        Some(hir_map::NodeTraitItem(&hir::TraitItem {
            ref attrs, id, span, node: hir::MethodTraitItem(_, Some(_)), ..
        })) |
        Some(hir_map::NodeImplItem(&hir::ImplItem {
            ref attrs, id, span, node: hir::ImplItemKind::Method(..), ..
        })) => {
            let sym = exported_name(ccx, instance, attrs);

            if declare::get_defined_value(ccx, &sym).is_some() {
                ccx.sess().span_fatal(span,
                    &format!("symbol `{}` is already defined", sym));
            }

            (sym, &attrs[..], Some(id))
        }

        Some(hir_map::NodeForeignItem(&hir::ForeignItem {
            ref attrs, name, node: hir::ForeignItemFn(..), ..
        })) => {
            (imported_name(name, attrs).to_string(), &attrs[..], None)
        }

        None => {
            attrs = ccx.sess().cstore.item_attrs(def_id);
            (ccx.sess().cstore.item_symbol(def_id), &attrs[..], None)
        }

        ref variant => {
            bug!("get_fn: unexpected variant: {:?}", variant)
        }
    };

    // This is subtle and surprising, but sometimes we have to bitcast
    // the resulting fn pointer.  The reason has to do with external
    // functions.  If you have two crates that both bind the same C
    // library, they may not use precisely the same types: for
    // example, they will probably each declare their own structs,
    // which are distinct types from LLVM's point of view (nominal
    // types).
    //
    // Now, if those two crates are linked into an application, and
    // they contain inlined code, you can wind up with a situation
    // where both of those functions wind up being loaded into this
    // application simultaneously. In that case, the same function
    // (from LLVM's point of view) requires two types. But of course
    // LLVM won't allow one function to have two types.
    //
    // What we currently do, therefore, is declare the function with
    // one of the two types (whichever happens to come first) and then
    // bitcast as needed when the function is referenced to make sure
    // it has the type we expect.
    //
    // This can occur on either a crate-local or crate-external
    // reference. It also occurs when testing libcore and in some
    // other weird situations. Annoying.

    let llptrty = type_of::type_of(ccx, fn_ptr_ty);
    let llfn = if let Some(llfn) = declare::get_declared_value(ccx, &sym) {
        if common::val_ty(llfn) != llptrty {
            if local_item.is_some() {
                // A local item's declared type must always match.
                bug!("symbol `{}` previously declared as {:?}, now wanted as {:?}",
                     sym, Value(llfn), llptrty);
            }
            debug!("get_fn: casting {:?} to {:?}", llfn, llptrty);
            consts::ptrcast(llfn, llptrty)
        } else {
            debug!("get_fn: not casting pointer!");
            llfn
        }
    } else {
        let llfn = declare::declare_fn(ccx, &sym, ty);
        assert_eq!(common::val_ty(llfn), llptrty);
        debug!("get_fn: not casting pointer!");

        attributes::from_fn_attrs(ccx, attrs, llfn);
        if local_item.is_some() {
            // FIXME(eddyb) Doubt all extern fn should allow unwinding.
            attributes::unwind(llfn, true);
        }

        llfn
    };

    // Always insert into item_symbols, in case this item is exported.
    if let Some(id) = local_item {
        ccx.item_symbols().borrow_mut().insert(id, sym);
    }

    ccx.instances().borrow_mut().insert(instance, llfn);

    immediate_rvalue(llfn, fn_ptr_ty)
}
605
606 // ______________________________________________________________________
607 // Translating calls
608
/// Translates a single function call: evaluates the arguments (with a
/// temporary cleanup scope), invokes the callee, and stores/drops the
/// result as requested by `dest`. Intrinsics and tuple constructors are
/// dispatched to their specialized translators instead.
fn trans_call_inner<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                    debug_loc: DebugLoc,
                                    callee: Callee<'tcx>,
                                    args: CallArgs<'a, 'tcx>,
                                    dest: Option<expr::Dest>)
                                    -> Result<'blk, 'tcx> {
    // Introduce a temporary cleanup scope that will contain cleanups
    // for the arguments while they are being evaluated. The purpose
    // this cleanup is to ensure that, should a panic occur while
    // evaluating argument N, the values for arguments 0...N-1 are all
    // cleaned up. If no panic occurs, the values are handed off to
    // the callee, and hence none of the cleanups in this temporary
    // scope will ever execute.
    let fcx = bcx.fcx;
    let ccx = fcx.ccx;

    let abi = callee.ty.fn_abi();
    let sig = callee.ty.fn_sig();
    let output = bcx.tcx().erase_late_bound_regions(&sig.output());
    let output = infer::normalize_associated_type(bcx.tcx(), &output);

    // For C variadic calls, arguments past the declared inputs need
    // their types computed here so the FnType can cover them.
    let extra_args = match args {
        ArgExprs(args) if abi != Abi::RustCall => {
            args[sig.0.inputs.len()..].iter().map(|expr| {
                common::expr_ty_adjusted(bcx, expr)
            }).collect()
        }
        _ => vec![]
    };
    let fn_ty = callee.direct_fn_type(ccx, &extra_args);

    let mut callee = match callee.data {
        Intrinsic => {
            assert!(abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic);
            assert!(dest.is_some());

            return intrinsic::trans_intrinsic_call(bcx, callee.ty, &fn_ty,
                                                   args, dest.unwrap(),
                                                   debug_loc);
        }
        NamedTupleConstructor(disr) => {
            assert!(dest.is_some());

            return base::trans_named_tuple_constructor(bcx,
                                                       callee.ty,
                                                       disr,
                                                       args,
                                                       dest.unwrap(),
                                                       debug_loc);
        }
        f => f
    };

    // Generate a location to store the result. If the user does
    // not care about the result, just make a stack slot.
    let opt_llretslot = dest.and_then(|dest| match dest {
        expr::SaveIn(dst) => Some(dst),
        expr::Ignore => {
            let needs_drop = || match output {
                ty::FnConverging(ret_ty) => bcx.fcx.type_needs_drop(ret_ty),
                ty::FnDiverging => false
            };
            if fn_ty.ret.is_indirect() || fn_ty.ret.cast.is_some() || needs_drop() {
                // Push the out-pointer if we use an out-pointer for this
                // return type, otherwise push "undef".
                if fn_ty.ret.is_ignore() {
                    Some(C_undef(fn_ty.ret.original_ty.ptr_to()))
                } else {
                    let llresult = alloca(bcx, fn_ty.ret.original_ty, "__llret");
                    call_lifetime_start(bcx, llresult);
                    Some(llresult)
                }
            } else {
                None
            }
        }
    });

    // If there no destination, return must be direct, with no cast.
    if opt_llretslot.is_none() {
        assert!(!fn_ty.ret.is_indirect() && fn_ty.ret.cast.is_none());
    }

    let mut llargs = Vec::new();

    // An indirectly-returned value is passed via an out-pointer as the
    // first LLVM argument.
    if fn_ty.ret.is_indirect() {
        let mut llretslot = opt_llretslot.unwrap();
        if let Some(ty) = fn_ty.ret.cast {
            llretslot = PointerCast(bcx, llretslot, ty.ptr_to());
        }
        llargs.push(llretslot);
    }

    let arg_cleanup_scope = fcx.push_custom_cleanup_scope();
    bcx = trans_args(bcx, abi, &fn_ty, &mut callee, args, &mut llargs,
                     cleanup::CustomScope(arg_cleanup_scope));
    fcx.scopes.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean();

    // By now trans_args has resolved any Virtual callee into a Fn pointer.
    let llfn = match callee {
        Fn(f) => f,
        _ => bug!("expected fn pointer callee, found {:?}", callee)
    };

    let (llret, mut bcx) = base::invoke(bcx, llfn, &llargs, debug_loc);
    if !bcx.unreachable.get() {
        fn_ty.apply_attrs_callsite(llret);
    }

    // If the function we just called does not use an outpointer,
    // store the result into the rust outpointer. Cast the outpointer
    // type to match because some ABIs will use a different type than
    // the Rust type. e.g., a {u32,u32} struct could be returned as
    // u64.
    if !fn_ty.ret.is_ignore() && !fn_ty.ret.is_indirect() {
        if let Some(llforeign_ret_ty) = fn_ty.ret.cast {
            let llrust_ret_ty = fn_ty.ret.original_ty;
            let llretslot = opt_llretslot.unwrap();

            // The actual return type is a struct, but the ABI
            // adaptation code has cast it into some scalar type.  The
            // code that follows is the only reliable way I have
            // found to do a transform like i64 -> {i32,i32}.
            // Basically we dump the data onto the stack then memcpy it.
            //
            // Other approaches I tried:
            // - Casting rust ret pointer to the foreign type and using Store
            //   is (a) unsafe if size of foreign type > size of rust type and
            //   (b) runs afoul of strict aliasing rules, yielding invalid
            //   assembly under -O (specifically, the store gets removed).
            // - Truncating foreign type to correct integral type and then
            //   bitcasting to the struct type yields invalid cast errors.
            let llscratch = base::alloca(bcx, llforeign_ret_ty, "__cast");
            base::call_lifetime_start(bcx, llscratch);
            Store(bcx, llret, llscratch);
            let llscratch_i8 = PointerCast(bcx, llscratch, Type::i8(ccx).ptr_to());
            let llretptr_i8 = PointerCast(bcx, llretslot, Type::i8(ccx).ptr_to());
            let llrust_size = llsize_of_store(ccx, llrust_ret_ty);
            let llforeign_align = llalign_of_min(ccx, llforeign_ret_ty);
            let llrust_align = llalign_of_min(ccx, llrust_ret_ty);
            let llalign = cmp::min(llforeign_align, llrust_align);
            debug!("llrust_size={}", llrust_size);

            if !bcx.unreachable.get() {
                base::call_memcpy(&B(bcx), llretptr_i8, llscratch_i8,
                                  C_uint(ccx, llrust_size), llalign as u32);
            }
            base::call_lifetime_end(bcx, llscratch);
        } else if let Some(llretslot) = opt_llretslot {
            base::store_ty(bcx, llret, llretslot, output.unwrap());
        }
    }

    fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope);

    // If the caller doesn't care about the result of this fn call,
    // drop the temporary slot we made.
    match (dest, opt_llretslot, output) {
        (Some(expr::Ignore), Some(llretslot), ty::FnConverging(ret_ty)) => {
            // drop the value if it is not being saved.
            bcx = glue::drop_ty(bcx, llretslot, ret_ty, debug_loc);
            call_lifetime_end(bcx, llretslot);
        }
        _ => {}
    }

    // A diverging callee never returns; mark the continuation dead.
    if output == ty::FnDiverging {
        Unreachable(bcx);
    }

    Result::new(bcx, llret)
}
780
/// The different forms in which a call's arguments can be supplied to
/// `trans_args`.
pub enum CallArgs<'a, 'tcx> {
    /// Supply value of arguments as a list of expressions that must be
    /// translated. This is used in the common case of `foo(bar, qux)`.
    ArgExprs(&'a [P<hir::Expr>]),

    /// Supply value of arguments as a list of LLVM value refs; frequently
    /// used with lang items and so forth, when the argument is an internal
    /// value.
    ArgVals(&'a [ValueRef]),

    /// For overloaded operators: `(lhs, Option(rhs))`.
    /// `lhs` is the left-hand-side and `rhs` is the datum
    /// of the right-hand-side argument (if any).
    ArgOverloadedOp(Datum<'tcx, Expr>, Option<Datum<'tcx, Expr>>),

    /// Supply value of arguments as a list of expressions that must be
    /// translated, for overloaded call operators.
    ArgOverloadedCall(Vec<&'a hir::Expr>),
}
800
/// Translates arguments for a "rust-call" ABI callee (`call`, `call_mut`,
/// `call_once`): the first expression is the `self` value, the second is a
/// tuple whose fields are untupled into individual arguments.
fn trans_args_under_call_abi<'blk, 'tcx>(
                             mut bcx: Block<'blk, 'tcx>,
                             arg_exprs: &[P<hir::Expr>],
                             callee: &mut CalleeData,
                             fn_ty: &FnType,
                             llargs: &mut Vec<ValueRef>,
                             arg_cleanup_scope: cleanup::ScopeId)
                             -> Block<'blk, 'tcx>
{
    let mut arg_idx = 0;

    // Translate the `self` argument first.
    let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &arg_exprs[0]));
    bcx = trans_arg_datum(bcx,
                          arg_datum,
                          callee, fn_ty, &mut arg_idx,
                          arg_cleanup_scope,
                          llargs);

    // Now untuple the rest of the arguments.
    let tuple_expr = &arg_exprs[1];
    let tuple_type = common::node_id_type(bcx, tuple_expr.id);

    match tuple_type.sty {
        ty::TyTuple(ref field_types) => {
            // Force the tuple into an lvalue so each field can be
            // projected out and passed as its own argument.
            let tuple_datum = unpack_datum!(bcx,
                                            expr::trans(bcx, &tuple_expr));
            let tuple_lvalue_datum =
                unpack_datum!(bcx,
                              tuple_datum.to_lvalue_datum(bcx,
                                                          "args",
                                                          tuple_expr.id));
            let repr = adt::represent_type(bcx.ccx(), tuple_type);
            let repr_ptr = &repr;
            for (i, field_type) in field_types.iter().enumerate() {
                let arg_datum = tuple_lvalue_datum.get_element(
                    bcx,
                    field_type,
                    |srcval| {
                        adt::trans_field_ptr(bcx, repr_ptr, srcval, Disr(0), i)
                    }).to_expr_datum();
                bcx = trans_arg_datum(bcx,
                                      arg_datum,
                                      callee, fn_ty, &mut arg_idx,
                                      arg_cleanup_scope,
                                      llargs);
            }
        }
        _ => {
            span_bug!(tuple_expr.span,
                      "argument to `.call()` wasn't a tuple?!")
        }
    };

    bcx
}
857
/// Translates a call's arguments into `llargs`, dispatching on the
/// `CallArgs` form. As a side effect, a `Virtual` callee is resolved to a
/// concrete `Fn` pointer once the receiver's vtable is available.
pub fn trans_args<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                  abi: Abi,
                                  fn_ty: &FnType,
                                  callee: &mut CalleeData,
                                  args: CallArgs<'a, 'tcx>,
                                  llargs: &mut Vec<ValueRef>,
                                  arg_cleanup_scope: cleanup::ScopeId)
                                  -> Block<'blk, 'tcx> {
    debug!("trans_args(abi={})", abi);

    let _icx = push_ctxt("trans_args");

    let mut bcx = bcx;
    let mut arg_idx = 0;

    // First we figure out the caller's view of the types of the arguments.
    // This will be needed if this is a generic call, because the callee has
    // to cast her view of the arguments to the caller's view.
    match args {
        ArgExprs(arg_exprs) => {
            if abi == Abi::RustCall {
                // This is only used for direct calls to the `call`,
                // `call_mut` or `call_once` functions.
                return trans_args_under_call_abi(bcx,
                                                 arg_exprs, callee, fn_ty,
                                                 llargs,
                                                 arg_cleanup_scope)
            }

            for arg_expr in arg_exprs {
                let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &arg_expr));
                bcx = trans_arg_datum(bcx,
                                      arg_datum,
                                      callee, fn_ty, &mut arg_idx,
                                      arg_cleanup_scope,
                                      llargs);
            }
        }
        ArgOverloadedCall(arg_exprs) => {
            for expr in arg_exprs {
                let arg_datum =
                    unpack_datum!(bcx, expr::trans(bcx, expr));
                bcx = trans_arg_datum(bcx,
                                      arg_datum,
                                      callee, fn_ty, &mut arg_idx,
                                      arg_cleanup_scope,
                                      llargs);
            }
        }
        ArgOverloadedOp(lhs, rhs) => {
            bcx = trans_arg_datum(bcx, lhs,
                                  callee, fn_ty, &mut arg_idx,
                                  arg_cleanup_scope,
                                  llargs);

            if let Some(rhs) = rhs {
                bcx = trans_arg_datum(bcx, rhs,
                                      callee, fn_ty, &mut arg_idx,
                                      arg_cleanup_scope,
                                      llargs);
            }
        }
        ArgVals(vs) => {
            match *callee {
                Virtual(idx) => {
                    // vs[0] is the data pointer, vs[1] the vtable; fetch
                    // the method's fn pointer out of the vtable slot and
                    // drop the vtable from the argument list.
                    llargs.push(vs[0]);

                    let fn_ptr = meth::get_virtual_method(bcx, vs[1], idx);
                    let llty = fn_ty.llvm_type(bcx.ccx()).ptr_to();
                    *callee = Fn(PointerCast(bcx, fn_ptr, llty));
                    llargs.extend_from_slice(&vs[2..]);
                }
                _ => llargs.extend_from_slice(vs)
            }
        }
    }

    bcx
}
937
/// Translates a single argument datum into zero or more entries of
/// `llargs`, honoring the ABI decisions recorded in `fn_ty.args`
/// (indirect passing, casts, padding, ignored args). `next_idx` tracks
/// the current position in `fn_ty.args`; fat pointers consume two slots.
fn trans_arg_datum<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               arg_datum: Datum<'tcx, Expr>,
                               callee: &mut CalleeData,
                               fn_ty: &FnType,
                               next_idx: &mut usize,
                               arg_cleanup_scope: cleanup::ScopeId,
                               llargs: &mut Vec<ValueRef>)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_arg_datum");
    let mut bcx = bcx;

    debug!("trans_arg_datum({:?})", arg_datum);

    let arg = &fn_ty.args[*next_idx];
    *next_idx += 1;

    // Fill padding with undef value, where applicable.
    if let Some(ty) = arg.pad {
        llargs.push(C_undef(ty));
    }

    // Determine whether we want a by-ref datum even if not appropriate.
    let want_by_ref = arg.is_indirect() || arg.cast.is_some();

    let fat_ptr = common::type_is_fat_ptr(bcx.tcx(), arg_datum.ty);
    let (by_ref, val) = if fat_ptr && !bcx.fcx.type_needs_drop(arg_datum.ty) {
        (true, arg_datum.val)
    } else {
        // Make this an rvalue, since we are going to be
        // passing ownership.
        let arg_datum = unpack_datum!(
            bcx, arg_datum.to_rvalue_datum(bcx, "arg"));

        // Now that arg_datum is owned, get it into the appropriate
        // mode (ref vs value).
        let arg_datum = unpack_datum!(bcx, if want_by_ref {
            arg_datum.to_ref_datum(bcx)
        } else {
            arg_datum.to_appropriate_datum(bcx)
        });

        // Technically, ownership of val passes to the callee.
        // However, we must cleanup should we panic before the
        // callee is actually invoked.
        (arg_datum.kind.is_by_ref(),
         arg_datum.add_clean(bcx.fcx, arg_cleanup_scope))
    };

    if arg.is_ignore() {
        return bcx;
    }

    debug!("--- trans_arg_datum passing {:?}", Value(val));

    if fat_ptr {
        // Fat pointers should be passed without any transformations.
        assert!(!arg.is_indirect() && arg.cast.is_none());
        llargs.push(Load(bcx, expr::get_dataptr(bcx, val)));

        // The metadata half occupies the next ABI arg slot.
        let info_arg = &fn_ty.args[*next_idx];
        *next_idx += 1;
        assert!(!info_arg.is_indirect() && info_arg.cast.is_none());
        let info = Load(bcx, expr::get_meta(bcx, val));

        if let Virtual(idx) = *callee {
            // We have to grab the fn pointer from the vtable when
            // handling the first argument, ensure that here.
            assert_eq!(*next_idx, 2);
            assert!(info_arg.is_ignore());
            let fn_ptr = meth::get_virtual_method(bcx, info, idx);
            let llty = fn_ty.llvm_type(bcx.ccx()).ptr_to();
            *callee = Fn(PointerCast(bcx, fn_ptr, llty));
        } else {
            assert!(!info_arg.is_ignore());
            llargs.push(info);
        }
        return bcx;
    }

    let mut val = val;
    if by_ref && !arg.is_indirect() {
        // Have to load the argument, maybe while casting it.
        if arg.original_ty == Type::i1(bcx.ccx()) {
            // We store bools as i8 so we need to truncate to i1.
            val = LoadRangeAssert(bcx, val, 0, 2, llvm::False);
            val = Trunc(bcx, val, arg.original_ty);
        } else if let Some(ty) = arg.cast {
            val = Load(bcx, PointerCast(bcx, val, ty.ptr_to()));
            if !bcx.unreachable.get() {
                let llalign = llalign_of_min(bcx.ccx(), arg.ty);
                unsafe {
                    llvm::LLVMSetAlignment(val, llalign);
                }
            }
        } else {
            val = Load(bcx, val);
        }
    }

    llargs.push(val);
    bcx
}