// src/librustc_trans/trans/common.rs

// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(non_camel_case_types, non_snake_case)]

//! Code that is useful in various trans modules.

pub use self::ExprOrMethodCall::*;

use session::Session;
use llvm;
use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef};
use llvm::{True, False, Bool};
use middle::cfg;
use middle::def;
use middle::infer;
use middle::lang_items::LangItem;
use middle::subst::{self, Substs};
use trans::base;
use trans::build;
use trans::callee;
use trans::cleanup;
use trans::consts;
use trans::datum;
use trans::debuginfo::{self, DebugLoc};
use trans::declare;
use trans::machine;
use trans::monomorphize;
use trans::type_::Type;
use trans::type_of;
use middle::traits;
use middle::ty::{self, HasTypeFlags, Ty};
use middle::ty_fold;
use middle::ty_fold::{TypeFolder, TypeFoldable};
use rustc::ast_map::{PathElem, PathName};
use util::nodemap::{FnvHashMap, NodeMap};

use arena::TypedArena;
use libc::{c_uint, c_char};
use std::ffi::CString;
use std::cell::{Cell, RefCell};
use std::result::Result as StdResult;
use std::vec::Vec;
use syntax::ast;
use syntax::codemap::{DUMMY_SP, Span};
use syntax::parse::token::InternedString;
use syntax::parse::token;

pub use trans::context::CrateContext;

/// Returns an equivalent value with all free regions removed (note
/// that late-bound regions remain, because they are important for
/// subtyping, but they are anonymized and normalized as well). This
/// is a stronger, caching version of `ty_fold::erase_regions`.
pub fn erase_regions<'tcx,T>(cx: &ty::ctxt<'tcx>, value: &T) -> T
    where T : TypeFoldable<'tcx>
{
    let value1 = value.fold_with(&mut RegionEraser(cx));
    debug!("erase_regions({:?}) = {:?}",
           value, value1);
    return value1;

    struct RegionEraser<'a, 'tcx: 'a>(&'a ty::ctxt<'tcx>);

    impl<'a, 'tcx> TypeFolder<'tcx> for RegionEraser<'a, 'tcx> {
        fn tcx(&self) -> &ty::ctxt<'tcx> { self.0 }

        fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
            match self.tcx().normalized_cache.borrow().get(&ty).cloned() {
                None => {}
                Some(u) => return u
            }

            let t_norm = ty_fold::super_fold_ty(self, ty);
            self.tcx().normalized_cache.borrow_mut().insert(ty, t_norm);
            return t_norm;
        }

        fn fold_binder<T>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T>
            where T : TypeFoldable<'tcx>
        {
            let u = self.tcx().anonymize_late_bound_regions(t);
            ty_fold::super_fold_binder(self, &u)
        }

        fn fold_region(&mut self, r: ty::Region) -> ty::Region {
            // because late-bound regions affect subtyping, we can't
            // erase the bound/free distinction, but we can replace
            // all free regions with 'static.
            //
            // Note that we *CAN* replace early-bound regions -- the
            // type system never "sees" those, they get substituted
            // away. In trans, they will always be erased to 'static
            // whenever a substitution occurs.
            match r {
                ty::ReLateBound(..) => r,
                _ => ty::ReStatic
            }
        }

        fn fold_substs(&mut self,
                       substs: &subst::Substs<'tcx>)
                       -> subst::Substs<'tcx> {
            subst::Substs { regions: subst::ErasedRegions,
                            types: substs.types.fold_with(self) }
        }
    }
}
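
// Editorial note (not part of the original source): a typical call site for the
// cached erasure above appears in `fulfill_obligation` later in this file,
// where free regions are stripped before consulting the trait-selection cache:
//
//     let trait_ref = erase_regions(tcx, &trait_ref);
//
// After this call all free regions in `trait_ref` are `'static`, so otherwise
// identical trait references hash and compare equal, improving cache hit rates.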
116
62682a34 117/// Is the type's representation size known at compile time?
1a4d82fc 118pub fn type_is_sized<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
c1a9b12d 119 ty.is_sized(&tcx.empty_parameter_environment(), DUMMY_SP)
1a4d82fc
JJ
120}
121
122pub fn type_is_fat_ptr<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
123 match ty.sty {
c1a9b12d
SL
124 ty::TyRawPtr(ty::TypeAndMut{ty, ..}) |
125 ty::TyRef(_, ty::TypeAndMut{ty, ..}) |
62682a34 126 ty::TyBox(ty) => {
1a4d82fc
JJ
127 !type_is_sized(cx, ty)
128 }
129 _ => {
130 false
131 }
132 }
133}
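
// Editorial note (not part of the original source): a pointer is "fat" when it
// must carry metadata alongside the address because its target is unsized, e.g.
// `&[u8]` or `&str` (pointer + length) and `&Trait` or `Box<Trait>` (pointer +
// vtable). Thin pointers such as `&u8` or `*const i32` point to sized data, so
// `type_is_fat_ptr` returns false for them.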

/// If `type_needs_drop` returns true, then `ty` is definitely
/// non-copy and *might* have a destructor attached; if it returns
/// false, then `ty` definitely has no destructor (i.e. no drop glue).
///
/// (Note that this implies that if `ty` has a destructor attached,
/// then `type_needs_drop` will definitely return `true` for `ty`.)
pub fn type_needs_drop<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
    type_needs_drop_given_env(cx, ty, &cx.empty_parameter_environment())
}

/// Core implementation of type_needs_drop, potentially making use of
/// and/or updating caches held in the `param_env`.
fn type_needs_drop_given_env<'a,'tcx>(cx: &ty::ctxt<'tcx>,
                                      ty: Ty<'tcx>,
                                      param_env: &ty::ParameterEnvironment<'a,'tcx>) -> bool {
    // Issue #22536: We first query type_moves_by_default. It sees a
    // normalized version of the type, and therefore will definitely
    // know whether the type implements Copy (and thus needs no
    // cleanup/drop/zeroing) ...
    let implements_copy = !ty.moves_by_default(param_env, DUMMY_SP);

    if implements_copy { return false; }

    // ... (issue #22536 continued) but as an optimization, still use
    // prior logic of asking if the `needs_drop` bit is set; we need
    // not zero non-Copy types if they have no destructor.

    // FIXME(#22815): Note that calling `ty::type_contents` is a
    // conservative heuristic; it may report that `needs_drop` is set
    // when the type does not actually have a destructor associated
    // with it. But since `ty` definitely does not have the `Copy`
    // bound (see above), it is sound to treat it as having a
    // destructor (e.g. zero its memory on move).

    let contents = ty.type_contents(cx);
    debug!("type_needs_drop ty={:?} contents={:?}", ty, contents);
    contents.needs_drop(cx)
}
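
// Editorial sketch (not part of the original source) of the contract above,
// assuming `cx: &ty::ctxt` and some representative `Ty` values are in scope:
//
//     type_needs_drop(cx, i32_ty);     // false: `i32` is `Copy`
//     type_needs_drop(cx, string_ty);  // true: `String` has a destructor
//
// For a non-`Copy` type without a destructor the answer may still be `true`,
// because the `type_contents` check above is a conservative over-approximation.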
173
85aaf69f 174fn type_is_newtype_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
1a4d82fc 175 match ty.sty {
62682a34 176 ty::TyStruct(def_id, substs) => {
c1a9b12d 177 let fields = ccx.tcx().lookup_struct_fields(def_id);
85aaf69f 178 fields.len() == 1 && {
c1a9b12d 179 let ty = ccx.tcx().lookup_field_type(def_id, fields[0].id, substs);
85aaf69f
SL
180 let ty = monomorphize::normalize_associated_type(ccx.tcx(), &ty);
181 type_is_immediate(ccx, ty)
182 }
1a4d82fc
JJ
183 }
184 _ => false
185 }
186}
187
188pub fn type_is_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
189 use trans::machine::llsize_of_alloc;
190 use trans::type_of::sizing_type_of;
191
192 let tcx = ccx.tcx();
c1a9b12d
SL
193 let simple = ty.is_scalar() ||
194 ty.is_unique() || ty.is_region_ptr() ||
1a4d82fc 195 type_is_newtype_immediate(ccx, ty) ||
c1a9b12d 196 ty.is_simd(tcx);
1a4d82fc
JJ
197 if simple && !type_is_fat_ptr(tcx, ty) {
198 return true;
199 }
200 if !type_is_sized(tcx, ty) {
201 return false;
202 }
203 match ty.sty {
62682a34
SL
204 ty::TyStruct(..) | ty::TyEnum(..) | ty::TyTuple(..) | ty::TyArray(_, _) |
205 ty::TyClosure(..) => {
1a4d82fc
JJ
206 let llty = sizing_type_of(ccx, ty);
207 llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type())
208 }
209 _ => type_is_zero_size(ccx, ty)
210 }
211}
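
// Editorial note (not part of the original source): "immediate" here means the
// value can be handled directly as an LLVM value rather than through a stack
// slot. Per the checks above, scalars, thin pointers and references, boxes,
// SIMD types, single-field newtypes of such types, and sized aggregates no
// larger than the target's `int_type` qualify; fat pointers, unsized types,
// and larger aggregates do not.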

/// Identify types which have size zero at runtime.
pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use trans::machine::llsize_of_alloc;
    use trans::type_of::sizing_type_of;
    let llty = sizing_type_of(ccx, ty);
    llsize_of_alloc(ccx, llty) == 0
}

/// Identifies types which we declare to be equivalent to `void` in C for the purpose of function
/// return types. These are `()`, the bottom (diverging) type, and uninhabited enums. Note that
/// all such types are also zero-size, but not all zero-size types use a `void` return type (in
/// order to aid with C ABI compatibility).
pub fn return_type_is_void(ccx: &CrateContext, ty: Ty) -> bool {
    ty.is_nil() || ty.is_empty(ccx.tcx())
}

/// Generates a unique symbol based off the name given. This is used to create
/// unique symbols for things like closures.
pub fn gensym_name(name: &str) -> PathElem {
    let num = token::gensym(name).usize();
    // use one colon which will get translated to a period by the mangler, and
    // we're guaranteed that `num` is globally unique for this crate.
    PathName(token::gensym(&format!("{}:{}", name, num)))
}

/*
* A note on nomenclature of linking: "extern", "foreign", and "upcall".
*
* An "extern" is an LLVM symbol we wind up emitting an undefined external
* reference to. This means "we don't have the thing in this compilation unit,
* please make sure you link it in at runtime". This could be a reference to
* C code found in a C library, or rust code found in a rust crate.
*
* Most "externs" are implicitly declared (automatically) as a result of a
* user declaring an extern _module_ dependency; this causes the rust driver
* to locate an extern crate, scan its compilation metadata, and emit extern
* declarations for any symbols used by the declaring crate.
*
* A "foreign" is an extern that references C (or other non-rust ABI) code.
* There is no metadata to scan for extern references so in these cases either
* a header-digester like bindgen, or manual function prototypes, have to
* serve as declarators. So these are usually given explicitly as prototype
* declarations, in rust code, with ABI attributes on them noting which ABI to
* link via.
*
* An "upcall" is a foreign call generated by the compiler (not corresponding
* to any user-written call in the code) into the runtime library, to perform
* some helper task such as bringing a task to life, allocating memory, etc.
*
*/

#[derive(Copy, Clone)]
pub struct NodeIdAndSpan {
    pub id: ast::NodeId,
    pub span: Span,
}

pub fn expr_info(expr: &ast::Expr) -> NodeIdAndSpan {
    NodeIdAndSpan { id: expr.id, span: expr.span }
}

pub struct BuilderRef_res {
    pub b: BuilderRef,
}

impl Drop for BuilderRef_res {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(self.b);
        }
    }
}

pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {
    BuilderRef_res {
        b: b
    }
}

pub type ExternMap = FnvHashMap<String, ValueRef>;

pub fn validate_substs(substs: &Substs) {
    assert!(!substs.types.needs_infer());
}

// work around bizarre resolve errors
type RvalueDatum<'tcx> = datum::Datum<'tcx, datum::Rvalue>;
pub type LvalueDatum<'tcx> = datum::Datum<'tcx, datum::Lvalue>;

#[derive(Clone, Debug)]
struct HintEntry<'tcx> {
    // The datum for the dropflag-hint itself; note that many
    // source-level Lvalues will be associated with the same
    // dropflag-hint datum.
    datum: cleanup::DropHintDatum<'tcx>,
}

pub struct DropFlagHintsMap<'tcx> {
    // Maps NodeId for expressions that read/write unfragmented state
    // to that state's drop-flag "hint." (A stack-local hint
    // indicates either that (1.) it is certain that no-drop is
    // needed, or (2.) inline drop-flag must be consulted.)
    node_map: NodeMap<HintEntry<'tcx>>,
}

impl<'tcx> DropFlagHintsMap<'tcx> {
    pub fn new() -> DropFlagHintsMap<'tcx> { DropFlagHintsMap { node_map: NodeMap() } }
    pub fn has_hint(&self, id: ast::NodeId) -> bool { self.node_map.contains_key(&id) }
    pub fn insert(&mut self, id: ast::NodeId, datum: cleanup::DropHintDatum<'tcx>) {
        self.node_map.insert(id, HintEntry { datum: datum });
    }
    pub fn hint_datum(&self, id: ast::NodeId) -> Option<cleanup::DropHintDatum<'tcx>> {
        self.node_map.get(&id).map(|t|t.datum)
    }
}

// Function context. Every LLVM function we create will have one of
// these.
pub struct FunctionContext<'a, 'tcx: 'a> {
    // The ValueRef returned from a call to llvm::LLVMAddFunction; the
    // address of the first instruction in the sequence of
    // instructions for this function that will go in the .text
    // section of the executable we're generating.
    pub llfn: ValueRef,

    // always an empty parameter-environment NOTE: @jroesch another use of ParamEnv
    pub param_env: ty::ParameterEnvironment<'a, 'tcx>,

    // The environment argument in a closure.
    pub llenv: Option<ValueRef>,

    // A pointer to where to store the return value. If the return type is
    // immediate, this points to an alloca in the function. Otherwise, it's a
    // pointer to the hidden first parameter of the function. After function
    // construction, this should always be Some.
    pub llretslotptr: Cell<Option<ValueRef>>,

    // These pub elements: "hoisted basic blocks" containing
    // administrative activities that have to happen in only one place in
    // the function, due to LLVM's quirks.
    // A marker for the place where we want to insert the function's static
    // allocas, so that LLVM will coalesce them into a single alloca call.
    pub alloca_insert_pt: Cell<Option<ValueRef>>,
    pub llreturn: Cell<Option<BasicBlockRef>>,

    // If the function has any nested returns, including something like:
    // fn foo() -> Option<Foo> { Some(Foo { x: return None }) }, then
    // we use a separate alloca for each return.
    pub needs_ret_allocas: bool,

    // The value alloca'd for calls to upcalls.rust_personality. Used when
    // outputting the resume instruction.
    pub personality: Cell<Option<ValueRef>>,

    // True if the caller expects this fn to use the out pointer to
    // return. Either way, your code should write into the slot llretslotptr
    // points to, but if this value is false, that slot will be a local alloca.
    pub caller_expects_out_pointer: bool,

    // Maps the DefId's for local variables to the allocas created for
    // them in llallocas.
    pub lllocals: RefCell<NodeMap<LvalueDatum<'tcx>>>,

    // Same as above, but for closure upvars
    pub llupvars: RefCell<NodeMap<ValueRef>>,

    // Carries info about drop-flags for local bindings (longer term,
    // paths) for the code being compiled.
    pub lldropflag_hints: RefCell<DropFlagHintsMap<'tcx>>,

    // The NodeId of the function, or -1 if it doesn't correspond to
    // a user-defined function.
    pub id: ast::NodeId,

    // If this function is being monomorphized, this contains the type
    // substitutions used.
    pub param_substs: &'tcx Substs<'tcx>,

    // The source span and nesting context where this function comes from, for
    // error reporting and symbol generation.
    pub span: Option<Span>,

    // The arena that blocks are allocated from.
    pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,

    // This function's enclosing crate context.
    pub ccx: &'a CrateContext<'a, 'tcx>,

    // Used and maintained by the debuginfo module.
    pub debug_context: debuginfo::FunctionDebugContext,

    // Cleanup scopes.
    pub scopes: RefCell<Vec<cleanup::CleanupScope<'a, 'tcx>>>,

    pub cfg: Option<cfg::CFG>,
}

impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
    pub fn arg_offset(&self) -> usize {
        self.env_arg_pos() + if self.llenv.is_some() { 1 } else { 0 }
    }

    pub fn env_arg_pos(&self) -> usize {
        if self.caller_expects_out_pointer {
            1
        } else {
            0
        }
    }

    pub fn cleanup(&self) {
        unsafe {
            llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt
                                                     .get()
                                                     .unwrap());
        }
    }

    pub fn get_llreturn(&self) -> BasicBlockRef {
        if self.llreturn.get().is_none() {

            self.llreturn.set(Some(unsafe {
                llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn,
                                                    "return\0".as_ptr() as *const _)
            }))
        }

        self.llreturn.get().unwrap()
    }

    pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>,
                        output: ty::FnOutput<'tcx>,
                        name: &str) -> ValueRef {
        if self.needs_ret_allocas {
            base::alloca_no_lifetime(bcx, match output {
                ty::FnConverging(output_type) => type_of::type_of(bcx.ccx(), output_type),
                ty::FnDiverging => Type::void(bcx.ccx())
            }, name)
        } else {
            self.llretslotptr.get().unwrap()
        }
    }

    pub fn new_block(&'a self,
                     is_lpad: bool,
                     name: &str,
                     opt_node_id: Option<ast::NodeId>)
                     -> Block<'a, 'tcx> {
        unsafe {
            let name = CString::new(name).unwrap();
            let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),
                                                           self.llfn,
                                                           name.as_ptr());
            BlockS::new(llbb, is_lpad, opt_node_id, self)
        }
    }

    pub fn new_id_block(&'a self,
                        name: &str,
                        node_id: ast::NodeId)
                        -> Block<'a, 'tcx> {
        self.new_block(false, name, Some(node_id))
    }

    pub fn new_temp_block(&'a self,
                          name: &str)
                          -> Block<'a, 'tcx> {
        self.new_block(false, name, None)
    }

    pub fn join_blocks(&'a self,
                       id: ast::NodeId,
                       in_cxs: &[Block<'a, 'tcx>])
                       -> Block<'a, 'tcx> {
        let out = self.new_id_block("join", id);
        let mut reachable = false;
        for bcx in in_cxs {
            if !bcx.unreachable.get() {
                build::Br(*bcx, out.llbb, DebugLoc::None);
                reachable = true;
            }
        }
        if !reachable {
            build::Unreachable(out);
        }
        return out;
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T : TypeFoldable<'tcx> + HasTypeFlags
    {
        monomorphize::apply_param_substs(self.ccx.tcx(),
                                         self.param_substs,
                                         value)
    }

    /// This is the same as `common::type_needs_drop`, except that it
    /// may use or update caches within this `FunctionContext`.
    pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
        type_needs_drop_given_env(self.ccx.tcx(), ty, &self.param_env)
    }

    pub fn eh_personality(&self) -> ValueRef {
        // The exception handling personality function.
        //
        // If our compilation unit has the `eh_personality` lang item somewhere
        // within it, then we just need to translate that. Otherwise, we're
        // building an rlib which will depend on some upstream implementation of
        // this function, so we just codegen a generic reference to it. We don't
        // specify any of the types for the function, we just make it a symbol
        // that LLVM can later use.
        //
        // Note that MSVC is a little special here in that we don't use the
        // `eh_personality` lang item at all. Currently LLVM has support for
        // both Dwarf and SEH unwind mechanisms for MSVC targets and uses the
        // *name of the personality function* to decide what kind of unwind side
        // tables/landing pads to emit. It looks like Dwarf is used by default,
        // injecting a dependency on the `_Unwind_Resume` symbol for resuming
        // an "exception", but for MSVC we want to force SEH. This means that we
        // can't actually have the personality function be our standard
        // `rust_eh_personality` function, but rather we wired it up to the
        // CRT's custom personality function, which forces LLVM to consider
        // landing pads as "landing pads for SEH".
        let target = &self.ccx.sess().target.target;
        match self.ccx.tcx().lang_items.eh_personality() {
            Some(def_id) if !target.options.is_like_msvc => {
                callee::trans_fn_ref(self.ccx, def_id, ExprId(0),
                                     self.param_substs).val
            }
            _ => {
                let mut personality = self.ccx.eh_personality().borrow_mut();
                match *personality {
                    Some(llpersonality) => llpersonality,
                    None => {
                        let name = if !target.options.is_like_msvc {
                            "rust_eh_personality"
                        } else if target.arch == "x86" {
                            "_except_handler3"
                        } else {
                            "__C_specific_handler"
                        };
                        let fty = Type::variadic_func(&[], &Type::i32(self.ccx));
                        let f = declare::declare_cfn(self.ccx, name, fty,
                                                     self.ccx.tcx().types.i32);
                        *personality = Some(f);
                        f
                    }
                }
            }
        }
    }

    /// By default, LLVM lowers `resume` instructions into calls to `_Unwind_Resume`
    /// defined in libgcc, however, unlike personality routines, there is no easy way to
    /// override that symbol. This method injects a local-scoped `_Unwind_Resume` function
    /// which immediately defers to the user-defined `eh_unwind_resume` lang item.
    pub fn inject_unwind_resume_hook(&self) {
        let ccx = self.ccx;
        if !ccx.sess().target.target.options.custom_unwind_resume ||
           ccx.unwind_resume_hooked().get() {
            return;
        }

        let new_resume = match ccx.tcx().lang_items.eh_unwind_resume() {
            Some(did) => callee::trans_fn_ref(ccx, did, ExprId(0), &self.param_substs).val,
            None => {
                let fty = Type::variadic_func(&[], &Type::void(self.ccx));
                declare::declare_cfn(self.ccx, "rust_eh_unwind_resume", fty,
                                     self.ccx.tcx().mk_nil())
            }
        };

        unsafe {
            let resume_type = Type::func(&[Type::i8(ccx).ptr_to()], &Type::void(ccx));
            let old_resume = llvm::LLVMAddFunction(ccx.llmod(),
                                                   "_Unwind_Resume\0".as_ptr() as *const _,
                                                   resume_type.to_ref());
            llvm::SetLinkage(old_resume, llvm::InternalLinkage);
            let llbb = llvm::LLVMAppendBasicBlockInContext(ccx.llcx(),
                                                           old_resume,
                                                           "\0".as_ptr() as *const _);
            let builder = ccx.builder();
            builder.position_at_end(llbb);
            builder.call(new_resume, &[llvm::LLVMGetFirstParam(old_resume)], None);
            builder.unreachable(); // it should never return

            // Until the DwarfEHPrepare pass has run, _Unwind_Resume is not referenced by any
            // live code and is subject to dead code elimination. Here we add _Unwind_Resume
            // to @llvm.used to prevent that.
            let i8p_ty = Type::i8p(ccx);
            let used_ty = Type::array(&i8p_ty, 1);
            let used = llvm::LLVMAddGlobal(ccx.llmod(), used_ty.to_ref(),
                                           "llvm.used\0".as_ptr() as *const _);
            let old_resume = llvm::LLVMConstBitCast(old_resume, i8p_ty.to_ref());
            llvm::LLVMSetInitializer(used, C_array(i8p_ty, &[old_resume]));
            llvm::SetLinkage(used, llvm::AppendingLinkage);
            llvm::LLVMSetSection(used, "llvm.metadata\0".as_ptr() as *const _)
        }
        ccx.unwind_resume_hooked().set(true);
    }
}

// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
// with many basic blocks per function. All the basic blocks attached to a
// function are organized as a directed graph.
pub struct BlockS<'blk, 'tcx: 'blk> {
    // The BasicBlockRef returned from a call to
    // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
    // block to the function pointed to by llfn. We insert
    // instructions into that block by way of this block context.
    // The block pointing to this one in the function's digraph.
    pub llbb: BasicBlockRef,
    pub terminated: Cell<bool>,
    pub unreachable: Cell<bool>,

    // Is this block part of a landing pad?
    pub is_lpad: bool,

    // AST node-id associated with this block, if any. Used for
    // debugging purposes only.
    pub opt_node_id: Option<ast::NodeId>,

    // The function context for the function to which this block is
    // attached.
    pub fcx: &'blk FunctionContext<'blk, 'tcx>,
}

pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;

impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
    pub fn new(llbb: BasicBlockRef,
               is_lpad: bool,
               opt_node_id: Option<ast::NodeId>,
               fcx: &'blk FunctionContext<'blk, 'tcx>)
               -> Block<'blk, 'tcx> {
        fcx.block_arena.alloc(BlockS {
            llbb: llbb,
            terminated: Cell::new(false),
            unreachable: Cell::new(false),
            is_lpad: is_lpad,
            opt_node_id: opt_node_id,
            fcx: fcx
        })
    }

    pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
        self.fcx.ccx
    }
    pub fn tcx(&self) -> &'blk ty::ctxt<'tcx> {
        self.fcx.ccx.tcx()
    }
    pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }

    pub fn name(&self, name: ast::Name) -> String {
        name.to_string()
    }

    pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
        self.tcx().map.node_to_string(id).to_string()
    }

    pub fn def(&self, nid: ast::NodeId) -> def::Def {
        match self.tcx().def_map.borrow().get(&nid) {
            Some(v) => v.full_def(),
            None => {
                self.tcx().sess.bug(&format!(
                    "no def associated with node id {}", nid));
            }
        }
    }

    pub fn val_to_string(&self, val: ValueRef) -> String {
        self.ccx().tn().val_to_string(val)
    }

    pub fn llty_str(&self, ty: Type) -> String {
        self.ccx().tn().type_to_string(ty)
    }

    pub fn to_str(&self) -> String {
        format!("[block {:p}]", self)
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T : TypeFoldable<'tcx> + HasTypeFlags
    {
        monomorphize::apply_param_substs(self.tcx(),
                                         self.fcx.param_substs,
                                         value)
    }
}

pub struct Result<'blk, 'tcx: 'blk> {
    pub bcx: Block<'blk, 'tcx>,
    pub val: ValueRef
}

impl<'b, 'tcx> Result<'b, 'tcx> {
    pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> {
        Result {
            bcx: bcx,
            val: val,
        }
    }
}

pub fn val_ty(v: ValueRef) -> Type {
    unsafe {
        Type::from_ref(llvm::LLVMTypeOf(v))
    }
}

// LLVM constant constructors.
pub fn C_null(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstNull(t.to_ref())
    }
}

pub fn C_undef(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMGetUndef(t.to_ref())
    }
}

pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool)
    }
}

pub fn C_floating(s: &str, t: Type) -> ValueRef {
    unsafe {
        let s = CString::new(s).unwrap();
        llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr())
    }
}

pub fn C_nil(ccx: &CrateContext) -> ValueRef {
    C_struct(ccx, &[], false)
}

pub fn C_bool(ccx: &CrateContext, val: bool) -> ValueRef {
    C_integral(Type::i1(ccx), val as u64, false)
}

pub fn C_i32(ccx: &CrateContext, i: i32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, true)
}

pub fn C_u32(ccx: &CrateContext, i: u32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, false)
}

pub fn C_u64(ccx: &CrateContext, i: u64) -> ValueRef {
    C_integral(Type::i64(ccx), i, false)
}

pub fn C_int<I: AsI64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_i64();

    match machine::llbitsize_of_real(ccx, ccx.int_type()) {
        32 => assert!(v < (1<<31) && v >= -(1<<31)),
        64 => {},
        n => panic!("unsupported target size: {}", n)
    }

    C_integral(ccx.int_type(), v as u64, true)
}

pub fn C_uint<I: AsU64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_u64();

    match machine::llbitsize_of_real(ccx, ccx.int_type()) {
        32 => assert!(v < (1<<32)),
        64 => {},
        n => panic!("unsupported target size: {}", n)
    }

    C_integral(ccx.int_type(), v, false)
}
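
// Illustrative usage (editorial, not part of the original source), assuming a
// `ccx: &CrateContext` is in scope:
//
//     let t = C_bool(ccx, true);      // i1 1
//     let n = C_i32(ccx, -1);         // i32, sign-extended
//     let len = C_uint(ccx, 7usize);  // target-sized integer, zero-extended
//
// Note that `C_int`/`C_uint` assert that the value fits in the target's
// `int_type`, so an out-of-range value on a 32-bit target panics during trans.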

pub trait AsI64 { fn as_i64(self) -> i64; }
pub trait AsU64 { fn as_u64(self) -> u64; }

// FIXME: remove the intptr conversions, because they
// are host-architecture-dependent
impl AsI64 for i64 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for i32 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for isize { fn as_i64(self) -> i64 { self as i64 }}

impl AsU64 for u64 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for u32 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for usize { fn as_u64(self) -> u64 { self as u64 }}

pub fn C_u8(ccx: &CrateContext, i: usize) -> ValueRef {
    C_integral(Type::i8(ccx), i as u64, false)
}


// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> ValueRef {
    unsafe {
        match cx.const_cstr_cache().borrow().get(&s) {
            Some(&llval) => return llval,
            None => ()
        }

        let sc = llvm::LLVMConstStringInContext(cx.llcx(),
                                                s.as_ptr() as *const c_char,
                                                s.len() as c_uint,
                                                !null_terminated as Bool);

        let gsym = token::gensym("str");
        let sym = format!("str{}", gsym.usize());
        let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{
            cx.sess().bug(&format!("symbol `{}` is already defined", sym));
        });
        llvm::LLVMSetInitializer(g, sc);
        llvm::LLVMSetGlobalConstant(g, True);
        llvm::SetLinkage(g, llvm::InternalLinkage);

        cx.const_cstr_cache().borrow_mut().insert(s, g);
        g
    }
}

// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
    let len = s.len();
    let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx));
    C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])
}

pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef {
    C_struct_in_context(cx.llcx(), elts, packed)
}

pub fn C_struct_in_context(llcx: ContextRef, elts: &[ValueRef], packed: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstStructInContext(llcx,
                                       elts.as_ptr(), elts.len() as c_uint,
                                       packed as Bool)
    }
}

pub fn C_named_struct(t: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint)
    }
}

pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint);
    }
}
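
// Editorial sketch (not part of the original source): these constant
// constructors compose. For example, `C_str_slice` above builds a `str_slice`
// struct from a pointer and a length, and `inject_unwind_resume_hook` builds
// the `llvm.used` initializer via `C_array`:
//
//     // assuming `cx: &CrateContext` and an i8* constant `ptr` are in scope
//     let slice = C_named_struct(cx.tn().find_type("str_slice").unwrap(),
//                                &[ptr, C_uint(cx, 11usize)]);
//     let used_init = C_array(Type::i8p(cx), &[ptr]);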

pub fn C_vector(elts: &[ValueRef]) -> ValueRef {
    unsafe {
        return llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint);
    }
}

pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef {
    C_bytes_in_context(cx.llcx(), bytes)
}

pub fn C_bytes_in_context(llcx: ContextRef, bytes: &[u8]) -> ValueRef {
    unsafe {
        let ptr = bytes.as_ptr() as *const c_char;
        return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True);
    }
}

pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
                     -> ValueRef {
    unsafe {
        let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint);

        debug!("const_get_elt(v={}, us={:?}, r={})",
               cx.tn().val_to_string(v), us, cx.tn().val_to_string(r));

        return r;
    }
}

pub fn const_to_int(v: ValueRef) -> i64 {
    unsafe {
        llvm::LLVMConstIntGetSExtValue(v)
    }
}

pub fn const_to_uint(v: ValueRef) -> u64 {
    unsafe {
        llvm::LLVMConstIntGetZExtValue(v)
    }
}

fn is_const_integral(v: ValueRef) -> bool {
    unsafe {
        !llvm::LLVMIsAConstantInt(v).is_null()
    }
}

pub fn const_to_opt_int(v: ValueRef) -> Option<i64> {
    unsafe {
        if is_const_integral(v) {
            Some(llvm::LLVMConstIntGetSExtValue(v))
        } else {
            None
        }
    }
}

pub fn const_to_opt_uint(v: ValueRef) -> Option<u64> {
    unsafe {
        if is_const_integral(v) {
            Some(llvm::LLVMConstIntGetZExtValue(v))
        } else {
            None
        }
    }
}

pub fn is_undef(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsUndef(val) != False
    }
}

#[allow(dead_code)] // potentially useful
pub fn is_null(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsNull(val) != False
    }
}

pub fn monomorphize_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
    bcx.fcx.monomorphize(&t)
}

pub fn node_id_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, id: ast::NodeId) -> Ty<'tcx> {
    let tcx = bcx.tcx();
    let t = tcx.node_id_to_type(id);
    monomorphize_type(bcx, t)
}

pub fn expr_ty<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &ast::Expr) -> Ty<'tcx> {
    node_id_type(bcx, ex.id)
}

pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &ast::Expr) -> Ty<'tcx> {
    monomorphize_type(bcx, bcx.tcx().expr_ty_adjusted(ex))
}

/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                    span: Span,
                                    trait_ref: ty::PolyTraitRef<'tcx>)
                                    -> traits::Vtable<'tcx, ()>
{
    let tcx = ccx.tcx();

    // Remove any references to regions; this helps improve caching.
    let trait_ref = erase_regions(tcx, &trait_ref);

    // First check the cache.
    match ccx.trait_cache().borrow().get(&trait_ref) {
        Some(vtable) => {
            info!("Cache hit: {:?}", trait_ref);
            return (*vtable).clone();
        }
        None => { }
    }

    debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}",
           trait_ref, trait_ref.def_id());


    // Do the initial selection for the obligation. This yields the
    // shallow result we are looking for -- that is, what specific impl.
    let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables);
    let mut selcx = traits::SelectionContext::new(&infcx);

    let obligation =
        traits::Obligation::new(traits::ObligationCause::misc(span, ast::DUMMY_NODE_ID),
                                trait_ref.to_poly_trait_predicate());
    let selection = match selcx.select(&obligation) {
        Ok(Some(selection)) => selection,
        Ok(None) => {
            // Ambiguity can happen when monomorphizing during trans
            // expands to some humongo type that never occurred
            // statically -- this humongo type can then overflow,
            // leading to an ambiguous result. So report this as an
            // overflow bug, since I believe this is the only case
            // where ambiguity can result.
            debug!("Encountered ambiguity selecting `{:?}` during trans, \
                    presuming due to overflow",
                   trait_ref);
            ccx.sess().span_fatal(
                span,
                "reached the recursion limit during monomorphization");
        }
        Err(e) => {
            tcx.sess.span_bug(
                span,
                &format!("Encountered error `{:?}` selecting `{:?}` during trans",
                         e,
                         trait_ref))
        }
    };

    // Currently, we use a fulfillment context to completely resolve
    // all nested obligations. This is because they can inform the
    // inference of the impl's type parameters.
    let mut fulfill_cx = infcx.fulfillment_cx.borrow_mut();
    let vtable = selection.map(|predicate| {
        fulfill_cx.register_predicate_obligation(&infcx, predicate);
    });
    let vtable = erase_regions(tcx,
        &drain_fulfillment_cx_or_panic(span, &infcx, &mut fulfill_cx, &vtable)
    );

    info!("Cache miss: {:?} => {:?}", trait_ref, vtable);

    ccx.trait_cache().borrow_mut().insert(trait_ref, vtable.clone());

    vtable
}

/// Normalizes the predicates and checks whether they hold. If this
/// returns false, then either normalize encountered an error or one
/// of the predicates did not hold. Used when creating vtables to
/// check for unsatisfiable methods.
pub fn normalize_and_test_predicates<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                               predicates: Vec<ty::Predicate<'tcx>>)
                                               -> bool
{
    debug!("normalize_and_test_predicates(predicates={:?})",
           predicates);

    let tcx = ccx.tcx();
    let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables);
    let mut selcx = traits::SelectionContext::new(&infcx);
    let mut fulfill_cx = infcx.fulfillment_cx.borrow_mut();
    let cause = traits::ObligationCause::dummy();
    let traits::Normalized { value: predicates, obligations } =
        traits::normalize(&mut selcx, cause.clone(), &predicates);
    for obligation in obligations {
        fulfill_cx.register_predicate_obligation(&infcx, obligation);
    }
    for predicate in predicates {
        let obligation = traits::Obligation::new(cause.clone(), predicate);
        fulfill_cx.register_predicate_obligation(&infcx, obligation);
    }
    drain_fulfillment_cx(&infcx, &mut fulfill_cx, &()).is_ok()
}

pub fn drain_fulfillment_cx_or_panic<'a,'tcx,T>(span: Span,
                                                infcx: &infer::InferCtxt<'a,'tcx>,
                                                fulfill_cx: &mut traits::FulfillmentContext<'tcx>,
                                                result: &T)
                                                -> T
    where T : TypeFoldable<'tcx>
{
    match drain_fulfillment_cx(infcx, fulfill_cx, result) {
        Ok(v) => v,
        Err(errors) => {
            infcx.tcx.sess.span_bug(
                span,
                &format!("Encountered errors `{:?}` fulfilling during trans",
                         errors));
        }
    }
}

/// Finishes processing any obligations that remain in the fulfillment
/// context, and then "freshens" and returns `result`. This is
/// primarily used during normalization and other cases where
/// processing the obligations in `fulfill_cx` may cause type
/// inference variables that appear in `result` to be unified, and
/// hence we need to process those obligations to get the complete
/// picture of the type.
pub fn drain_fulfillment_cx<'a,'tcx,T>(infcx: &infer::InferCtxt<'a,'tcx>,
                                       fulfill_cx: &mut traits::FulfillmentContext<'tcx>,
                                       result: &T)
                                       -> StdResult<T,Vec<traits::FulfillmentError<'tcx>>>
    where T : TypeFoldable<'tcx>
{
    debug!("drain_fulfillment_cx(result={:?})",
           result);

    // In principle, we only need to do this so long as `result`
    // contains unbound type parameters. It could be a slight
    // optimization to stop iterating early.
    match fulfill_cx.select_all_or_error(infcx) {
        Ok(()) => { }
        Err(errors) => {
            return Err(errors);
        }
    }

    // Use freshen to simultaneously replace all type variables with
    // their bindings and replace all regions with 'static. This is
    // sort of overkill because we do not expect there to be any
    // unbound type variables, hence no `TyFresh` types should ever be
    // inserted.
    Ok(result.fold_with(&mut infcx.freshener()))
}

// Key used to lookup values supplied for type parameters in an expr.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ExprOrMethodCall {
    // Type parameters for a path like `None::<int>`
    ExprId(ast::NodeId),

    // Type parameters for a method call like `a.foo::<int>()`
    MethodCallKey(ty::MethodCall)
}

pub fn node_id_substs<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                node: ExprOrMethodCall,
                                param_substs: &subst::Substs<'tcx>)
                                -> subst::Substs<'tcx> {
    let tcx = ccx.tcx();

    let substs = match node {
        ExprId(id) => {
            tcx.node_id_item_substs(id).substs
        }
        MethodCallKey(method_call) => {
            tcx.tables.borrow().method_map[&method_call].substs.clone()
        }
    };

    if substs.types.needs_infer() {
        tcx.sess.bug(&format!("type parameters for node {:?} include inference types: {:?}",
                              node, substs));
    }

    monomorphize::apply_param_substs(tcx,
                                     param_substs,
                                     &substs.erase_regions())
}

pub fn langcall(bcx: Block,
                span: Option<Span>,
                msg: &str,
                li: LangItem)
                -> ast::DefId {
    match bcx.tcx().lang_items.require(li) {
        Ok(id) => id,
        Err(s) => {
            let msg = format!("{} {}", msg, s);
            match span {
                Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]),
                None => bcx.tcx().sess.fatal(&msg[..]),
            }
        }
    }
}