// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(non_camel_case_types, non_snake_case)]

//! Code that is useful in various trans modules.

pub use self::ExprOrMethodCall::*;

use session::Session;
use llvm;
use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef};
use llvm::{True, False, Bool};
use middle::cfg;
use middle::def;
use middle::infer;
use middle::lang_items::LangItem;
use middle::mem_categorization as mc;
use middle::region;
use middle::subst::{self, Subst, Substs};
use trans::base;
use trans::build;
use trans::cleanup;
use trans::consts;
use trans::datum;
use trans::debuginfo::{self, DebugLoc};
use trans::declare;
use trans::machine;
use trans::monomorphize;
use trans::type_::Type;
use trans::type_of;
use middle::traits;
use middle::ty::{self, HasProjectionTypes, Ty};
use middle::ty_fold;
use middle::ty_fold::{TypeFolder, TypeFoldable};
use rustc::ast_map::{PathElem, PathName};
use util::nodemap::{FnvHashMap, NodeMap};

use arena::TypedArena;
use libc::{c_uint, c_char};
use std::ffi::CString;
use std::cell::{Cell, RefCell};
use std::result::Result as StdResult;
use std::vec::Vec;
use syntax::ast;
use syntax::codemap::{DUMMY_SP, Span};
use syntax::parse::token::InternedString;
use syntax::parse::token;
use util::common::memoized;
use util::nodemap::FnvHashSet;

pub use trans::context::CrateContext;

/// Returns an equivalent value with all free regions removed (note
/// that late-bound regions remain, because they are important for
/// subtyping, but they are anonymized and normalized as well). This
/// is a stronger, caching version of `ty_fold::erase_regions`.
pub fn erase_regions<'tcx,T>(cx: &ty::ctxt<'tcx>, value: &T) -> T
    where T : TypeFoldable<'tcx>
{
    let value1 = value.fold_with(&mut RegionEraser(cx));
    debug!("erase_regions({:?}) = {:?}",
           value, value1);
    return value1;

    struct RegionEraser<'a, 'tcx: 'a>(&'a ty::ctxt<'tcx>);

    impl<'a, 'tcx> TypeFolder<'tcx> for RegionEraser<'a, 'tcx> {
        fn tcx(&self) -> &ty::ctxt<'tcx> { self.0 }

        fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
            match self.tcx().normalized_cache.borrow().get(&ty).cloned() {
                None => {}
                Some(u) => return u
            }

            let t_norm = ty_fold::super_fold_ty(self, ty);
            self.tcx().normalized_cache.borrow_mut().insert(ty, t_norm);
            return t_norm;
        }

        fn fold_existential_bounds(&mut self, s: &ty::ExistentialBounds<'tcx>)
                                   -> ty::ExistentialBounds<'tcx> {
            let mut s = ty_fold::super_fold_existential_bounds(self, s);

            // this annoying flag messes up trans normalization
            s.region_bound_will_change = false;

            s
        }

        fn fold_binder<T>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T>
            where T : TypeFoldable<'tcx>
        {
            let u = ty::anonymize_late_bound_regions(self.tcx(), t);
            ty_fold::super_fold_binder(self, &u)
        }

        fn fold_region(&mut self, r: ty::Region) -> ty::Region {
            // because late-bound regions affect subtyping, we can't
            // erase the bound/free distinction, but we can replace
            // all free regions with 'static.
            //
            // Note that we *CAN* replace early-bound regions -- the
            // type system never "sees" those, they get substituted
            // away. In trans, they will always be erased to 'static
            // whenever a substitution occurs.
            match r {
                ty::ReLateBound(..) => r,
                _ => ty::ReStatic
            }
        }

        fn fold_substs(&mut self,
                       substs: &subst::Substs<'tcx>)
                       -> subst::Substs<'tcx> {
            subst::Substs { regions: subst::ErasedRegions,
                            types: substs.types.fold_with(self) }
        }
    }
}

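// Illustrative note (added commentary, not from the original source): under the
// rules in `fold_region` above, a free region such as the one in `&'a i32` is
// erased to `&'static i32`, while a late-bound region in a signature like
// `for<'a> fn(&'a i32) -> &'a i32` survives (merely anonymized), because it
// still matters for subtyping. A hypothetical call site, assuming a `tcx` and
// an interned `ty` are already in hand:
//
//     // let erased = erase_regions(tcx, &ty);
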
/// Is the type's representation size known at compile time?
pub fn type_is_sized<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
    ty::type_is_sized(None, tcx, DUMMY_SP, ty)
}

pub fn type_is_fat_ptr<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.sty {
        ty::TyRawPtr(ty::mt{ty, ..}) |
        ty::TyRef(_, ty::mt{ty, ..}) |
        ty::TyBox(ty) => {
            !type_is_sized(cx, ty)
        }
        _ => {
            false
        }
    }
}

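// Added example (not in the original source): in surface-Rust terms, the fat
// pointers recognized here are pointers to unsized data, which carry a length
// or vtable next to the data pointer:
//
//     let thin: &i32 = &0;                 // one word: just a data pointer
//     let slice: &[u8] = &[1, 2, 3];       // two words: pointer + length
//     let obj: &std::fmt::Debug = &5u8;    // two words: pointer + vtable
//
// For the pointer types matched above, `type_is_fat_ptr` returns true exactly
// when the pointee type is unsized.
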
// Some things don't need cleanups during unwinding because the
// thread can free them all at once later. Currently only things
// that only contain scalars and shared boxes can avoid unwind
// cleanups.
pub fn type_needs_unwind_cleanup<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    return memoized(ccx.needs_unwind_cleanup_cache(), ty, |ty| {
        type_needs_unwind_cleanup_(ccx.tcx(), ty, &mut FnvHashSet())
    });

    fn type_needs_unwind_cleanup_<'tcx>(tcx: &ty::ctxt<'tcx>,
                                        ty: Ty<'tcx>,
                                        tycache: &mut FnvHashSet<Ty<'tcx>>)
                                        -> bool
    {
        // Prevent infinite recursion
        if !tycache.insert(ty) {
            return false;
        }

        let mut needs_unwind_cleanup = false;
        ty::maybe_walk_ty(ty, |ty| {
            needs_unwind_cleanup |= match ty.sty {
                ty::TyBool | ty::TyInt(_) | ty::TyUint(_) |
                ty::TyFloat(_) | ty::TyTuple(_) | ty::TyRawPtr(_) => false,

                ty::TyEnum(did, substs) =>
                    ty::enum_variants(tcx, did).iter().any(|v|
                        v.args.iter().any(|&aty| {
                            let t = aty.subst(tcx, substs);
                            type_needs_unwind_cleanup_(tcx, t, tycache)
                        })
                    ),

                _ => true
            };
            !needs_unwind_cleanup
        });
        needs_unwind_cleanup
    }
}

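// Added example (not from the original source): by the match above, a value of
// type `(u32, f64, *const u8)` is built only from scalars and raw pointers and
// so needs no unwind cleanup, whereas anything owning heap data (a `TyStruct`
// such as `String`, say) falls into the `_ => true` arm. Hypothetical checks,
// assuming a `ccx` and the interned types are in scope:
//
//     // type_needs_unwind_cleanup(ccx, tuple_of_scalars_ty)  => false
//     // type_needs_unwind_cleanup(ccx, string_ty)            => true
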
/// If `type_needs_drop` returns true, then `ty` is definitely
/// non-copy and *might* have a destructor attached; if it returns
/// false, then `ty` definitely has no destructor (i.e. no drop glue).
///
/// (Note that this implies that if `ty` has a destructor attached,
/// then `type_needs_drop` will definitely return `true` for `ty`.)
pub fn type_needs_drop<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
    type_needs_drop_given_env(cx, ty, &ty::empty_parameter_environment(cx))
}

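// Added example (not from the original source): `Copy` data such as `i32`,
// `&str`, or `(u8, u8)` never needs drop glue, while owning types such as
// `String`, `Vec<u8>`, or `Box<i32>` do. A hypothetical query, assuming a
// `tcx` and an interned `ty`:
//
//     // let needs_glue = type_needs_drop(tcx, ty);
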
/// Core implementation of type_needs_drop, potentially making use of
/// and/or updating caches held in the `param_env`.
fn type_needs_drop_given_env<'a,'tcx>(cx: &ty::ctxt<'tcx>,
                                      ty: Ty<'tcx>,
                                      param_env: &ty::ParameterEnvironment<'a,'tcx>) -> bool {
    // Issue #22536: We first query type_moves_by_default. It sees a
    // normalized version of the type, and therefore will definitely
    // know whether the type implements Copy (and thus needs no
    // cleanup/drop/zeroing) ...
    let implements_copy = !ty::type_moves_by_default(&param_env, DUMMY_SP, ty);

    if implements_copy { return false; }

    // ... (issue #22536 continued) but as an optimization, still use
    // prior logic of asking if the `needs_drop` bit is set; we need
    // not zero non-Copy types if they have no destructor.

    // FIXME(#22815): Note that calling `ty::type_contents` is a
    // conservative heuristic; it may report that `needs_drop` is set
    // when the type does not actually have a destructor associated
    // with it. But since `ty` absolutely did not have the `Copy`
    // bound attached (see above), it is sound to treat it as having a
    // destructor (e.g. zero its memory on move).

    let contents = ty::type_contents(cx, ty);
    debug!("type_needs_drop ty={:?} contents={:?}", ty, contents);
    contents.needs_drop(cx)
}

fn type_is_newtype_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.sty {
        ty::TyStruct(def_id, substs) => {
            let fields = ty::lookup_struct_fields(ccx.tcx(), def_id);
            fields.len() == 1 && {
                let ty = ty::lookup_field_type(ccx.tcx(), def_id, fields[0].id, substs);
                let ty = monomorphize::normalize_associated_type(ccx.tcx(), &ty);
                type_is_immediate(ccx, ty)
            }
        }
        _ => false
    }
}

pub fn type_is_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use trans::machine::llsize_of_alloc;
    use trans::type_of::sizing_type_of;

    let tcx = ccx.tcx();
    let simple = ty::type_is_scalar(ty) ||
        ty::type_is_unique(ty) || ty::type_is_region_ptr(ty) ||
        type_is_newtype_immediate(ccx, ty) ||
        ty::type_is_simd(tcx, ty);
    if simple && !type_is_fat_ptr(tcx, ty) {
        return true;
    }
    if !type_is_sized(tcx, ty) {
        return false;
    }
    match ty.sty {
        ty::TyStruct(..) | ty::TyEnum(..) | ty::TyTuple(..) | ty::TyArray(_, _) |
        ty::TyClosure(..) => {
            let llty = sizing_type_of(ccx, ty);
            llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type())
        }
        _ => type_is_zero_size(ccx, ty)
    }
}

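// Added note (not from the original source): "immediate" roughly means the
// value is handled by value in an LLVM register rather than through memory.
// By the logic above, scalars, thin pointers, and aggregates no larger than
// the target's `int` type (e.g. a newtype struct wrapping a `u32`) count as
// immediate; fat pointers and unsized types do not. Hypothetical checks,
// assuming a `ccx` and the interned types:
//
//     // type_is_immediate(ccx, u32_ty)        => true
//     // type_is_immediate(ccx, str_slice_ty)  => false (fat pointer)
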
/// Identify types which have size zero at runtime.
pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use trans::machine::llsize_of_alloc;
    use trans::type_of::sizing_type_of;
    let llty = sizing_type_of(ccx, ty);
    llsize_of_alloc(ccx, llty) == 0
}

/// Identifies types which we declare to be equivalent to `void` in C for the purpose of function
/// return types. These are `()`, the bottom type, and uninhabited enums. Note that all such types
/// are also zero-size, but not all zero-size types use a `void` return type (in order to aid with
/// C ABI compatibility).
pub fn return_type_is_void(ccx: &CrateContext, ty: Ty) -> bool {
    ty::type_is_nil(ty) || ty::type_is_empty(ccx.tcx(), ty)
}

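// Added example (not from the original source): a function returning `()` and
// a function returning an uninhabited enum such as `enum Void {}` both lower
// to a C-style `void` return here, while a zero-sized struct like
// `struct Unit;` is neither nil nor empty and so does not, for the ABI
// reasons noted above.
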
/// Generates a unique symbol based off the name given. This is used to create
/// unique symbols for things like closures.
pub fn gensym_name(name: &str) -> PathElem {
    let num = token::gensym(name).usize();
    // use one colon which will get translated to a period by the mangler, and
    // we're guaranteed that `num` is globally unique for this crate.
    PathName(token::gensym(&format!("{}:{}", name, num)))
}

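// Added example (not from the original source): for a call like
// `gensym_name("closure")`, `num` might be 4711 (hypothetical value), so the
// generated path element is the gensym'd string "closure:4711"; the mangler
// later turns the colon into a period.
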
/*
* A note on nomenclature of linking: "extern", "foreign", and "upcall".
*
* An "extern" is an LLVM symbol we wind up emitting an undefined external
* reference to. This means "we don't have the thing in this compilation unit,
* please make sure you link it in at runtime". This could be a reference to
* C code found in a C library, or rust code found in a rust crate.
*
* Most "externs" are implicitly declared (automatically) as a result of a
* user declaring an extern _module_ dependency; this causes the rust driver
* to locate an extern crate, scan its compilation metadata, and emit extern
* declarations for any symbols used by the declaring crate.
*
* A "foreign" is an extern that references C (or other non-rust ABI) code.
* There is no metadata to scan for extern references so in these cases either
* a header-digester like bindgen, or manual function prototypes, have to
* serve as declarators. So these are usually given explicitly as prototype
* declarations, in rust code, with ABI attributes on them noting which ABI to
* link via.
*
* An "upcall" is a foreign call generated by the compiler (not corresponding
* to any user-written call in the code) into the runtime library, to perform
* some helper task such as bringing a task to life, allocating memory, etc.
*
*/

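// Added illustration (not from the original source), in surface-Rust terms:
//
//     // "extern": symbols pulled in from another Rust crate's metadata
//     // extern crate collections;
//
//     // "foreign": a non-Rust-ABI symbol declared by hand
//     // extern "C" {
//     //     fn puts(s: *const u8) -> i32;   // illustrative prototype only
//     // }
//
// Upcalls, by contrast, are calls the compiler itself emits into the runtime
// support library; they never appear in user code.
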
#[derive(Copy, Clone)]
pub struct NodeIdAndSpan {
    pub id: ast::NodeId,
    pub span: Span,
}

pub fn expr_info(expr: &ast::Expr) -> NodeIdAndSpan {
    NodeIdAndSpan { id: expr.id, span: expr.span }
}

pub struct BuilderRef_res {
    pub b: BuilderRef,
}

impl Drop for BuilderRef_res {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(self.b);
        }
    }
}

pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {
    BuilderRef_res {
        b: b
    }
}

pub type ExternMap = FnvHashMap<String, ValueRef>;

pub fn validate_substs(substs: &Substs) {
    assert!(substs.types.all(|t| !ty::type_needs_infer(*t)));
}

// work around bizarre resolve errors
type RvalueDatum<'tcx> = datum::Datum<'tcx, datum::Rvalue>;
pub type LvalueDatum<'tcx> = datum::Datum<'tcx, datum::Lvalue>;

// Function context. Every LLVM function we create will have one of
// these.
pub struct FunctionContext<'a, 'tcx: 'a> {
    // The ValueRef returned from a call to llvm::LLVMAddFunction; the
    // address of the first instruction in the sequence of
    // instructions for this function that will go in the .text
    // section of the executable we're generating.
    pub llfn: ValueRef,

    // always an empty parameter-environment
    pub param_env: ty::ParameterEnvironment<'a, 'tcx>,

    // The environment argument in a closure.
    pub llenv: Option<ValueRef>,

    // A pointer to where to store the return value. If the return type is
    // immediate, this points to an alloca in the function. Otherwise, it's a
    // pointer to the hidden first parameter of the function. After function
    // construction, this should always be Some.
    pub llretslotptr: Cell<Option<ValueRef>>,

    // These pub elements: "hoisted basic blocks" containing
    // administrative activities that have to happen in only one place in
    // the function, due to LLVM's quirks.
    // A marker for the place where we want to insert the function's static
    // allocas, so that LLVM will coalesce them into a single alloca call.
    pub alloca_insert_pt: Cell<Option<ValueRef>>,
    pub llreturn: Cell<Option<BasicBlockRef>>,

    // If the function has any nested returns, including something like:
    // fn foo() -> Option<Foo> { Some(Foo { x: return None }) }, then
    // we use a separate alloca for each return
    pub needs_ret_allocas: bool,

    // The value alloca'd for calls to upcalls.rust_personality. Used when
    // outputting the resume instruction.
    pub personality: Cell<Option<ValueRef>>,

    // True if the caller expects this fn to use the out pointer to
    // return. Either way, your code should write into the slot llretslotptr
    // points to, but if this value is false, that slot will be a local alloca.
    pub caller_expects_out_pointer: bool,

    // Maps the DefIds for local variables to the allocas created for
    // them in llallocas.
    pub lllocals: RefCell<NodeMap<LvalueDatum<'tcx>>>,

    // Same as above, but for closure upvars
    pub llupvars: RefCell<NodeMap<ValueRef>>,

    // The NodeId of the function, or -1 if it doesn't correspond to
    // a user-defined function.
    pub id: ast::NodeId,

    // If this function is being monomorphized, this contains the type
    // substitutions used.
    pub param_substs: &'tcx Substs<'tcx>,

    // The source span and nesting context where this function comes from, for
    // error reporting and symbol generation.
    pub span: Option<Span>,

    // The arena that blocks are allocated from.
    pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,

    // This function's enclosing crate context.
    pub ccx: &'a CrateContext<'a, 'tcx>,

    // Used and maintained by the debuginfo module.
    pub debug_context: debuginfo::FunctionDebugContext,

    // Cleanup scopes.
    pub scopes: RefCell<Vec<cleanup::CleanupScope<'a, 'tcx>>>,

    pub cfg: Option<cfg::CFG>,
}

impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
    pub fn arg_offset(&self) -> usize {
        self.env_arg_pos() + if self.llenv.is_some() { 1 } else { 0 }
    }

    pub fn env_arg_pos(&self) -> usize {
        if self.caller_expects_out_pointer {
            1
        } else {
            0
        }
    }

    pub fn cleanup(&self) {
        unsafe {
            llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt
                                                     .get()
                                                     .unwrap());
        }
    }

    pub fn get_llreturn(&self) -> BasicBlockRef {
        if self.llreturn.get().is_none() {

            self.llreturn.set(Some(unsafe {
                llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn,
                                                    "return\0".as_ptr() as *const _)
            }))
        }

        self.llreturn.get().unwrap()
    }

    pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>,
                        output: ty::FnOutput<'tcx>,
                        name: &str) -> ValueRef {
        if self.needs_ret_allocas {
            base::alloca_no_lifetime(bcx, match output {
                ty::FnConverging(output_type) => type_of::type_of(bcx.ccx(), output_type),
                ty::FnDiverging => Type::void(bcx.ccx())
            }, name)
        } else {
            self.llretslotptr.get().unwrap()
        }
    }

    pub fn new_block(&'a self,
                     is_lpad: bool,
                     name: &str,
                     opt_node_id: Option<ast::NodeId>)
                     -> Block<'a, 'tcx> {
        unsafe {
            let name = CString::new(name).unwrap();
            let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),
                                                           self.llfn,
                                                           name.as_ptr());
            BlockS::new(llbb, is_lpad, opt_node_id, self)
        }
    }

    pub fn new_id_block(&'a self,
                        name: &str,
                        node_id: ast::NodeId)
                        -> Block<'a, 'tcx> {
        self.new_block(false, name, Some(node_id))
    }

    pub fn new_temp_block(&'a self,
                          name: &str)
                          -> Block<'a, 'tcx> {
        self.new_block(false, name, None)
    }

    pub fn join_blocks(&'a self,
                       id: ast::NodeId,
                       in_cxs: &[Block<'a, 'tcx>])
                       -> Block<'a, 'tcx> {
        let out = self.new_id_block("join", id);
        let mut reachable = false;
        for bcx in in_cxs {
            if !bcx.unreachable.get() {
                build::Br(*bcx, out.llbb, DebugLoc::None);
                reachable = true;
            }
        }
        if !reachable {
            build::Unreachable(out);
        }
        return out;
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T : TypeFoldable<'tcx> + HasProjectionTypes
    {
        monomorphize::apply_param_substs(self.ccx.tcx(),
                                         self.param_substs,
                                         value)
    }

    /// This is the same as `common::type_needs_drop`, except that it
    /// may use or update caches within this `FunctionContext`.
    pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
        type_needs_drop_given_env(self.ccx.tcx(), ty, &self.param_env)
    }
}

// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
// with many basic blocks per function. All the basic blocks attached to a
// function are organized as a directed graph.
pub struct BlockS<'blk, 'tcx: 'blk> {
    // The BasicBlockRef returned from a call to
    // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
    // block to the function pointed to by llfn. We insert
    // instructions into that block by way of this block context.
    // The block pointing to this one in the function's digraph.
    pub llbb: BasicBlockRef,
    pub terminated: Cell<bool>,
    pub unreachable: Cell<bool>,

    // Is this block part of a landing pad?
    pub is_lpad: bool,

    // AST node-id associated with this block, if any. Used for
    // debugging purposes only.
    pub opt_node_id: Option<ast::NodeId>,

    // The function context for the function to which this block is
    // attached.
    pub fcx: &'blk FunctionContext<'blk, 'tcx>,
}

pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;

impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
    pub fn new(llbb: BasicBlockRef,
               is_lpad: bool,
               opt_node_id: Option<ast::NodeId>,
               fcx: &'blk FunctionContext<'blk, 'tcx>)
               -> Block<'blk, 'tcx> {
        fcx.block_arena.alloc(BlockS {
            llbb: llbb,
            terminated: Cell::new(false),
            unreachable: Cell::new(false),
            is_lpad: is_lpad,
            opt_node_id: opt_node_id,
            fcx: fcx
        })
    }

    pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
        self.fcx.ccx
    }
    pub fn tcx(&self) -> &'blk ty::ctxt<'tcx> {
        self.fcx.ccx.tcx()
    }
    pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }

    pub fn name(&self, name: ast::Name) -> String {
        token::get_name(name).to_string()
    }

    pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
        self.tcx().map.node_to_string(id).to_string()
    }

    pub fn def(&self, nid: ast::NodeId) -> def::Def {
        match self.tcx().def_map.borrow().get(&nid) {
            Some(v) => v.full_def(),
            None => {
                self.tcx().sess.bug(&format!(
                    "no def associated with node id {}", nid));
            }
        }
    }

    pub fn val_to_string(&self, val: ValueRef) -> String {
        self.ccx().tn().val_to_string(val)
    }

    pub fn llty_str(&self, ty: Type) -> String {
        self.ccx().tn().type_to_string(ty)
    }

    pub fn to_str(&self) -> String {
        format!("[block {:p}]", self)
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T : TypeFoldable<'tcx> + HasProjectionTypes
    {
        monomorphize::apply_param_substs(self.tcx(),
                                         self.fcx.param_substs,
                                         value)
    }
}

impl<'blk, 'tcx> mc::Typer<'tcx> for BlockS<'blk, 'tcx> {
    fn node_ty(&self, id: ast::NodeId) -> mc::McResult<Ty<'tcx>> {
        Ok(node_id_type(self, id))
    }

    fn expr_ty_adjusted(&self, expr: &ast::Expr) -> mc::McResult<Ty<'tcx>> {
        Ok(expr_ty_adjusted(self, expr))
    }

    fn node_method_ty(&self, method_call: ty::MethodCall) -> Option<Ty<'tcx>> {
        self.tcx()
            .method_map
            .borrow()
            .get(&method_call)
            .map(|method| monomorphize_type(self, method.ty))
    }

    fn node_method_origin(&self, method_call: ty::MethodCall)
                          -> Option<ty::MethodOrigin<'tcx>>
    {
        self.tcx()
            .method_map
            .borrow()
            .get(&method_call)
            .map(|method| method.origin.clone())
    }

    fn adjustments<'a>(&'a self) -> &'a RefCell<NodeMap<ty::AutoAdjustment<'tcx>>> {
        &self.tcx().adjustments
    }

    fn is_method_call(&self, id: ast::NodeId) -> bool {
        self.tcx().method_map.borrow().contains_key(&ty::MethodCall::expr(id))
    }

    fn temporary_scope(&self, rvalue_id: ast::NodeId) -> Option<region::CodeExtent> {
        self.tcx().region_maps.temporary_scope(rvalue_id)
    }

    fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture> {
        Some(self.tcx().upvar_capture_map.borrow().get(&upvar_id).unwrap().clone())
    }

    fn type_moves_by_default(&self, span: Span, ty: Ty<'tcx>) -> bool {
        self.fcx.param_env.type_moves_by_default(span, ty)
    }
}

impl<'blk, 'tcx> ty::ClosureTyper<'tcx> for BlockS<'blk, 'tcx> {
    fn param_env<'a>(&'a self) -> &'a ty::ParameterEnvironment<'a, 'tcx> {
        &self.fcx.param_env
    }

    fn closure_kind(&self,
                    def_id: ast::DefId)
                    -> Option<ty::ClosureKind>
    {
        let typer = NormalizingClosureTyper::new(self.tcx());
        typer.closure_kind(def_id)
    }

    fn closure_type(&self,
                    def_id: ast::DefId,
                    substs: &subst::Substs<'tcx>)
                    -> ty::ClosureTy<'tcx>
    {
        let typer = NormalizingClosureTyper::new(self.tcx());
        typer.closure_type(def_id, substs)
    }

    fn closure_upvars(&self,
                      def_id: ast::DefId,
                      substs: &Substs<'tcx>)
                      -> Option<Vec<ty::ClosureUpvar<'tcx>>>
    {
        let typer = NormalizingClosureTyper::new(self.tcx());
        typer.closure_upvars(def_id, substs)
    }
}

pub struct Result<'blk, 'tcx: 'blk> {
    pub bcx: Block<'blk, 'tcx>,
    pub val: ValueRef
}

impl<'b, 'tcx> Result<'b, 'tcx> {
    pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> {
        Result {
            bcx: bcx,
            val: val,
        }
    }
}

pub fn val_ty(v: ValueRef) -> Type {
    unsafe {
        Type::from_ref(llvm::LLVMTypeOf(v))
    }
}

// LLVM constant constructors.
pub fn C_null(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstNull(t.to_ref())
    }
}

pub fn C_undef(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMGetUndef(t.to_ref())
    }
}

pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool)
    }
}

pub fn C_floating(s: &str, t: Type) -> ValueRef {
    unsafe {
        let s = CString::new(s).unwrap();
        llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr())
    }
}

pub fn C_nil(ccx: &CrateContext) -> ValueRef {
    C_struct(ccx, &[], false)
}

pub fn C_bool(ccx: &CrateContext, val: bool) -> ValueRef {
    C_integral(Type::i1(ccx), val as u64, false)
}

pub fn C_i32(ccx: &CrateContext, i: i32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, true)
}

pub fn C_u32(ccx: &CrateContext, i: u32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, false)
}

pub fn C_u64(ccx: &CrateContext, i: u64) -> ValueRef {
    C_integral(Type::i64(ccx), i, false)
}

pub fn C_int<I: AsI64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_i64();

    match machine::llbitsize_of_real(ccx, ccx.int_type()) {
        32 => assert!(v < (1<<31) && v >= -(1<<31)),
        64 => {},
        n => panic!("unsupported target size: {}", n)
    }

    C_integral(ccx.int_type(), v as u64, true)
}

pub fn C_uint<I: AsU64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_u64();

    match machine::llbitsize_of_real(ccx, ccx.int_type()) {
        32 => assert!(v < (1<<32)),
        64 => {},
        n => panic!("unsupported target size: {}", n)
    }

    C_integral(ccx.int_type(), v, false)
}

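// Added example (not from the original source): the asserts above keep a value
// destined for the target's `int`/`uint` type within range. On a 32-bit
// target, `C_int(ccx, 2_147_483_647)` is accepted but
// `C_int(ccx, 2_147_483_648i64)` would trip the `v < (1<<31)` assertion; on a
// 64-bit target both are accepted.
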
pub trait AsI64 { fn as_i64(self) -> i64; }
pub trait AsU64 { fn as_u64(self) -> u64; }

// FIXME: remove the intptr conversions, because they
// are host-architecture-dependent
impl AsI64 for i64 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for i32 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for isize { fn as_i64(self) -> i64 { self as i64 }}

impl AsU64 for u64 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for u32 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for usize { fn as_u64(self) -> u64 { self as u64 }}

pub fn C_u8(ccx: &CrateContext, i: usize) -> ValueRef {
    C_integral(Type::i8(ccx), i as u64, false)
}


// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> ValueRef {
    unsafe {
        match cx.const_cstr_cache().borrow().get(&s) {
            Some(&llval) => return llval,
            None => ()
        }

        let sc = llvm::LLVMConstStringInContext(cx.llcx(),
                                                s.as_ptr() as *const c_char,
                                                s.len() as c_uint,
                                                !null_terminated as Bool);

        let gsym = token::gensym("str");
        let sym = format!("str{}", gsym.usize());
        let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{
            cx.sess().bug(&format!("symbol `{}` is already defined", sym));
        });
        llvm::LLVMSetInitializer(g, sc);
        llvm::LLVMSetGlobalConstant(g, True);
        llvm::SetLinkage(g, llvm::InternalLinkage);

        cx.const_cstr_cache().borrow_mut().insert(s, g);
        g
    }
}

// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
    let len = s.len();
    let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx));
    C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])
}

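// Added note (not from the original source): the two constructors above mirror
// the two string representations mentioned earlier. `C_cstr` produces a bare
// byte array (NUL-terminated when requested) behind an internal global, while
// `C_str_slice` wraps such a pointer together with its length in the named
// `str_slice` struct, i.e. roughly `{ i8*, int }` -- the layout of a Rust
// `&str`.
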
pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef {
    C_struct_in_context(cx.llcx(), elts, packed)
}

pub fn C_struct_in_context(llcx: ContextRef, elts: &[ValueRef], packed: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstStructInContext(llcx,
                                       elts.as_ptr(), elts.len() as c_uint,
                                       packed as Bool)
    }
}

pub fn C_named_struct(t: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint)
    }
}

pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint);
    }
}

pub fn C_vector(elts: &[ValueRef]) -> ValueRef {
    unsafe {
        return llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint);
    }
}

pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef {
    C_bytes_in_context(cx.llcx(), bytes)
}

pub fn C_bytes_in_context(llcx: ContextRef, bytes: &[u8]) -> ValueRef {
    unsafe {
        let ptr = bytes.as_ptr() as *const c_char;
        return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True);
    }
}

pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
                     -> ValueRef {
    unsafe {
        let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint);

        debug!("const_get_elt(v={}, us={:?}, r={})",
               cx.tn().val_to_string(v), us, cx.tn().val_to_string(r));

        return r;
    }
}

pub fn const_to_int(v: ValueRef) -> i64 {
    unsafe {
        llvm::LLVMConstIntGetSExtValue(v)
    }
}

pub fn const_to_uint(v: ValueRef) -> u64 {
    unsafe {
        llvm::LLVMConstIntGetZExtValue(v)
    }
}

fn is_const_integral(v: ValueRef) -> bool {
    unsafe {
        !llvm::LLVMIsAConstantInt(v).is_null()
    }
}

pub fn const_to_opt_int(v: ValueRef) -> Option<i64> {
    unsafe {
        if is_const_integral(v) {
            Some(llvm::LLVMConstIntGetSExtValue(v))
        } else {
            None
        }
    }
}

pub fn const_to_opt_uint(v: ValueRef) -> Option<u64> {
    unsafe {
        if is_const_integral(v) {
            Some(llvm::LLVMConstIntGetZExtValue(v))
        } else {
            None
        }
    }
}

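// Added note (not from the original source): `const_to_int`/`const_to_uint`
// assume their argument really is an LLVM ConstantInt, while the `_opt`
// variants above first check with `is_const_integral` and return `None`
// otherwise, e.g.:
//
//     // hypothetical: `val` is some ValueRef of unknown kind
//     // match const_to_opt_uint(val) {
//     //     Some(n) => { /* it was a constant integer with value n */ }
//     //     None    => { /* not a constant integer */ }
//     // }
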
pub fn is_undef(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsUndef(val) != False
    }
}

#[allow(dead_code)] // potentially useful
pub fn is_null(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsNull(val) != False
    }
}

pub fn monomorphize_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
    bcx.fcx.monomorphize(&t)
}

pub fn node_id_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, id: ast::NodeId) -> Ty<'tcx> {
    let tcx = bcx.tcx();
    let t = ty::node_id_to_type(tcx, id);
    monomorphize_type(bcx, t)
}

pub fn expr_ty<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &ast::Expr) -> Ty<'tcx> {
    node_id_type(bcx, ex.id)
}

pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &ast::Expr) -> Ty<'tcx> {
    monomorphize_type(bcx, ty::expr_ty_adjusted(bcx.tcx(), ex))
}

/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                    span: Span,
                                    trait_ref: ty::PolyTraitRef<'tcx>)
                                    -> traits::Vtable<'tcx, ()>
{
    let tcx = ccx.tcx();

    // Remove any references to regions; this helps improve caching.
    let trait_ref = erase_regions(tcx, &trait_ref);

    // First check the cache.
    match ccx.trait_cache().borrow().get(&trait_ref) {
        Some(vtable) => {
            info!("Cache hit: {:?}", trait_ref);
            return (*vtable).clone();
        }
        None => { }
    }

    debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}",
           trait_ref, trait_ref.def_id());

    ty::populate_implementations_for_trait_if_necessary(tcx, trait_ref.def_id());
    let infcx = infer::new_infer_ctxt(tcx);

    // Do the initial selection for the obligation. This yields the
    // shallow result we are looking for -- that is, what specific impl.
    let typer = NormalizingClosureTyper::new(tcx);
    let mut selcx = traits::SelectionContext::new(&infcx, &typer);
    let obligation =
        traits::Obligation::new(traits::ObligationCause::misc(span, ast::DUMMY_NODE_ID),
                                trait_ref.to_poly_trait_predicate());
    let selection = match selcx.select(&obligation) {
        Ok(Some(selection)) => selection,
        Ok(None) => {
            // Ambiguity can happen when monomorphizing during trans
            // expands to some humongo type that never occurred
            // statically -- this humongo type can then overflow,
            // leading to an ambiguous result. So report this as an
            // overflow bug, since I believe this is the only case
            // where ambiguity can result.
            debug!("Encountered ambiguity selecting `{:?}` during trans, \
                    presuming due to overflow",
                   trait_ref);
            ccx.sess().span_fatal(
                span,
                "reached the recursion limit during monomorphization");
        }
        Err(e) => {
            tcx.sess.span_bug(
                span,
                &format!("Encountered error `{:?}` selecting `{:?}` during trans",
                         e,
                         trait_ref))
        }
    };

    // Currently, we use a fulfillment context to completely resolve
    // all nested obligations. This is because they can inform the
    // inference of the impl's type parameters.
    let mut fulfill_cx = traits::FulfillmentContext::new(true);
    let vtable = selection.map(|predicate| {
        fulfill_cx.register_predicate_obligation(&infcx, predicate);
    });
    let vtable = drain_fulfillment_cx_or_panic(span, &infcx, &mut fulfill_cx, &vtable);

    info!("Cache miss: {:?}", trait_ref);
    ccx.trait_cache().borrow_mut().insert(trait_ref,
                                          vtable.clone());

    vtable
}

/// Normalizes the predicates and checks whether they hold. If this
/// returns false, then either normalization encountered an error or one
/// of the predicates did not hold. Used when creating vtables to
/// check for unsatisfiable methods.
pub fn normalize_and_test_predicates<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                               predicates: Vec<ty::Predicate<'tcx>>)
                                               -> bool
{
    debug!("normalize_and_test_predicates(predicates={:?})",
           predicates);

    let tcx = ccx.tcx();
    let infcx = infer::new_infer_ctxt(tcx);
    let typer = NormalizingClosureTyper::new(tcx);
    let mut selcx = traits::SelectionContext::new(&infcx, &typer);
    let mut fulfill_cx = traits::FulfillmentContext::new(false);
    let cause = traits::ObligationCause::dummy();
    let traits::Normalized { value: predicates, obligations } =
        traits::normalize(&mut selcx, cause.clone(), &predicates);
    for obligation in obligations {
        fulfill_cx.register_predicate_obligation(&infcx, obligation);
    }
    for predicate in predicates {
        let obligation = traits::Obligation::new(cause.clone(), predicate);
        fulfill_cx.register_predicate_obligation(&infcx, obligation);
    }
    drain_fulfillment_cx(&infcx, &mut fulfill_cx, &()).is_ok()
}

pub struct NormalizingClosureTyper<'a,'tcx:'a> {
    param_env: ty::ParameterEnvironment<'a, 'tcx>
}

impl<'a,'tcx> NormalizingClosureTyper<'a,'tcx> {
    pub fn new(tcx: &'a ty::ctxt<'tcx>) -> NormalizingClosureTyper<'a,'tcx> {
        // Parameter environment is used to give details about type parameters,
        // but since we are in trans, everything is fully monomorphized.
        NormalizingClosureTyper { param_env: ty::empty_parameter_environment(tcx) }
    }
}

impl<'a,'tcx> ty::ClosureTyper<'tcx> for NormalizingClosureTyper<'a,'tcx> {
    fn param_env<'b>(&'b self) -> &'b ty::ParameterEnvironment<'b,'tcx> {
        &self.param_env
    }

    fn closure_kind(&self,
                    def_id: ast::DefId)
                    -> Option<ty::ClosureKind>
    {
        self.param_env.closure_kind(def_id)
    }

    fn closure_type(&self,
                    def_id: ast::DefId,
                    substs: &subst::Substs<'tcx>)
                    -> ty::ClosureTy<'tcx>
    {
        // the substitutions in `substs` are already monomorphized,
        // but we still must normalize associated types
        let closure_ty = self.param_env.tcx.closure_type(def_id, substs);
        monomorphize::normalize_associated_type(self.param_env.tcx, &closure_ty)
    }

    fn closure_upvars(&self,
                      def_id: ast::DefId,
                      substs: &Substs<'tcx>)
                      -> Option<Vec<ty::ClosureUpvar<'tcx>>>
    {
        // the substitutions in `substs` are already monomorphized,
        // but we still must normalize associated types
        let result = ty::closure_upvars(&self.param_env, def_id, substs);
        monomorphize::normalize_associated_type(self.param_env.tcx, &result)
    }
}

pub fn drain_fulfillment_cx_or_panic<'a,'tcx,T>(span: Span,
                                                infcx: &infer::InferCtxt<'a,'tcx>,
                                                fulfill_cx: &mut traits::FulfillmentContext<'tcx>,
                                                result: &T)
                                                -> T
    where T : TypeFoldable<'tcx>
{
    match drain_fulfillment_cx(infcx, fulfill_cx, result) {
        Ok(v) => v,
        Err(errors) => {
            infcx.tcx.sess.span_bug(
                span,
                &format!("Encountered errors `{:?}` fulfilling during trans",
                         errors));
        }
    }
}

/// Finishes processing any obligations that remain in the fulfillment
/// context, and then "freshens" and returns `result`. This is
/// primarily used during normalization and other cases where
/// processing the obligations in `fulfill_cx` may cause type
/// inference variables that appear in `result` to be unified, and
/// hence we need to process those obligations to get the complete
/// picture of the type.
pub fn drain_fulfillment_cx<'a,'tcx,T>(infcx: &infer::InferCtxt<'a,'tcx>,
                                       fulfill_cx: &mut traits::FulfillmentContext<'tcx>,
                                       result: &T)
                                       -> StdResult<T,Vec<traits::FulfillmentError<'tcx>>>
    where T : TypeFoldable<'tcx>
{
    debug!("drain_fulfillment_cx(result={:?})",
           result);

    // In principle, we only need to do this so long as `result`
    // contains unbound type parameters. It could be a slight
    // optimization to stop iterating early.
    let typer = NormalizingClosureTyper::new(infcx.tcx);
    match fulfill_cx.select_all_or_error(infcx, &typer) {
        Ok(()) => { }
        Err(errors) => {
            return Err(errors);
        }
    }

    // Use freshen to simultaneously replace all type variables with
    // their bindings and replace all regions with 'static. This is
    // sort of overkill because we do not expect there to be any
    // unbound type variables, hence no `TyFresh` types should ever be
    // inserted.
    Ok(result.fold_with(&mut infcx.freshener()))
}

// Key used to lookup values supplied for type parameters in an expr.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ExprOrMethodCall {
    // Type parameters for a path like `None::<int>`
    ExprId(ast::NodeId),

    // Type parameters for a method call like `a.foo::<int>()`
    MethodCallKey(ty::MethodCall)
}

pub fn node_id_substs<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                node: ExprOrMethodCall,
                                param_substs: &subst::Substs<'tcx>)
                                -> subst::Substs<'tcx> {
    let tcx = ccx.tcx();

    let substs = match node {
        ExprId(id) => {
            ty::node_id_item_substs(tcx, id).substs
        }
        MethodCallKey(method_call) => {
            tcx.method_map.borrow().get(&method_call).unwrap().substs.clone()
        }
    };

    if substs.types.any(|t| ty::type_needs_infer(*t)) {
        tcx.sess.bug(&format!("type parameters for node {:?} include inference types: {:?}",
                              node, substs));
    }

    monomorphize::apply_param_substs(tcx,
                                     param_substs,
                                     &substs.erase_regions())
}

pub fn langcall(bcx: Block,
                span: Option<Span>,
                msg: &str,
                li: LangItem)
                -> ast::DefId {
    match bcx.tcx().lang_items.require(li) {
        Ok(id) => id,
        Err(s) => {
            let msg = format!("{} {}", msg, s);
            match span {
                Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]),
                None => bcx.tcx().sess.fatal(&msg[..]),
            }
        }
    }
}