// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(non_camel_case_types, non_snake_case)]

//! Code that is useful in various trans modules.

pub use self::ExprOrMethodCall::*;

use session::Session;
use llvm;
use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef, TypeKind};
use llvm::{True, False, Bool};
use middle::cfg;
use middle::def;
use middle::def_id::DefId;
use middle::infer;
use middle::lang_items::LangItem;
use middle::subst::{self, Substs};
use trans::base;
use trans::build;
use trans::callee;
use trans::cleanup;
use trans::consts;
use trans::datum;
use trans::debuginfo::{self, DebugLoc};
use trans::declare;
use trans::machine;
use trans::monomorphize;
use trans::type_::Type;
use trans::type_of;
use middle::traits;
use middle::ty::{self, HasTypeFlags, Ty};
use middle::ty::fold::{TypeFolder, TypeFoldable};
use rustc_front::hir;
use rustc::mir::repr::Mir;
use util::nodemap::{FnvHashMap, NodeMap};

use arena::TypedArena;
use libc::{c_uint, c_char};
use std::ffi::CString;
use std::cell::{Cell, RefCell};
use std::vec::Vec;
use syntax::ast;
use syntax::codemap::{DUMMY_SP, Span};
use syntax::parse::token::InternedString;
use syntax::parse::token;

pub use trans::context::CrateContext;

/// Is the type's representation size known at compile time?
pub fn type_is_sized<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
    ty.is_sized(&tcx.empty_parameter_environment(), DUMMY_SP)
}

pub fn type_is_fat_ptr<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.sty {
        ty::TyRawPtr(ty::TypeAndMut{ty, ..}) |
        ty::TyRef(_, ty::TypeAndMut{ty, ..}) |
        ty::TyBox(ty) => {
            !type_is_sized(cx, ty)
        }
        _ => {
            false
        }
    }
}

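// A minimal user-level sketch of what "fat pointer" means above (not part of
// the original file; the function name is made up for illustration): pointers
// to unsized data such as slices carry an extra word (length or vtable), so
// they are twice the size of a thin pointer.
#[allow(dead_code)]
fn _fat_ptr_sketch() {
    use std::mem::size_of;
    assert_eq!(size_of::<&[u8]>(), 2 * size_of::<&u8>());         // slice ref: ptr + len
    assert_eq!(size_of::<Box<[u8]>>(), 2 * size_of::<Box<u8>>()); // boxed slice: ptr + len
}
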
/// If `type_needs_drop` returns true, then `ty` is definitely
/// non-copy and *might* have a destructor attached; if it returns
/// false, then `ty` definitely has no destructor (i.e. no drop glue).
///
/// (Note that this implies that if `ty` has a destructor attached,
/// then `type_needs_drop` will definitely return `true` for `ty`.)
pub fn type_needs_drop<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
    type_needs_drop_given_env(cx, ty, &cx.empty_parameter_environment())
}

/// Core implementation of type_needs_drop, potentially making use of
/// and/or updating caches held in the `param_env`.
fn type_needs_drop_given_env<'a,'tcx>(cx: &ty::ctxt<'tcx>,
                                      ty: Ty<'tcx>,
                                      param_env: &ty::ParameterEnvironment<'a,'tcx>) -> bool {
    // Issue #22536: We first query type_moves_by_default. It sees a
    // normalized version of the type, and therefore will definitely
    // know whether the type implements Copy (and thus needs no
    // cleanup/drop/zeroing) ...
    let implements_copy = !ty.moves_by_default(param_env, DUMMY_SP);

    if implements_copy { return false; }

    // ... (issue #22536 continued) but as an optimization, still use
    // prior logic of asking if the `needs_drop` bit is set; we need
    // not zero non-Copy types if they have no destructor.

    // FIXME(#22815): Note that calling `ty::type_contents` is a
    // conservative heuristic; it may report that `needs_drop` is set
    // when the type does not actually have a destructor associated
    // with it. But since `ty` absolutely did not have the `Copy`
    // bound attached (see above), it is sound to treat it as having a
    // destructor (e.g. zero its memory on move).

    let contents = ty.type_contents(cx);
    debug!("type_needs_drop ty={:?} contents={:?}", ty, contents);
    contents.needs_drop(cx)
}

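// Illustrative sketch of the rule above (not part of the original file; the
// types below are made up purely for illustration): a `Copy` type never has a
// destructor and needs no drop glue, while a non-`Copy` type that owns
// resources does.
#[allow(dead_code)]
mod _drop_glue_sketch {
    #[derive(Copy, Clone)]
    struct Plain(u32);       // Copy => definitely no drop glue

    struct Owns(Vec<u32>);   // not Copy, owns heap memory => needs drop glue

    fn _demo() {
        let p = Plain(1);
        let q = p;           // bitwise copy; both remain usable
        let _ = (p, q);

        let v = Owns(vec![1, 2, 3]);
        drop(v);             // drop glue frees the Vec here
    }
}
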
fn type_is_newtype_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.sty {
        ty::TyStruct(def, substs) => {
            let fields = &def.struct_variant().fields;
            fields.len() == 1 && {
                type_is_immediate(ccx, monomorphize::field_ty(ccx.tcx(), substs, &fields[0]))
            }
        }
        _ => false
    }
}

pub fn type_is_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use trans::machine::llsize_of_alloc;
    use trans::type_of::sizing_type_of;

    let tcx = ccx.tcx();
    let simple = ty.is_scalar() ||
        ty.is_unique() || ty.is_region_ptr() ||
        type_is_newtype_immediate(ccx, ty) ||
        ty.is_simd();
    if simple && !type_is_fat_ptr(tcx, ty) {
        return true;
    }
    if !type_is_sized(tcx, ty) {
        return false;
    }
    match ty.sty {
        ty::TyStruct(..) | ty::TyEnum(..) | ty::TyTuple(..) | ty::TyArray(_, _) |
        ty::TyClosure(..) => {
            let llty = sizing_type_of(ccx, ty);
            llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type())
        }
        _ => type_is_zero_size(ccx, ty)
    }
}

/// Identify types which have size zero at runtime.
pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use trans::machine::llsize_of_alloc;
    use trans::type_of::sizing_type_of;
    let llty = sizing_type_of(ccx, ty);
    llsize_of_alloc(ccx, llty) == 0
}

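// Quick user-level illustration of zero-sized types (not part of the original
// file; the function and struct names are made up): `()`, fieldless structs,
// and empty arrays all occupy no storage at runtime.
#[allow(dead_code)]
fn _zero_size_sketch() {
    use std::mem::size_of;
    struct Empty;
    assert_eq!(size_of::<()>(), 0);
    assert_eq!(size_of::<Empty>(), 0);
    assert_eq!(size_of::<[u64; 0]>(), 0);
}
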
/// Identifies types which we declare to be equivalent to `void` in C for the purpose of function
/// return types. These are `()`, the bottom type, and uninhabited enums. Note that all such types
/// are also zero-size, but not all zero-size types use a `void` return type (in order to aid with
/// C ABI compatibility).
pub fn return_type_is_void(ccx: &CrateContext, ty: Ty) -> bool {
    ty.is_nil() || ty.is_empty(ccx.tcx())
}

/// Generates a unique symbol based on the given name. This is used to create
/// unique symbols for things like closures.
pub fn gensym_name(name: &str) -> ast::Name {
    let num = token::gensym(name).0;
    // use one colon, which will get translated to a period by the mangler, and
    // we're guaranteed that `num` is globally unique for this crate.
    token::gensym(&format!("{}:{}", name, num))
}

/*
* A note on nomenclature of linking: "extern", "foreign", and "upcall".
*
* An "extern" is an LLVM symbol we wind up emitting an undefined external
* reference to. This means "we don't have the thing in this compilation unit,
* please make sure you link it in at runtime". This could be a reference to
* C code found in a C library, or rust code found in a rust crate.
*
* Most "externs" are implicitly declared (automatically) as a result of a
* user declaring an extern _module_ dependency; this causes the rust driver
* to locate an extern crate, scan its compilation metadata, and emit extern
* declarations for any symbols used by the declaring crate.
*
* A "foreign" is an extern that references C (or other non-rust ABI) code.
* There is no metadata to scan for extern references, so in these cases either
* a header-digester like bindgen, or manual function prototypes, have to
* serve as declarators. So these are usually given explicitly as prototype
* declarations, in rust code, with ABI attributes on them noting which ABI to
* link via.
*
* An "upcall" is a foreign call generated by the compiler (not corresponding
* to any user-written call in the code) into the runtime library, to perform
* some helper task such as bringing a task to life, allocating memory, etc.
*
*/

#[derive(Copy, Clone)]
pub struct NodeIdAndSpan {
    pub id: ast::NodeId,
    pub span: Span,
}

pub fn expr_info(expr: &hir::Expr) -> NodeIdAndSpan {
    NodeIdAndSpan { id: expr.id, span: expr.span }
}

/// The concrete version of ty::FieldDef. The name is the field index if
/// the field is numeric.
pub struct Field<'tcx>(pub ast::Name, pub Ty<'tcx>);

/// The concrete version of ty::VariantDef
pub struct VariantInfo<'tcx> {
    pub discr: ty::Disr,
    pub fields: Vec<Field<'tcx>>
}

impl<'tcx> VariantInfo<'tcx> {
    pub fn from_ty(tcx: &ty::ctxt<'tcx>,
                   ty: Ty<'tcx>,
                   opt_def: Option<def::Def>)
                   -> Self
    {
        match ty.sty {
            ty::TyStruct(adt, substs) | ty::TyEnum(adt, substs) => {
                let variant = match opt_def {
                    None => adt.struct_variant(),
                    Some(def) => adt.variant_of_def(def)
                };

                VariantInfo {
                    discr: variant.disr_val,
                    fields: variant.fields.iter().map(|f| {
                        Field(f.name, monomorphize::field_ty(tcx, substs, f))
                    }).collect()
                }
            }

            ty::TyTuple(ref v) => {
                VariantInfo {
                    discr: 0,
                    fields: v.iter().enumerate().map(|(i, &t)| {
                        Field(token::intern(&i.to_string()), t)
                    }).collect()
                }
            }

            _ => {
                tcx.sess.bug(&format!(
                    "cannot get field types from the type {:?}",
                    ty));
            }
        }
    }

    /// Return the variant corresponding to a given node (e.g. expr)
    pub fn of_node(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>, id: ast::NodeId) -> Self {
        let node_def = tcx.def_map.borrow().get(&id).map(|v| v.full_def());
        Self::from_ty(tcx, ty, node_def)
    }

    pub fn field_index(&self, name: ast::Name) -> usize {
        self.fields.iter().position(|&Field(n,_)| n == name).unwrap_or_else(|| {
            panic!("unknown field `{}`", name)
        })
    }
}

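// Small illustration of the tuple arm above (not part of the original file;
// the function name is made up): tuple elements have no field names, so they
// are identified by their index rendered as a string ("0", "1", ...),
// matching positional access like `t.0`.
#[allow(dead_code)]
fn _tuple_field_name_sketch() {
    let t = (10u32, "x");
    let _ = (t.0, t.1); // positional field access
    let names: Vec<String> = (0..2).map(|i| i.to_string()).collect();
    assert_eq!(names, ["0", "1"]);
}
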
pub struct BuilderRef_res {
    pub b: BuilderRef,
}

impl Drop for BuilderRef_res {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(self.b);
        }
    }
}

pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {
    BuilderRef_res {
        b: b
    }
}

pub type ExternMap = FnvHashMap<String, ValueRef>;

pub fn validate_substs(substs: &Substs) {
    assert!(!substs.types.needs_infer());
}

// work around bizarre resolve errors
type RvalueDatum<'tcx> = datum::Datum<'tcx, datum::Rvalue>;
pub type LvalueDatum<'tcx> = datum::Datum<'tcx, datum::Lvalue>;

#[derive(Clone, Debug)]
struct HintEntry<'tcx> {
    // The datum for the dropflag-hint itself; note that many
    // source-level Lvalues will be associated with the same
    // dropflag-hint datum.
    datum: cleanup::DropHintDatum<'tcx>,
}

pub struct DropFlagHintsMap<'tcx> {
    // Maps the NodeId of expressions that read/write unfragmented state
    // to that state's drop-flag "hint." (A stack-local hint indicates
    // either that (1.) it is certain that no drop is needed, or (2.) the
    // inline drop-flag must be consulted.)
    node_map: NodeMap<HintEntry<'tcx>>,
}

impl<'tcx> DropFlagHintsMap<'tcx> {
    pub fn new() -> DropFlagHintsMap<'tcx> { DropFlagHintsMap { node_map: NodeMap() } }
    pub fn has_hint(&self, id: ast::NodeId) -> bool { self.node_map.contains_key(&id) }
    pub fn insert(&mut self, id: ast::NodeId, datum: cleanup::DropHintDatum<'tcx>) {
        self.node_map.insert(id, HintEntry { datum: datum });
    }
    pub fn hint_datum(&self, id: ast::NodeId) -> Option<cleanup::DropHintDatum<'tcx>> {
        self.node_map.get(&id).map(|t| t.datum)
    }
}

// Function context. Every LLVM function we create will have one of
// these.
pub struct FunctionContext<'a, 'tcx: 'a> {
    // The MIR for this function. At present, this is optional because
    // we only have MIR available for things that are local to the
    // crate.
    pub mir: Option<&'a Mir<'tcx>>,

    // The ValueRef returned from a call to llvm::LLVMAddFunction; the
    // address of the first instruction in the sequence of
    // instructions for this function that will go in the .text
    // section of the executable we're generating.
    pub llfn: ValueRef,

    // always an empty parameter-environment NOTE: @jroesch another use of ParamEnv
    pub param_env: ty::ParameterEnvironment<'a, 'tcx>,

    // The environment argument in a closure.
    pub llenv: Option<ValueRef>,

    // A pointer to where to store the return value. If the return type is
    // immediate, this points to an alloca in the function. Otherwise, it's a
    // pointer to the hidden first parameter of the function. After function
    // construction, this should always be Some.
    pub llretslotptr: Cell<Option<ValueRef>>,

    // These pub elements: "hoisted basic blocks" containing
    // administrative activities that have to happen in only one place in
    // the function, due to LLVM's quirks.
    // A marker for the place where we want to insert the function's static
    // allocas, so that LLVM will coalesce them into a single alloca call.
    pub alloca_insert_pt: Cell<Option<ValueRef>>,
    pub llreturn: Cell<Option<BasicBlockRef>>,

    // If the function has any nested returns, such as:
    // fn foo() -> Option<Foo> { Some(Foo { x: return None }) },
    // then we use a separate alloca for each return.
    pub needs_ret_allocas: bool,

    // The value alloca'd for calls to upcalls.rust_personality. Used when
    // outputting the resume instruction.
    pub personality: Cell<Option<ValueRef>>,

    // True if the caller expects this fn to use the out pointer to
    // return. Either way, your code should write into the slot llretslotptr
    // points to, but if this value is false, that slot will be a local alloca.
    pub caller_expects_out_pointer: bool,

    // Maps the DefIds for local variables to the allocas created for
    // them in llallocas.
    pub lllocals: RefCell<NodeMap<LvalueDatum<'tcx>>>,

    // Same as above, but for closure upvars.
    pub llupvars: RefCell<NodeMap<ValueRef>>,

    // Carries info about drop-flags for local bindings (longer term,
    // paths) for the code being compiled.
    pub lldropflag_hints: RefCell<DropFlagHintsMap<'tcx>>,

    // The NodeId of the function, or -1 if it doesn't correspond to
    // a user-defined function.
    pub id: ast::NodeId,

    // If this function is being monomorphized, this contains the type
    // substitutions used.
    pub param_substs: &'tcx Substs<'tcx>,

    // The source span and nesting context where this function comes from, for
    // error reporting and symbol generation.
    pub span: Option<Span>,

    // The arena that blocks are allocated from.
    pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,

    // This function's enclosing crate context.
    pub ccx: &'a CrateContext<'a, 'tcx>,

    // Used and maintained by the debuginfo module.
    pub debug_context: debuginfo::FunctionDebugContext,

    // Cleanup scopes.
    pub scopes: RefCell<Vec<cleanup::CleanupScope<'a, 'tcx>>>,

    pub cfg: Option<cfg::CFG>,
}

impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
    pub fn mir(&self) -> &'a Mir<'tcx> {
        self.mir.unwrap()
    }

    pub fn arg_offset(&self) -> usize {
        self.env_arg_pos() + if self.llenv.is_some() { 1 } else { 0 }
    }

    pub fn env_arg_pos(&self) -> usize {
        if self.caller_expects_out_pointer {
            1
        } else {
            0
        }
    }

    pub fn cleanup(&self) {
        unsafe {
            llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt
                                                     .get()
                                                     .unwrap());
        }
    }

    pub fn get_llreturn(&self) -> BasicBlockRef {
        if self.llreturn.get().is_none() {

            self.llreturn.set(Some(unsafe {
                llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn,
                                                    "return\0".as_ptr() as *const _)
            }))
        }

        self.llreturn.get().unwrap()
    }

    pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>,
                        output: ty::FnOutput<'tcx>,
                        name: &str) -> ValueRef {
        if self.needs_ret_allocas {
            base::alloca(bcx, match output {
                ty::FnConverging(output_type) => type_of::type_of(bcx.ccx(), output_type),
                ty::FnDiverging => Type::void(bcx.ccx())
            }, name)
        } else {
            self.llretslotptr.get().unwrap()
        }
    }

    pub fn new_block(&'a self,
                     is_lpad: bool,
                     name: &str,
                     opt_node_id: Option<ast::NodeId>)
                     -> Block<'a, 'tcx> {
        unsafe {
            let name = CString::new(name).unwrap();
            let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),
                                                           self.llfn,
                                                           name.as_ptr());
            BlockS::new(llbb, is_lpad, opt_node_id, self)
        }
    }

    pub fn new_id_block(&'a self,
                        name: &str,
                        node_id: ast::NodeId)
                        -> Block<'a, 'tcx> {
        self.new_block(false, name, Some(node_id))
    }

    pub fn new_temp_block(&'a self,
                          name: &str)
                          -> Block<'a, 'tcx> {
        self.new_block(false, name, None)
    }

    pub fn join_blocks(&'a self,
                       id: ast::NodeId,
                       in_cxs: &[Block<'a, 'tcx>])
                       -> Block<'a, 'tcx> {
        let out = self.new_id_block("join", id);
        let mut reachable = false;
        for bcx in in_cxs {
            if !bcx.unreachable.get() {
                build::Br(*bcx, out.llbb, DebugLoc::None);
                reachable = true;
            }
        }
        if !reachable {
            build::Unreachable(out);
        }
        return out;
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T : TypeFoldable<'tcx> + HasTypeFlags
    {
        monomorphize::apply_param_substs(self.ccx.tcx(),
                                         self.param_substs,
                                         value)
    }

    /// This is the same as `common::type_needs_drop`, except that it
    /// may use or update caches within this `FunctionContext`.
    pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
        type_needs_drop_given_env(self.ccx.tcx(), ty, &self.param_env)
    }

    pub fn eh_personality(&self) -> ValueRef {
        // The exception handling personality function.
        //
        // If our compilation unit has the `eh_personality` lang item somewhere
        // within it, then we just need to translate that. Otherwise, we're
        // building an rlib which will depend on some upstream implementation of
        // this function, so we just codegen a generic reference to it. We don't
        // specify any of the types for the function, we just make it a symbol
        // that LLVM can later use.
        //
        // Note that MSVC is a little special here in that we don't use the
        // `eh_personality` lang item at all. Currently LLVM has support for
        // both Dwarf and SEH unwind mechanisms for MSVC targets and uses the
        // *name of the personality function* to decide what kind of unwind side
        // tables/landing pads to emit. It looks like Dwarf is used by default,
        // injecting a dependency on the `_Unwind_Resume` symbol for resuming
        // an "exception", but for MSVC we want to force SEH. This means that we
        // can't actually have the personality function be our standard
        // `rust_eh_personality` function, but rather we wire it up to the
        // CRT's custom personality function, which forces LLVM to consider
        // landing pads as "landing pads for SEH".
        let target = &self.ccx.sess().target.target;
        match self.ccx.tcx().lang_items.eh_personality() {
            Some(def_id) if !base::wants_msvc_seh(self.ccx.sess()) => {
                callee::trans_fn_ref(self.ccx, def_id, ExprId(0),
                                     self.param_substs).val
            }
            _ => {
                let mut personality = self.ccx.eh_personality().borrow_mut();
                match *personality {
                    Some(llpersonality) => llpersonality,
                    None => {
                        let name = if !base::wants_msvc_seh(self.ccx.sess()) {
                            "rust_eh_personality"
                        } else if target.arch == "x86" {
                            "_except_handler3"
                        } else {
                            "__C_specific_handler"
                        };
                        let fty = Type::variadic_func(&[], &Type::i32(self.ccx));
                        let f = declare::declare_cfn(self.ccx, name, fty,
                                                     self.ccx.tcx().types.i32);
                        *personality = Some(f);
                        f
                    }
                }
            }
        }
    }

    // Returns a ValueRef of the "eh_unwind_resume" lang item if one is defined,
    // otherwise declares it as an external function.
    pub fn eh_unwind_resume(&self) -> ValueRef {
        use trans::attributes;
        assert!(self.ccx.sess().target.target.options.custom_unwind_resume);
        match self.ccx.tcx().lang_items.eh_unwind_resume() {
            Some(def_id) => {
                callee::trans_fn_ref(self.ccx, def_id, ExprId(0),
                                     self.param_substs).val
            }
            None => {
                let mut unwresume = self.ccx.eh_unwind_resume().borrow_mut();
                match *unwresume {
                    Some(llfn) => llfn,
                    None => {
                        let fty = Type::func(&[Type::i8p(self.ccx)], &Type::void(self.ccx));
                        let llfn = declare::declare_fn(self.ccx,
                                                       "rust_eh_unwind_resume",
                                                       llvm::CCallConv,
                                                       fty, ty::FnDiverging);
                        attributes::unwind(llfn, true);
                        *unwresume = Some(llfn);
                        llfn
                    }
                }
            }
        }
    }
}

// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
// with many basic blocks per function. All the basic blocks attached to a
// function are organized as a directed graph.
pub struct BlockS<'blk, 'tcx: 'blk> {
    // The BasicBlockRef returned from a call to
    // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
    // block to the function pointed to by llfn. We insert
    // instructions into that block by way of this block context.
    // The block pointing to this one in the function's digraph.
    pub llbb: BasicBlockRef,
    pub terminated: Cell<bool>,
    pub unreachable: Cell<bool>,

    // Is this block part of a landing pad?
    pub is_lpad: bool,

    // AST node-id associated with this block, if any. Used for
    // debugging purposes only.
    pub opt_node_id: Option<ast::NodeId>,

    // The function context for the function to which this block is
    // attached.
    pub fcx: &'blk FunctionContext<'blk, 'tcx>,
}

pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;

impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
    pub fn new(llbb: BasicBlockRef,
               is_lpad: bool,
               opt_node_id: Option<ast::NodeId>,
               fcx: &'blk FunctionContext<'blk, 'tcx>)
               -> Block<'blk, 'tcx> {
        fcx.block_arena.alloc(BlockS {
            llbb: llbb,
            terminated: Cell::new(false),
            unreachable: Cell::new(false),
            is_lpad: is_lpad,
            opt_node_id: opt_node_id,
            fcx: fcx
        })
    }

    pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
        self.fcx.ccx
    }
    pub fn tcx(&self) -> &'blk ty::ctxt<'tcx> {
        self.fcx.ccx.tcx()
    }
    pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }

    pub fn mir(&self) -> &'blk Mir<'tcx> {
        self.fcx.mir()
    }

    pub fn name(&self, name: ast::Name) -> String {
        name.to_string()
    }

    pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
        self.tcx().map.node_to_string(id).to_string()
    }

    pub fn def(&self, nid: ast::NodeId) -> def::Def {
        match self.tcx().def_map.borrow().get(&nid) {
            Some(v) => v.full_def(),
            None => {
                self.tcx().sess.bug(&format!(
                    "no def associated with node id {}", nid));
            }
        }
    }

    pub fn val_to_string(&self, val: ValueRef) -> String {
        self.ccx().tn().val_to_string(val)
    }

    pub fn llty_str(&self, ty: Type) -> String {
        self.ccx().tn().type_to_string(ty)
    }

    pub fn to_str(&self) -> String {
        format!("[block {:p}]", self)
    }

    pub fn monomorphize<T>(&self, value: &T) -> T
        where T : TypeFoldable<'tcx> + HasTypeFlags
    {
        monomorphize::apply_param_substs(self.tcx(),
                                         self.fcx.param_substs,
                                         value)
    }
}

pub struct Result<'blk, 'tcx: 'blk> {
    pub bcx: Block<'blk, 'tcx>,
    pub val: ValueRef
}

impl<'b, 'tcx> Result<'b, 'tcx> {
    pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> {
        Result {
            bcx: bcx,
            val: val,
        }
    }
}

pub fn val_ty(v: ValueRef) -> Type {
    unsafe {
        Type::from_ref(llvm::LLVMTypeOf(v))
    }
}

// LLVM constant constructors.
pub fn C_null(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstNull(t.to_ref())
    }
}

pub fn C_undef(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMGetUndef(t.to_ref())
    }
}

pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool)
    }
}

pub fn C_floating(s: &str, t: Type) -> ValueRef {
    unsafe {
        let s = CString::new(s).unwrap();
        llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr())
    }
}

pub fn C_floating_f64(f: f64, t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstReal(t.to_ref(), f)
    }
}

pub fn C_nil(ccx: &CrateContext) -> ValueRef {
    C_struct(ccx, &[], false)
}

pub fn C_bool(ccx: &CrateContext, val: bool) -> ValueRef {
    C_integral(Type::i1(ccx), val as u64, false)
}

pub fn C_i32(ccx: &CrateContext, i: i32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, true)
}

pub fn C_u32(ccx: &CrateContext, i: u32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, false)
}

pub fn C_u64(ccx: &CrateContext, i: u64) -> ValueRef {
    C_integral(Type::i64(ccx), i, false)
}

pub fn C_int<I: AsI64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_i64();

    let bit_size = machine::llbitsize_of_real(ccx, ccx.int_type());

    if bit_size < 64 {
        // make sure it doesn't overflow
        assert!(v < (1<<(bit_size-1)) && v >= -(1<<(bit_size-1)));
    }

    C_integral(ccx.int_type(), v as u64, true)
}

pub fn C_uint<I: AsU64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_u64();

    let bit_size = machine::llbitsize_of_real(ccx, ccx.int_type());

    if bit_size < 64 {
        // make sure it doesn't overflow
        assert!(v < (1<<bit_size));
    }

    C_integral(ccx.int_type(), v, false)
}

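// Standalone sketch of the overflow checks above (not part of the original
// file; the function name is made up): a signed value fits in `n` bits iff it
// lies in [-2^(n-1), 2^(n-1)), and an unsigned value iff it lies in [0, 2^n).
// For example, with n = 32, 2_147_483_647 fits as signed but 2_147_483_648
// does not.
#[allow(dead_code)]
fn _fits_in_bits_signed(v: i64, bit_size: u32) -> bool {
    debug_assert!(bit_size > 0 && bit_size < 64);
    let half = 1i64 << (bit_size - 1);
    v >= -half && v < half
}
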
pub trait AsI64 { fn as_i64(self) -> i64; }
pub trait AsU64 { fn as_u64(self) -> u64; }

// FIXME: remove the intptr conversions, because they
// are host-architecture-dependent
impl AsI64 for i64 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for i32 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for isize { fn as_i64(self) -> i64 { self as i64 }}

impl AsU64 for u64 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for u32 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for usize { fn as_u64(self) -> u64 { self as u64 }}

pub fn C_u8(ccx: &CrateContext, i: u8) -> ValueRef {
    C_integral(Type::i8(ccx), i as u64, false)
}


// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> ValueRef {
    unsafe {
        match cx.const_cstr_cache().borrow().get(&s) {
            Some(&llval) => return llval,
            None => ()
        }

        let sc = llvm::LLVMConstStringInContext(cx.llcx(),
                                                s.as_ptr() as *const c_char,
                                                s.len() as c_uint,
                                                !null_terminated as Bool);

        let gsym = token::gensym("str");
        let sym = format!("str{}", gsym.0);
        let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{
            cx.sess().bug(&format!("symbol `{}` is already defined", sym));
        });
        llvm::LLVMSetInitializer(g, sc);
        llvm::LLVMSetGlobalConstant(g, True);
        llvm::SetLinkage(g, llvm::InternalLinkage);

        cx.const_cstr_cache().borrow_mut().insert(s, g);
        g
    }
}

// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
    let len = s.len();
    let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx));
    C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])
}

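// User-level illustration of the `str_slice` constant built above (not part
// of the original file; the function name is made up): a string slice is
// represented as a (data pointer, byte length) pair, which is exactly the
// shape C_str_slice emits.
#[allow(dead_code)]
fn _str_slice_layout_sketch(s: &str) -> (*const u8, usize) {
    (s.as_ptr(), s.len())
}
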
pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef {
    C_struct_in_context(cx.llcx(), elts, packed)
}

pub fn C_struct_in_context(llcx: ContextRef, elts: &[ValueRef], packed: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstStructInContext(llcx,
                                       elts.as_ptr(), elts.len() as c_uint,
                                       packed as Bool)
    }
}

pub fn C_named_struct(t: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint)
    }
}

pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint);
    }
}

pub fn C_vector(elts: &[ValueRef]) -> ValueRef {
    unsafe {
        return llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint);
    }
}

pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef {
    C_bytes_in_context(cx.llcx(), bytes)
}

pub fn C_bytes_in_context(llcx: ContextRef, bytes: &[u8]) -> ValueRef {
    unsafe {
        let ptr = bytes.as_ptr() as *const c_char;
        return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True);
    }
}

pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
                     -> ValueRef {
    unsafe {
        let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint);

        debug!("const_get_elt(v={}, us={:?}, r={})",
               cx.tn().val_to_string(v), us, cx.tn().val_to_string(r));

        return r;
    }
}

pub fn const_to_int(v: ValueRef) -> i64 {
    unsafe {
        llvm::LLVMConstIntGetSExtValue(v)
    }
}

pub fn const_to_uint(v: ValueRef) -> u64 {
    unsafe {
        llvm::LLVMConstIntGetZExtValue(v)
    }
}

fn is_const_integral(v: ValueRef) -> bool {
    unsafe {
        !llvm::LLVMIsAConstantInt(v).is_null()
    }
}

pub fn const_to_opt_int(v: ValueRef) -> Option<i64> {
    unsafe {
        if is_const_integral(v) {
            Some(llvm::LLVMConstIntGetSExtValue(v))
        } else {
            None
        }
    }
}

pub fn const_to_opt_uint(v: ValueRef) -> Option<u64> {
    unsafe {
        if is_const_integral(v) {
            Some(llvm::LLVMConstIntGetZExtValue(v))
        } else {
            None
        }
    }
}

pub fn is_undef(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsUndef(val) != False
    }
}

#[allow(dead_code)] // potentially useful
pub fn is_null(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsNull(val) != False
    }
}

pub fn monomorphize_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
    bcx.fcx.monomorphize(&t)
}

pub fn node_id_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, id: ast::NodeId) -> Ty<'tcx> {
    let tcx = bcx.tcx();
    let t = tcx.node_id_to_type(id);
    monomorphize_type(bcx, t)
}

pub fn expr_ty<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> Ty<'tcx> {
    node_id_type(bcx, ex.id)
}

pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> Ty<'tcx> {
    monomorphize_type(bcx, bcx.tcx().expr_ty_adjusted(ex))
}

/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                    span: Span,
                                    trait_ref: ty::PolyTraitRef<'tcx>)
                                    -> traits::Vtable<'tcx, ()>
{
    let tcx = ccx.tcx();

    // Remove any references to regions; this helps improve caching.
    let trait_ref = tcx.erase_regions(&trait_ref);

    // First check the cache.
    match ccx.trait_cache().borrow().get(&trait_ref) {
        Some(vtable) => {
            info!("Cache hit: {:?}", trait_ref);
            return (*vtable).clone();
        }
        None => { }
    }

    debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}",
           trait_ref, trait_ref.def_id());


    // Do the initial selection for the obligation. This yields the
    // shallow result we are looking for -- that is, what specific impl.
    let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables);
    let mut selcx = traits::SelectionContext::new(&infcx);

    let obligation =
        traits::Obligation::new(traits::ObligationCause::misc(span, ast::DUMMY_NODE_ID),
                                trait_ref.to_poly_trait_predicate());
    let selection = match selcx.select(&obligation) {
        Ok(Some(selection)) => selection,
        Ok(None) => {
            // Ambiguity can happen when monomorphizing during trans
            // expands to some humongo type that never occurred
            // statically -- this humongo type can then overflow,
            // leading to an ambiguous result. So report this as an
            // overflow bug, since I believe this is the only case
            // where ambiguity can result.
            debug!("Encountered ambiguity selecting `{:?}` during trans, \
                    presuming due to overflow",
                   trait_ref);
            ccx.sess().span_fatal(
                span,
                "reached the recursion limit during monomorphization (selection ambiguity)");
        }
        Err(e) => {
            tcx.sess.span_bug(
                span,
                &format!("Encountered error `{:?}` selecting `{:?}` during trans",
                         e,
                         trait_ref))
        }
    };

    // Currently, we use a fulfillment context to completely resolve
    // all nested obligations. This is because they can inform the
    // inference of the impl's type parameters.
    let mut fulfill_cx = infcx.fulfillment_cx.borrow_mut();
    let vtable = selection.map(|predicate| {
        fulfill_cx.register_predicate_obligation(&infcx, predicate);
    });
    let vtable = infer::drain_fulfillment_cx_or_panic(
        span, &infcx, &mut fulfill_cx, &vtable
    );

    info!("Cache miss: {:?} => {:?}", trait_ref, vtable);

    ccx.trait_cache().borrow_mut().insert(trait_ref, vtable.clone());

    vtable
}

/// Normalizes the predicates and checks whether they hold. If this
/// returns false, then either normalize encountered an error or one
/// of the predicates did not hold. Used when creating vtables to
/// check for unsatisfiable methods.
pub fn normalize_and_test_predicates<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                               predicates: Vec<ty::Predicate<'tcx>>)
                                               -> bool
{
    debug!("normalize_and_test_predicates(predicates={:?})",
           predicates);

    let tcx = ccx.tcx();
    let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables);
    let mut selcx = traits::SelectionContext::new(&infcx);
    let mut fulfill_cx = infcx.fulfillment_cx.borrow_mut();
    let cause = traits::ObligationCause::dummy();
    let traits::Normalized { value: predicates, obligations } =
        traits::normalize(&mut selcx, cause.clone(), &predicates);
    for obligation in obligations {
        fulfill_cx.register_predicate_obligation(&infcx, obligation);
    }
    for predicate in predicates {
        let obligation = traits::Obligation::new(cause.clone(), predicate);
        fulfill_cx.register_predicate_obligation(&infcx, obligation);
    }

    infer::drain_fulfillment_cx(&infcx, &mut fulfill_cx, &()).is_ok()
}

// Key used to look up values supplied for type parameters in an expr.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ExprOrMethodCall {
    // Type parameters for a path like `None::<int>`
    ExprId(ast::NodeId),

    // Type parameters for a method call like `a.foo::<int>()`
    MethodCallKey(ty::MethodCall)
}

pub fn node_id_substs<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                node: ExprOrMethodCall,
                                param_substs: &subst::Substs<'tcx>)
                                -> subst::Substs<'tcx> {
    let tcx = ccx.tcx();

    let substs = match node {
        ExprId(id) => {
            tcx.node_id_item_substs(id).substs
        }
        MethodCallKey(method_call) => {
            tcx.tables.borrow().method_map[&method_call].substs.clone()
        }
    };

    if substs.types.needs_infer() {
        tcx.sess.bug(&format!("type parameters for node {:?} include inference types: {:?}",
                              node, substs));
    }

    monomorphize::apply_param_substs(tcx,
                                     param_substs,
                                     &substs.erase_regions())
}

pub fn langcall(bcx: Block,
                span: Option<Span>,
                msg: &str,
                li: LangItem)
                -> DefId {
    match bcx.tcx().lang_items.require(li) {
        Ok(id) => id,
        Err(s) => {
            let msg = format!("{} {}", msg, s);
            match span {
                Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]),
                None => bcx.tcx().sess.fatal(&msg[..]),
            }
        }
    }
}

/// Return the VariantDef corresponding to an inlined variant node
pub fn inlined_variant_def<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                     inlined_vid: ast::NodeId)
                                     -> ty::VariantDef<'tcx>
{
    let ctor_ty = ccx.tcx().node_id_to_type(inlined_vid);
    debug!("inlined_variant_def: ctor_ty={:?} inlined_vid={:?}", ctor_ty,
           inlined_vid);
    let adt_def = match ctor_ty.sty {
        ty::TyBareFn(_, &ty::BareFnTy { sig: ty::Binder(ty::FnSig {
            output: ty::FnConverging(ty), ..
        }), ..}) => ty,
        _ => ctor_ty
    }.ty_adt_def().unwrap();
    let inlined_vid_def_id = ccx.tcx().map.local_def_id(inlined_vid);
    adt_def.variants.iter().find(|v| {
        inlined_vid_def_id == v.did ||
        ccx.external().borrow().get(&v.did) == Some(&Some(inlined_vid))
    }).unwrap_or_else(|| {
        ccx.sess().bug(&format!("no variant for {:?}::{}", adt_def, inlined_vid))
    })
}

// To avoid UB from LLVM, these two functions mask RHS with an
// appropriate mask unconditionally (i.e. the fallback behavior for
// all shifts). For 32- and 64-bit types, this matches the semantics
// of Java. (See related discussion on #1877 and #10183.)

pub fn build_unchecked_lshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          lhs: ValueRef,
                                          rhs: ValueRef,
                                          binop_debug_loc: DebugLoc) -> ValueRef {
    let rhs = base::cast_shift_expr_rhs(bcx, hir::BinOp_::BiShl, lhs, rhs);
    // #1877, #10183: Ensure that input is always valid
    let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
    build::Shl(bcx, lhs, rhs, binop_debug_loc)
}

pub fn build_unchecked_rshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          lhs_t: Ty<'tcx>,
                                          lhs: ValueRef,
                                          rhs: ValueRef,
                                          binop_debug_loc: DebugLoc) -> ValueRef {
    let rhs = base::cast_shift_expr_rhs(bcx, hir::BinOp_::BiShr, lhs, rhs);
    // #1877, #10183: Ensure that input is always valid
    let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
    let is_signed = lhs_t.is_signed();
    if is_signed {
        build::AShr(bcx, lhs, rhs, binop_debug_loc)
    } else {
        build::LShr(bcx, lhs, rhs, binop_debug_loc)
    }
}

fn shift_mask_rhs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                              rhs: ValueRef,
                              debug_loc: DebugLoc) -> ValueRef {
    let rhs_llty = val_ty(rhs);
    build::And(bcx, rhs, shift_mask_val(bcx, rhs_llty, rhs_llty, false), debug_loc)
}

pub fn shift_mask_val<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                  llty: Type,
                                  mask_llty: Type,
                                  invert: bool) -> ValueRef {
    let kind = llty.kind();
    match kind {
        TypeKind::Integer => {
            // i8/u8 can shift by at most 7, i16/u16 by at most 15, etc.
            let val = llty.int_width() - 1;
            if invert {
                C_integral(mask_llty, !val, true)
            } else {
                C_integral(mask_llty, val, false)
            }
        },
        TypeKind::Vector => {
            let mask = shift_mask_val(bcx, llty.element_type(), mask_llty.element_type(), invert);
            build::VectorSplat(bcx, mask_llty.vector_length(), mask)
        },
        _ => panic!("shift_mask_val: expected Integer or Vector, found {:?}", kind),
    }
}

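// Plain-integer sketch of the masking rule above (not part of the original
// file; the function name is made up): the shift amount is masked to
// `bit width - 1`, so for a 32-bit value a requested shift of 37 behaves like
// a shift of 37 & 31 == 5, matching LLVM's defined fallback and Java
// semantics.
#[allow(dead_code)]
fn _masked_shl_sketch(x: u32, amount: u32) -> u32 {
    x << (amount & 31)
}
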
pub fn get_static_val<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                did: DefId,
                                ty: Ty<'tcx>)
                                -> ValueRef {
    if let Some(node_id) = ccx.tcx().map.as_local_node_id(did) {
        base::get_item_val(ccx, node_id)
    } else {
        base::get_extern_const(ccx, did, ty)
    }
}