]> git.proxmox.com Git - rustc.git/blob - src/librustc_trans/common.rs
Imported Upstream version 1.9.0+dfsg1
[rustc.git] / src / librustc_trans / common.rs
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 #![allow(non_camel_case_types, non_snake_case)]
12
13 //! Code that is useful in various trans modules.
14
15 use session::Session;
16 use llvm;
17 use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef, TypeKind};
18 use llvm::{True, False, Bool, OperandBundleDef};
19 use rustc::cfg;
20 use rustc::hir::def::Def;
21 use rustc::hir::def_id::DefId;
22 use rustc::infer;
23 use middle::lang_items::LangItem;
24 use rustc::ty::subst::Substs;
25 use abi::{Abi, FnType};
26 use base;
27 use build;
28 use builder::Builder;
29 use callee::Callee;
30 use cleanup;
31 use consts;
32 use datum;
33 use debuginfo::{self, DebugLoc};
34 use declare;
35 use machine;
36 use mir::CachedMir;
37 use monomorphize;
38 use type_::Type;
39 use value::Value;
40 use rustc::ty::{self, Ty, TyCtxt};
41 use rustc::traits::{self, SelectionContext, ProjectionMode};
42 use rustc::ty::fold::{TypeFolder, TypeFoldable};
43 use rustc::hir;
44 use util::nodemap::NodeMap;
45
46 use arena::TypedArena;
47 use libc::{c_uint, c_char};
48 use std::ops::Deref;
49 use std::ffi::CString;
50 use std::cell::{Cell, RefCell};
51
52 use syntax::ast;
53 use syntax::codemap::{DUMMY_SP, Span};
54 use syntax::parse::token::InternedString;
55 use syntax::parse::token;
56
57 pub use context::CrateContext;
58
59 /// Is the type's representation size known at compile time?
60 pub fn type_is_sized<'tcx>(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
61 ty.is_sized(&tcx.empty_parameter_environment(), DUMMY_SP)
62 }
63
64 pub fn type_is_fat_ptr<'tcx>(cx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
65 match ty.sty {
66 ty::TyRawPtr(ty::TypeAndMut{ty, ..}) |
67 ty::TyRef(_, ty::TypeAndMut{ty, ..}) |
68 ty::TyBox(ty) => {
69 !type_is_sized(cx, ty)
70 }
71 _ => {
72 false
73 }
74 }
75 }
76
77 pub fn type_is_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
78 use machine::llsize_of_alloc;
79 use type_of::sizing_type_of;
80
81 let tcx = ccx.tcx();
82 let simple = ty.is_scalar() ||
83 ty.is_unique() || ty.is_region_ptr() ||
84 ty.is_simd();
85 if simple && !type_is_fat_ptr(tcx, ty) {
86 return true;
87 }
88 if !type_is_sized(tcx, ty) {
89 return false;
90 }
91 match ty.sty {
92 ty::TyStruct(..) | ty::TyEnum(..) | ty::TyTuple(..) | ty::TyArray(_, _) |
93 ty::TyClosure(..) => {
94 let llty = sizing_type_of(ccx, ty);
95 llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type())
96 }
97 _ => type_is_zero_size(ccx, ty)
98 }
99 }
100
101 /// Identify types which have size zero at runtime.
102 pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
103 use machine::llsize_of_alloc;
104 use type_of::sizing_type_of;
105 let llty = sizing_type_of(ccx, ty);
106 llsize_of_alloc(ccx, llty) == 0
107 }
108
109 /// Generates a unique symbol based off the name given. This is used to create
110 /// unique symbols for things like closures.
111 pub fn gensym_name(name: &str) -> ast::Name {
112 let num = token::gensym(name).0;
113 // use one colon which will get translated to a period by the mangler, and
114 // we're guaranteed that `num` is globally unique for this crate.
115 token::gensym(&format!("{}:{}", name, num))
116 }
117
118 /*
119 * A note on nomenclature of linking: "extern", "foreign", and "upcall".
120 *
121 * An "extern" is an LLVM symbol we wind up emitting an undefined external
122 * reference to. This means "we don't have the thing in this compilation unit,
123 * please make sure you link it in at runtime". This could be a reference to
124 * C code found in a C library, or rust code found in a rust crate.
125 *
126 * Most "externs" are implicitly declared (automatically) as a result of a
127 * user declaring an extern _module_ dependency; this causes the rust driver
128 * to locate an extern crate, scan its compilation metadata, and emit extern
129 * declarations for any symbols used by the declaring crate.
130 *
131 * A "foreign" is an extern that references C (or other non-rust ABI) code.
132 * There is no metadata to scan for extern references so in these cases either
133 * a header-digester like bindgen, or manual function prototypes, have to
134 * serve as declarators. So these are usually given explicitly as prototype
135 * declarations, in rust code, with ABI attributes on them noting which ABI to
136 * link via.
137 *
138 * An "upcall" is a foreign call generated by the compiler (not corresponding
139 * to any user-written call in the code) into the runtime library, to perform
140 * some helper task such as bringing a task to life, allocating memory, etc.
141 *
142 */
143
144 use Disr;
145
/// An AST node id paired with its source span, used for diagnostics and
/// debug-location bookkeeping during translation.
#[derive(Copy, Clone)]
pub struct NodeIdAndSpan {
    pub id: ast::NodeId,
    pub span: Span,
}
151
152 pub fn expr_info(expr: &hir::Expr) -> NodeIdAndSpan {
153 NodeIdAndSpan { id: expr.id, span: expr.span }
154 }
155
/// The concrete version of ty::FieldDef. The name is the field index if
/// the field is numeric. Components: (field name, monomorphized field type).
pub struct Field<'tcx>(pub ast::Name, pub Ty<'tcx>);
159
/// The concrete version of ty::VariantDef
pub struct VariantInfo<'tcx> {
    // Discriminant value of this variant.
    pub discr: Disr,
    // Concrete (monomorphized) fields, in declaration order.
    pub fields: Vec<Field<'tcx>>
}
165
impl<'tcx> VariantInfo<'tcx> {
    /// Builds a `VariantInfo` for `ty`, which must be a struct, enum or
    /// tuple type. For enums, `opt_def` selects the variant; `None` picks
    /// the type's sole (struct) variant. Bugs out on any other type.
    pub fn from_ty(tcx: &TyCtxt<'tcx>,
                   ty: Ty<'tcx>,
                   opt_def: Option<Def>)
                   -> Self
    {
        match ty.sty {
            ty::TyStruct(adt, substs) | ty::TyEnum(adt, substs) => {
                let variant = match opt_def {
                    None => adt.struct_variant(),
                    Some(def) => adt.variant_of_def(def)
                };

                VariantInfo {
                    discr: Disr::from(variant.disr_val),
                    // Apply `substs` so the field types are fully concrete.
                    fields: variant.fields.iter().map(|f| {
                        Field(f.name, monomorphize::field_ty(tcx, substs, f))
                    }).collect()
                }
            }

            ty::TyTuple(ref v) => {
                VariantInfo {
                    discr: Disr(0),
                    // Tuple fields are named by their index: "0", "1", ...
                    fields: v.iter().enumerate().map(|(i, &t)| {
                        Field(token::intern(&i.to_string()), t)
                    }).collect()
                }
            }

            _ => {
                bug!("cannot get field types from the type {:?}", ty);
            }
        }
    }

    /// Return the variant corresponding to a given node (e.g. expr)
    pub fn of_node(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>, id: ast::NodeId) -> Self {
        let node_def = tcx.def_map.borrow().get(&id).map(|v| v.full_def());
        Self::from_ty(tcx, ty, node_def)
    }

    /// Position of the field called `name`; bugs out if there is no such field.
    pub fn field_index(&self, name: ast::Name) -> usize {
        self.fields.iter().position(|&Field(n,_)| n == name).unwrap_or_else(|| {
            bug!("unknown field `{}`", name)
        })
    }
}
214
/// Owning wrapper around a raw LLVM `BuilderRef`; the builder is disposed
/// when this wrapper is dropped (see the `Drop` impl below).
pub struct BuilderRef_res {
    pub b: BuilderRef,
}
218
impl Drop for BuilderRef_res {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: assumes `self.b` is a live builder handle owned by
            // this wrapper and not disposed elsewhere — TODO confirm at
            // construction sites.
            llvm::LLVMDisposeBuilder(self.b);
        }
    }
}
226
227 pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {
228 BuilderRef_res {
229 b: b
230 }
231 }
232
/// Asserts that `substs` contains no unresolved type-inference variables.
pub fn validate_substs(substs: &Substs) {
    assert!(!substs.types.needs_infer());
}
236
// work around bizarre resolve errors
// Shorthands for the two datum kinds used throughout trans.
type RvalueDatum<'tcx> = datum::Datum<'tcx, datum::Rvalue>;
pub type LvalueDatum<'tcx> = datum::Datum<'tcx, datum::Lvalue>;
240
/// Entry in `DropFlagHintsMap`, wrapping a single drop-flag-hint datum.
#[derive(Clone, Debug)]
struct HintEntry<'tcx> {
    // The datum for the dropflag-hint itself; note that many
    // source-level Lvalues will be associated with the same
    // dropflag-hint datum.
    datum: cleanup::DropHintDatum<'tcx>,
}
248
/// Per-function table of drop-flag hints, keyed by AST node id.
pub struct DropFlagHintsMap<'tcx> {
    // Maps NodeId for expressions that read/write unfragmented state
    // to that state's drop-flag "hint." (A stack-local hint
    // indicates either that (1.) it is certain that no-drop is
    // needed, or (2.) inline drop-flag must be consulted.)
    node_map: NodeMap<HintEntry<'tcx>>,
}
256
257 impl<'tcx> DropFlagHintsMap<'tcx> {
258 pub fn new() -> DropFlagHintsMap<'tcx> { DropFlagHintsMap { node_map: NodeMap() } }
259 pub fn has_hint(&self, id: ast::NodeId) -> bool { self.node_map.contains_key(&id) }
260 pub fn insert(&mut self, id: ast::NodeId, datum: cleanup::DropHintDatum<'tcx>) {
261 self.node_map.insert(id, HintEntry { datum: datum });
262 }
263 pub fn hint_datum(&self, id: ast::NodeId) -> Option<cleanup::DropHintDatum<'tcx>> {
264 self.node_map.get(&id).map(|t|t.datum)
265 }
266 }
267
// Function context. Every LLVM function we create will have one of
// these.
pub struct FunctionContext<'a, 'tcx: 'a> {
    // The MIR for this function. At present, this is optional because
    // we only have MIR available for things that are local to the
    // crate.
    pub mir: Option<CachedMir<'a, 'tcx>>,

    // The ValueRef returned from a call to llvm::LLVMAddFunction; the
    // address of the first instruction in the sequence of
    // instructions for this function that will go in the .text
    // section of the executable we're generating.
    pub llfn: ValueRef,

    // always an empty parameter-environment NOTE: @jroesch another use of ParamEnv
    pub param_env: ty::ParameterEnvironment<'a, 'tcx>,

    // A pointer to where to store the return value. If the return type is
    // immediate, this points to an alloca in the function. Otherwise, it's a
    // pointer to the hidden first parameter of the function. After function
    // construction, this should always be Some.
    pub llretslotptr: Cell<Option<ValueRef>>,

    // These pub elements: "hoisted basic blocks" containing
    // administrative activities that have to happen in only one place in
    // the function, due to LLVM's quirks.
    // A marker for the place where we want to insert the function's static
    // allocas, so that LLVM will coalesce them into a single alloca call.
    pub alloca_insert_pt: Cell<Option<ValueRef>>,
    // The shared return block, created lazily by `get_llreturn`.
    pub llreturn: Cell<Option<BasicBlockRef>>,

    // If the function has any nested returns, including something like:
    // fn foo() -> Option<Foo> { Some(Foo { x: return None }) }, then
    // we use a separate alloca for each return
    pub needs_ret_allocas: bool,

    // When working with landingpad-based exceptions this value is alloca'd and
    // later loaded when using the resume instruction. This ends up being
    // critical to chaining landing pads and reusing already-translated
    // cleanups.
    //
    // Note that for cleanuppad-based exceptions this is not used.
    pub landingpad_alloca: Cell<Option<ValueRef>>,

    // Maps the DefId's for local variables to the allocas created for
    // them in llallocas.
    pub lllocals: RefCell<NodeMap<LvalueDatum<'tcx>>>,

    // Same as above, but for closure upvars
    pub llupvars: RefCell<NodeMap<ValueRef>>,

    // Carries info about drop-flags for local bindings (longer term,
    // paths) for the code being compiled.
    pub lldropflag_hints: RefCell<DropFlagHintsMap<'tcx>>,

    // Describes the return/argument LLVM types and their ABI handling.
    pub fn_ty: FnType,

    // If this function is being monomorphized, this contains the type
    // substitutions used.
    pub param_substs: &'tcx Substs<'tcx>,

    // The source span and nesting context where this function comes from, for
    // error reporting and symbol generation.
    pub span: Option<Span>,

    // The arena that blocks are allocated from.
    pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,

    // The arena that landing pads are allocated from.
    pub lpad_arena: TypedArena<LandingPad>,

    // This function's enclosing crate context.
    pub ccx: &'a CrateContext<'a, 'tcx>,

    // Used and maintained by the debuginfo module.
    pub debug_context: debuginfo::FunctionDebugContext,

    // Cleanup scopes.
    pub scopes: RefCell<Vec<cleanup::CleanupScope<'a, 'tcx>>>,

    // Control-flow graph of the function body, if one was built.
    pub cfg: Option<cfg::CFG>,
}
351
352 impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
353 pub fn mir(&self) -> CachedMir<'a, 'tcx> {
354 self.mir.clone().expect("fcx.mir was empty")
355 }
356
357 pub fn cleanup(&self) {
358 unsafe {
359 llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt
360 .get()
361 .unwrap());
362 }
363 }
364
365 pub fn get_llreturn(&self) -> BasicBlockRef {
366 if self.llreturn.get().is_none() {
367
368 self.llreturn.set(Some(unsafe {
369 llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn,
370 "return\0".as_ptr() as *const _)
371 }))
372 }
373
374 self.llreturn.get().unwrap()
375 }
376
377 pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>, name: &str) -> ValueRef {
378 if self.needs_ret_allocas {
379 base::alloca(bcx, self.fn_ty.ret.memory_ty(self.ccx), name)
380 } else {
381 self.llretslotptr.get().unwrap()
382 }
383 }
384
385 pub fn new_block(&'a self,
386 name: &str,
387 opt_node_id: Option<ast::NodeId>)
388 -> Block<'a, 'tcx> {
389 unsafe {
390 let name = CString::new(name).unwrap();
391 let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),
392 self.llfn,
393 name.as_ptr());
394 BlockS::new(llbb, opt_node_id, self)
395 }
396 }
397
398 pub fn new_id_block(&'a self,
399 name: &str,
400 node_id: ast::NodeId)
401 -> Block<'a, 'tcx> {
402 self.new_block(name, Some(node_id))
403 }
404
405 pub fn new_temp_block(&'a self,
406 name: &str)
407 -> Block<'a, 'tcx> {
408 self.new_block(name, None)
409 }
410
411 pub fn join_blocks(&'a self,
412 id: ast::NodeId,
413 in_cxs: &[Block<'a, 'tcx>])
414 -> Block<'a, 'tcx> {
415 let out = self.new_id_block("join", id);
416 let mut reachable = false;
417 for bcx in in_cxs {
418 if !bcx.unreachable.get() {
419 build::Br(*bcx, out.llbb, DebugLoc::None);
420 reachable = true;
421 }
422 }
423 if !reachable {
424 build::Unreachable(out);
425 }
426 return out;
427 }
428
429 pub fn monomorphize<T>(&self, value: &T) -> T
430 where T : TypeFoldable<'tcx>
431 {
432 monomorphize::apply_param_substs(self.ccx.tcx(),
433 self.param_substs,
434 value)
435 }
436
437 /// This is the same as `common::type_needs_drop`, except that it
438 /// may use or update caches within this `FunctionContext`.
439 pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
440 self.ccx.tcx().type_needs_drop_given_env(ty, &self.param_env)
441 }
442
443 pub fn eh_personality(&self) -> ValueRef {
444 // The exception handling personality function.
445 //
446 // If our compilation unit has the `eh_personality` lang item somewhere
447 // within it, then we just need to translate that. Otherwise, we're
448 // building an rlib which will depend on some upstream implementation of
449 // this function, so we just codegen a generic reference to it. We don't
450 // specify any of the types for the function, we just make it a symbol
451 // that LLVM can later use.
452 //
453 // Note that MSVC is a little special here in that we don't use the
454 // `eh_personality` lang item at all. Currently LLVM has support for
455 // both Dwarf and SEH unwind mechanisms for MSVC targets and uses the
456 // *name of the personality function* to decide what kind of unwind side
457 // tables/landing pads to emit. It looks like Dwarf is used by default,
458 // injecting a dependency on the `_Unwind_Resume` symbol for resuming
459 // an "exception", but for MSVC we want to force SEH. This means that we
460 // can't actually have the personality function be our standard
461 // `rust_eh_personality` function, but rather we wired it up to the
462 // CRT's custom personality function, which forces LLVM to consider
463 // landing pads as "landing pads for SEH".
464 let ccx = self.ccx;
465 let tcx = ccx.tcx();
466 let target = &ccx.sess().target.target;
467 match tcx.lang_items.eh_personality() {
468 Some(def_id) if !base::wants_msvc_seh(ccx.sess()) => {
469 Callee::def(ccx, def_id, tcx.mk_substs(Substs::empty())).reify(ccx).val
470 }
471 _ => if let Some(llpersonality) = ccx.eh_personality().get() {
472 llpersonality
473 } else {
474 let name = if !base::wants_msvc_seh(ccx.sess()) {
475 "rust_eh_personality"
476 } else if target.arch == "x86" {
477 "_except_handler3"
478 } else {
479 "__C_specific_handler"
480 };
481 let fty = Type::variadic_func(&[], &Type::i32(ccx));
482 let f = declare::declare_cfn(ccx, name, fty);
483 ccx.eh_personality().set(Some(f));
484 f
485 }
486 }
487 }
488
489 // Returns a ValueRef of the "eh_unwind_resume" lang item if one is defined,
490 // otherwise declares it as an external function.
491 pub fn eh_unwind_resume(&self) -> Callee<'tcx> {
492 use attributes;
493 let ccx = self.ccx;
494 let tcx = ccx.tcx();
495 assert!(ccx.sess().target.target.options.custom_unwind_resume);
496 if let Some(def_id) = tcx.lang_items.eh_unwind_resume() {
497 return Callee::def(ccx, def_id, tcx.mk_substs(Substs::empty()));
498 }
499
500 let ty = tcx.mk_fn_ptr(ty::BareFnTy {
501 unsafety: hir::Unsafety::Unsafe,
502 abi: Abi::C,
503 sig: ty::Binder(ty::FnSig {
504 inputs: vec![tcx.mk_mut_ptr(tcx.types.u8)],
505 output: ty::FnDiverging,
506 variadic: false
507 }),
508 });
509
510 let unwresume = ccx.eh_unwind_resume();
511 if let Some(llfn) = unwresume.get() {
512 return Callee::ptr(datum::immediate_rvalue(llfn, ty));
513 }
514 let llfn = declare::declare_fn(ccx, "rust_eh_unwind_resume", ty);
515 attributes::unwind(llfn, true);
516 unwresume.set(Some(llfn));
517 Callee::ptr(datum::immediate_rvalue(llfn, ty))
518 }
519 }
520
// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
// with many basic blocks per function. All the basic blocks attached to a
// function are organized as a directed graph.
pub struct BlockS<'blk, 'tcx: 'blk> {
    // The BasicBlockRef returned from a call to
    // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
    // block to the function pointed to by llfn. We insert
    // instructions into that block by way of this block context.
    // The block pointing to this one in the function's digraph.
    pub llbb: BasicBlockRef,
    // Whether a terminator instruction has been emitted for this block.
    pub terminated: Cell<bool>,
    // Whether this block has been determined to be unreachable.
    pub unreachable: Cell<bool>,

    // If this block is part of a landing pad, then this is `Some` indicating
    // what kind of landing pad it's in, otherwise this is none.
    pub lpad: Cell<Option<&'blk LandingPad>>,

    // AST node-id associated with this block, if any. Used for
    // debugging purposes only.
    pub opt_node_id: Option<ast::NodeId>,

    // The function context for the function to which this block is
    // attached.
    pub fcx: &'blk FunctionContext<'blk, 'tcx>,
}
548
/// Blocks are arena-allocated (see `BlockS::new`) and passed by reference.
pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;
550
551 impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
552 pub fn new(llbb: BasicBlockRef,
553 opt_node_id: Option<ast::NodeId>,
554 fcx: &'blk FunctionContext<'blk, 'tcx>)
555 -> Block<'blk, 'tcx> {
556 fcx.block_arena.alloc(BlockS {
557 llbb: llbb,
558 terminated: Cell::new(false),
559 unreachable: Cell::new(false),
560 lpad: Cell::new(None),
561 opt_node_id: opt_node_id,
562 fcx: fcx
563 })
564 }
565
566 pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
567 self.fcx.ccx
568 }
569 pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> {
570 self.fcx
571 }
572 pub fn tcx(&self) -> &'blk TyCtxt<'tcx> {
573 self.fcx.ccx.tcx()
574 }
575 pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }
576
577 pub fn lpad(&self) -> Option<&'blk LandingPad> {
578 self.lpad.get()
579 }
580
581 pub fn mir(&self) -> CachedMir<'blk, 'tcx> {
582 self.fcx.mir()
583 }
584
585 pub fn name(&self, name: ast::Name) -> String {
586 name.to_string()
587 }
588
589 pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
590 self.tcx().map.node_to_string(id).to_string()
591 }
592
593 pub fn def(&self, nid: ast::NodeId) -> Def {
594 match self.tcx().def_map.borrow().get(&nid) {
595 Some(v) => v.full_def(),
596 None => {
597 bug!("no def associated with node id {}", nid);
598 }
599 }
600 }
601
602 pub fn to_str(&self) -> String {
603 format!("[block {:p}]", self)
604 }
605
606 pub fn monomorphize<T>(&self, value: &T) -> T
607 where T : TypeFoldable<'tcx>
608 {
609 monomorphize::apply_param_substs(self.tcx(),
610 self.fcx.param_substs,
611 value)
612 }
613
614 pub fn build(&'blk self) -> BlockAndBuilder<'blk, 'tcx> {
615 BlockAndBuilder::new(self, OwnedBuilder::new_with_ccx(self.ccx()))
616 }
617 }
618
/// A `Builder` whose raw LLVM builder handle is owned and disposed by this
/// wrapper (see the `Drop` impl below).
pub struct OwnedBuilder<'blk, 'tcx: 'blk> {
    builder: Builder<'blk, 'tcx>
}
622
impl<'blk, 'tcx> OwnedBuilder<'blk, 'tcx> {
    /// Creates a fresh LLVM IR builder in `ccx`'s LLVM context; the raw
    /// handle is released when this `OwnedBuilder` is dropped.
    pub fn new_with_ccx(ccx: &'blk CrateContext<'blk, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe {
            llvm::LLVMCreateBuilderInContext(ccx.llcx())
        };
        OwnedBuilder {
            builder: Builder {
                llbuilder: llbuilder,
                ccx: ccx,
            }
        }
    }
}
637
impl<'blk, 'tcx> Drop for OwnedBuilder<'blk, 'tcx> {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: the handle was created in `new_with_ccx` and is not
            // shared outside this owner, so disposing it once here is sound.
            llvm::LLVMDisposeBuilder(self.builder.llbuilder);
        }
    }
}
645
/// A block paired with an owned IR builder positioned inside it; derefs to
/// the `Builder` for instruction emission.
pub struct BlockAndBuilder<'blk, 'tcx: 'blk> {
    bcx: Block<'blk, 'tcx>,
    owned_builder: OwnedBuilder<'blk, 'tcx>,
}
650
impl<'blk, 'tcx> BlockAndBuilder<'blk, 'tcx> {
    /// Couples `bcx` with `owned_builder`, positioning the builder at the
    /// end of the block.
    pub fn new(bcx: Block<'blk, 'tcx>, owned_builder: OwnedBuilder<'blk, 'tcx>) -> Self {
        // Set the builder's position to this block's end.
        owned_builder.builder.position_at_end(bcx.llbb);
        BlockAndBuilder {
            bcx: bcx,
            owned_builder: owned_builder,
        }
    }

    /// Runs `f` with the raw block, then re-positions the builder at the
    /// block's end in case `f` moved the insertion point.
    pub fn with_block<F, R>(&self, f: F) -> R
        where F: FnOnce(Block<'blk, 'tcx>) -> R
    {
        let result = f(self.bcx);
        self.position_at_end(self.bcx.llbb);
        result
    }

    /// Replaces the wrapped block with `f(block)`, keeping the builder.
    pub fn map_block<F>(self, f: F) -> Self
        where F: FnOnce(Block<'blk, 'tcx>) -> Block<'blk, 'tcx>
    {
        let BlockAndBuilder { bcx, owned_builder } = self;
        let bcx = f(bcx);
        BlockAndBuilder::new(bcx, owned_builder)
    }

    /// Runs `f` with the builder positioned at the *start* of the block,
    /// restoring the position to the block's end afterwards.
    pub fn at_start<F, R>(&self, f: F) -> R
        where F: FnOnce(&BlockAndBuilder<'blk, 'tcx>) -> R
    {
        self.position_at_start(self.bcx.llbb);
        let r = f(self);
        self.position_at_end(self.bcx.llbb);
        r
    }

    // Methods delegated to bcx

    /// Whether the underlying block is marked unreachable.
    pub fn is_unreachable(&self) -> bool {
        self.bcx.unreachable.get()
    }

    /// The crate context.
    pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
        self.bcx.ccx()
    }
    /// The function context.
    pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> {
        self.bcx.fcx()
    }
    /// The type context.
    pub fn tcx(&self) -> &'blk TyCtxt<'tcx> {
        self.bcx.tcx()
    }
    /// The compiler session.
    pub fn sess(&self) -> &'blk Session {
        self.bcx.sess()
    }

    /// The raw LLVM basic block.
    pub fn llbb(&self) -> BasicBlockRef {
        self.bcx.llbb
    }

    /// The MIR of the enclosing function.
    pub fn mir(&self) -> CachedMir<'blk, 'tcx> {
        self.bcx.mir()
    }

    /// Applies the enclosing function's parameter substitutions to `value`.
    pub fn monomorphize<T>(&self, value: &T) -> T
        where T: TypeFoldable<'tcx>
    {
        self.bcx.monomorphize(value)
    }

    /// Sets (or clears) the block's landing pad, arena-allocating `lpad`.
    pub fn set_lpad(&self, lpad: Option<LandingPad>) {
        self.bcx.lpad.set(lpad.map(|p| &*self.fcx().lpad_arena.alloc(p)))
    }
}
723
// Instruction-emission methods are reached through the wrapped `Builder`.
impl<'blk, 'tcx> Deref for BlockAndBuilder<'blk, 'tcx> {
    type Target = Builder<'blk, 'tcx>;
    fn deref(&self) -> &Self::Target {
        &self.owned_builder.builder
    }
}
730
/// A structure representing an active landing pad for the duration of a basic
/// block.
///
/// Each `Block` may contain an instance of this, indicating whether the block
/// is part of a landing pad or not. This is used to make decision about whether
/// to emit `invoke` instructions (e.g. in a landing pad we don't continue to
/// use `invoke`) and also about various function call metadata.
///
/// For GNU exceptions (`landingpad` + `resume` instructions) this structure is
/// just a bunch of `None` instances (not too interesting), but for MSVC
/// exceptions (`cleanuppad` + `cleanupret` instructions) this contains data.
/// When inside of a landing pad, each function call in LLVM IR needs to be
/// annotated with which landing pad it's a part of. This is accomplished via
/// the `OperandBundleDef` value created for MSVC landing pads.
pub struct LandingPad {
    // The `cleanuppad` value, present only for MSVC-style exceptions.
    cleanuppad: Option<ValueRef>,
    // The "funclet" operand bundle attached to calls inside the pad.
    operand: Option<OperandBundleDef>,
}
749
750 impl LandingPad {
751 pub fn gnu() -> LandingPad {
752 LandingPad { cleanuppad: None, operand: None }
753 }
754
755 pub fn msvc(cleanuppad: ValueRef) -> LandingPad {
756 LandingPad {
757 cleanuppad: Some(cleanuppad),
758 operand: Some(OperandBundleDef::new("funclet", &[cleanuppad])),
759 }
760 }
761
762 pub fn bundle(&self) -> Option<&OperandBundleDef> {
763 self.operand.as_ref()
764 }
765 }
766
impl Clone for LandingPad {
    fn clone(&self) -> LandingPad {
        // Hand-written: the operand bundle is rebuilt from the cleanuppad
        // value rather than cloned (presumably `OperandBundleDef` is not
        // `Clone` — TODO confirm).
        LandingPad {
            cleanuppad: self.cleanuppad,
            operand: self.cleanuppad.map(|p| {
                OperandBundleDef::new("funclet", &[p])
            }),
        }
    }
}
777
/// A translated value together with the block that produced it.
pub struct Result<'blk, 'tcx: 'blk> {
    pub bcx: Block<'blk, 'tcx>,
    pub val: ValueRef
}
782
783 impl<'b, 'tcx> Result<'b, 'tcx> {
784 pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> {
785 Result {
786 bcx: bcx,
787 val: val,
788 }
789 }
790 }
791
/// The LLVM type of value `v`.
pub fn val_ty(v: ValueRef) -> Type {
    unsafe {
        Type::from_ref(llvm::LLVMTypeOf(v))
    }
}
797
// LLVM constant constructors.

/// The all-zeros constant of type `t`.
pub fn C_null(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstNull(t.to_ref())
    }
}
804
/// The `undef` constant of type `t`.
pub fn C_undef(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMGetUndef(t.to_ref())
    }
}
810
/// An integer constant of type `t` holding `u`, sign-extended if requested.
pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool)
    }
}
816
/// A floating-point constant of type `t`, parsed from the string `s`.
pub fn C_floating(s: &str, t: Type) -> ValueRef {
    unsafe {
        let s = CString::new(s).unwrap();
        llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr())
    }
}
823
/// A floating-point constant of type `t` holding the value `f`.
pub fn C_floating_f64(f: f64, t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstReal(t.to_ref(), f)
    }
}
829
/// The unit value: an empty (non-packed) struct constant.
pub fn C_nil(ccx: &CrateContext) -> ValueRef {
    C_struct(ccx, &[], false)
}
833
/// An `i1` constant holding `val`.
pub fn C_bool(ccx: &CrateContext, val: bool) -> ValueRef {
    C_integral(Type::i1(ccx), val as u64, false)
}
837
/// An `i32` constant holding `i` (sign-extended).
pub fn C_i32(ccx: &CrateContext, i: i32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, true)
}
841
/// An `i32` constant holding the unsigned value `i`.
pub fn C_u32(ccx: &CrateContext, i: u32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, false)
}
845
/// An `i64` constant holding the unsigned value `i`.
pub fn C_u64(ccx: &CrateContext, i: u64) -> ValueRef {
    C_integral(Type::i64(ccx), i, false)
}
849
/// A pointer-sized (`int`-typed) signed constant holding `i`; asserts that
/// the value fits in the target's integer width.
pub fn C_int<I: AsI64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_i64();

    let bit_size = machine::llbitsize_of_real(ccx, ccx.int_type());

    if bit_size < 64 {
        // make sure it doesn't overflow
        assert!(v < (1<<(bit_size-1)) && v >= -(1<<(bit_size-1)));
    }

    C_integral(ccx.int_type(), v as u64, true)
}
862
/// A pointer-sized (`int`-typed) unsigned constant holding `i`; asserts
/// that the value fits in the target's integer width.
pub fn C_uint<I: AsU64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_u64();

    let bit_size = machine::llbitsize_of_real(ccx, ccx.int_type());

    if bit_size < 64 {
        // make sure it doesn't overflow
        assert!(v < (1<<bit_size));
    }

    C_integral(ccx.int_type(), v, false)
}
875
// Widening conversions used by C_int / C_uint above.
pub trait AsI64 { fn as_i64(self) -> i64; }
pub trait AsU64 { fn as_u64(self) -> u64; }

// FIXME: remove the intptr conversions, because they
// are host-architecture-dependent
impl AsI64 for i64 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for i32 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for isize { fn as_i64(self) -> i64 { self as i64 }}

impl AsU64 for u64 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for u32 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for usize { fn as_u64(self) -> u64 { self as u64 }}
888
/// An `i8` constant holding the unsigned value `i`.
pub fn C_u8(ccx: &CrateContext, i: u8) -> ValueRef {
    C_integral(Type::i8(ccx), i as u64, false)
}
892
893
// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> ValueRef {
    unsafe {
        // Return the memoized global if this string was already emitted.
        if let Some(&llval) = cx.const_cstr_cache().borrow().get(&s) {
            return llval;
        }

        let sc = llvm::LLVMConstStringInContext(cx.llcx(),
                                                s.as_ptr() as *const c_char,
                                                s.len() as c_uint,
                                                !null_terminated as Bool);

        // Give the global a gensym'd name so it cannot collide with another
        // definition.
        let gsym = token::gensym("str");
        let sym = format!("str{}", gsym.0);
        let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{
            bug!("symbol `{}` is already defined", sym);
        });
        llvm::LLVMSetInitializer(g, sc);
        llvm::LLVMSetGlobalConstant(g, True);
        llvm::SetLinkage(g, llvm::InternalLinkage);

        cx.const_cstr_cache().borrow_mut().insert(s, g);
        g
    }
}
920
// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
/// A `str_slice` constant: (i8* data pointer, length) for the string `s`.
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
    let len = s.len();
    let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx));
    C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])
}
928
/// An anonymous struct constant built from `elts` in `cx`'s LLVM context.
pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef {
    C_struct_in_context(cx.llcx(), elts, packed)
}
932
/// An anonymous struct constant built from `elts` in the given LLVM context.
pub fn C_struct_in_context(llcx: ContextRef, elts: &[ValueRef], packed: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstStructInContext(llcx,
                                       elts.as_ptr(), elts.len() as c_uint,
                                       packed as Bool)
    }
}
940
/// A constant of the named struct type `t` with field values `elts`.
pub fn C_named_struct(t: Type, elts: &[ValueRef]) -> ValueRef {
    unsafe {
        llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint)
    }
}
946
947 pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef {
948 unsafe {
949 return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint);
950 }
951 }
952
953 pub fn C_vector(elts: &[ValueRef]) -> ValueRef {
954 unsafe {
955 return llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint);
956 }
957 }
958
/// A constant byte string built from `bytes` in `cx`'s LLVM context.
pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef {
    C_bytes_in_context(cx.llcx(), bytes)
}
962
963 pub fn C_bytes_in_context(llcx: ContextRef, bytes: &[u8]) -> ValueRef {
964 unsafe {
965 let ptr = bytes.as_ptr() as *const c_char;
966 return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True);
967 }
968 }
969
/// Extracts the constant value at the (possibly nested) index path `us`
/// from the constant aggregate `v`.
pub fn const_get_elt(v: ValueRef, us: &[c_uint])
                     -> ValueRef {
    unsafe {
        let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint);

        debug!("const_get_elt(v={:?}, us={:?}, r={:?})",
               Value(v), us, Value(r));

        r
    }
}
981
/// The sign-extended value of the constant integer `v`.
pub fn const_to_int(v: ValueRef) -> i64 {
    unsafe {
        llvm::LLVMConstIntGetSExtValue(v)
    }
}
987
/// The zero-extended value of the constant integer `v`.
pub fn const_to_uint(v: ValueRef) -> u64 {
    unsafe {
        llvm::LLVMConstIntGetZExtValue(v)
    }
}
993
/// Is `v` an LLVM ConstantInt?
fn is_const_integral(v: ValueRef) -> bool {
    unsafe {
        !llvm::LLVMIsAConstantInt(v).is_null()
    }
}
999
1000 pub fn const_to_opt_int(v: ValueRef) -> Option<i64> {
1001 unsafe {
1002 if is_const_integral(v) {
1003 Some(llvm::LLVMConstIntGetSExtValue(v))
1004 } else {
1005 None
1006 }
1007 }
1008 }
1009
1010 pub fn const_to_opt_uint(v: ValueRef) -> Option<u64> {
1011 unsafe {
1012 if is_const_integral(v) {
1013 Some(llvm::LLVMConstIntGetZExtValue(v))
1014 } else {
1015 None
1016 }
1017 }
1018 }
1019
/// Is `val` the `undef` value?
pub fn is_undef(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsUndef(val) != False
    }
}
1025
/// Is `val` the null constant?
#[allow(dead_code)] // potentially useful
pub fn is_null(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsNull(val) != False
    }
}
1032
/// Applies the enclosing function's parameter substitutions to type `t`.
pub fn monomorphize_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
    bcx.fcx.monomorphize(&t)
}
1036
1037 pub fn node_id_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, id: ast::NodeId) -> Ty<'tcx> {
1038 let tcx = bcx.tcx();
1039 let t = tcx.node_id_to_type(id);
1040 monomorphize_type(bcx, t)
1041 }
1042
/// Monomorphized type of the expression `ex` (unadjusted).
pub fn expr_ty<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> Ty<'tcx> {
    node_id_type(bcx, ex.id)
}
1046
/// Monomorphized type of `ex` after typeck's adjustments (autoderef,
/// coercions, etc.) have been applied.
pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> Ty<'tcx> {
    monomorphize_type(bcx, bcx.tcx().expr_ty_adjusted(ex))
}
1050
1051 /// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
1052 /// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
1053 /// guarantee to us that all nested obligations *could be* resolved if we wanted to.
1054 pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
1055 span: Span,
1056 trait_ref: ty::PolyTraitRef<'tcx>)
1057 -> traits::Vtable<'tcx, ()>
1058 {
1059 let tcx = ccx.tcx();
1060
1061 // Remove any references to regions; this helps improve caching.
1062 let trait_ref = tcx.erase_regions(&trait_ref);
1063
1064 // First check the cache.
1065 match ccx.trait_cache().borrow().get(&trait_ref) {
1066 Some(vtable) => {
1067 info!("Cache hit: {:?}", trait_ref);
1068 return (*vtable).clone();
1069 }
1070 None => { }
1071 }
1072
1073 debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}",
1074 trait_ref, trait_ref.def_id());
1075
1076
1077 // Do the initial selection for the obligation. This yields the
1078 // shallow result we are looking for -- that is, what specific impl.
1079 let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables, ProjectionMode::Any);
1080 let mut selcx = SelectionContext::new(&infcx);
1081
1082 let obligation =
1083 traits::Obligation::new(traits::ObligationCause::misc(span, ast::DUMMY_NODE_ID),
1084 trait_ref.to_poly_trait_predicate());
1085 let selection = match selcx.select(&obligation) {
1086 Ok(Some(selection)) => selection,
1087 Ok(None) => {
1088 // Ambiguity can happen when monomorphizing during trans
1089 // expands to some humongo type that never occurred
1090 // statically -- this humongo type can then overflow,
1091 // leading to an ambiguous result. So report this as an
1092 // overflow bug, since I believe this is the only case
1093 // where ambiguity can result.
1094 debug!("Encountered ambiguity selecting `{:?}` during trans, \
1095 presuming due to overflow",
1096 trait_ref);
1097 ccx.sess().span_fatal(
1098 span,
1099 "reached the recursion limit during monomorphization (selection ambiguity)");
1100 }
1101 Err(e) => {
1102 span_bug!(
1103 span,
1104 "Encountered error `{:?}` selecting `{:?}` during trans",
1105 e,
1106 trait_ref)
1107 }
1108 };
1109
1110 // Currently, we use a fulfillment context to completely resolve
1111 // all nested obligations. This is because they can inform the
1112 // inference of the impl's type parameters.
1113 let mut fulfill_cx = traits::FulfillmentContext::new();
1114 let vtable = selection.map(|predicate| {
1115 fulfill_cx.register_predicate_obligation(&infcx, predicate);
1116 });
1117 let vtable = infer::drain_fulfillment_cx_or_panic(
1118 span, &infcx, &mut fulfill_cx, &vtable
1119 );
1120
1121 info!("Cache miss: {:?} => {:?}", trait_ref, vtable);
1122
1123 ccx.trait_cache().borrow_mut().insert(trait_ref, vtable.clone());
1124
1125 vtable
1126 }
1127
/// Normalizes the predicates and checks whether they hold. If this
/// returns false, then either normalize encountered an error or one
/// of the predicates did not hold. Used when creating vtables to
/// check for unsatisfiable methods.
pub fn normalize_and_test_predicates<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                               predicates: Vec<ty::Predicate<'tcx>>)
                                               -> bool
{
    debug!("normalize_and_test_predicates(predicates={:?})",
           predicates);

    let tcx = ccx.tcx();
    // Fresh inference context for this query; nothing leaks out of it.
    let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables, ProjectionMode::Any);
    let mut selcx = SelectionContext::new(&infcx);
    let mut fulfill_cx = traits::FulfillmentContext::new();
    let cause = traits::ObligationCause::dummy();
    let traits::Normalized { value: predicates, obligations } =
        traits::normalize(&mut selcx, cause.clone(), &predicates);
    // Normalization can itself produce new obligations; register those first.
    for obligation in obligations {
        fulfill_cx.register_predicate_obligation(&infcx, obligation);
    }
    // Then register each (normalized) predicate to be proven.
    for predicate in predicates {
        let obligation = traits::Obligation::new(cause.clone(), predicate);
        fulfill_cx.register_predicate_obligation(&infcx, obligation);
    }

    // Everything holds iff all registered obligations can be fulfilled.
    infer::drain_fulfillment_cx(&infcx, &mut fulfill_cx, &()).is_ok()
}
1156
1157 pub fn langcall(bcx: Block,
1158 span: Option<Span>,
1159 msg: &str,
1160 li: LangItem)
1161 -> DefId {
1162 match bcx.tcx().lang_items.require(li) {
1163 Ok(id) => id,
1164 Err(s) => {
1165 let msg = format!("{} {}", msg, s);
1166 match span {
1167 Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]),
1168 None => bcx.tcx().sess.fatal(&msg[..]),
1169 }
1170 }
1171 }
1172 }
1173
/// Return the VariantDef corresponding to an inlined variant node
pub fn inlined_variant_def<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                     inlined_vid: ast::NodeId)
                                     -> ty::VariantDef<'tcx>
{

    let ctor_ty = ccx.tcx().node_id_to_type(inlined_vid);
    debug!("inlined_variant_def: ctor_ty={:?} inlined_vid={:?}", ctor_ty,
           inlined_vid);
    // If the node's type is a constructor fn (TyFnDef), the ADT we want is
    // its return type; otherwise the node's type is the ADT itself.
    let adt_def = match ctor_ty.sty {
        ty::TyFnDef(_, _, &ty::BareFnTy { sig: ty::Binder(ty::FnSig {
            output: ty::FnConverging(ty), ..
        }), ..}) => ty,
        _ => ctor_ty
    }.ty_adt_def().unwrap();
    let inlined_vid_def_id = ccx.tcx().map.local_def_id(inlined_vid);
    // Find the variant either by its local DefId, or — NOTE(review):
    // presumably for cross-crate inlined items — via the `external()` map
    // from the variant's original DefId to the inlined local NodeId; confirm.
    adt_def.variants.iter().find(|v| {
        inlined_vid_def_id == v.did ||
        ccx.external().borrow().get(&v.did) == Some(&Some(inlined_vid))
    }).unwrap_or_else(|| {
        bug!("no variant for {:?}::{}", adt_def, inlined_vid)
    })
}
1197
1198 // To avoid UB from LLVM, these two functions mask RHS with an
1199 // appropriate mask unconditionally (i.e. the fallback behavior for
1200 // all shifts). For 32- and 64-bit types, this matches the semantics
1201 // of Java. (See related discussion on #1877 and #10183.)
1202
1203 pub fn build_unchecked_lshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1204 lhs: ValueRef,
1205 rhs: ValueRef,
1206 binop_debug_loc: DebugLoc) -> ValueRef {
1207 let rhs = base::cast_shift_expr_rhs(bcx, hir::BinOp_::BiShl, lhs, rhs);
1208 // #1877, #10183: Ensure that input is always valid
1209 let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
1210 build::Shl(bcx, lhs, rhs, binop_debug_loc)
1211 }
1212
1213 pub fn build_unchecked_rshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1214 lhs_t: Ty<'tcx>,
1215 lhs: ValueRef,
1216 rhs: ValueRef,
1217 binop_debug_loc: DebugLoc) -> ValueRef {
1218 let rhs = base::cast_shift_expr_rhs(bcx, hir::BinOp_::BiShr, lhs, rhs);
1219 // #1877, #10183: Ensure that input is always valid
1220 let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
1221 let is_signed = lhs_t.is_signed();
1222 if is_signed {
1223 build::AShr(bcx, lhs, rhs, binop_debug_loc)
1224 } else {
1225 build::LShr(bcx, lhs, rhs, binop_debug_loc)
1226 }
1227 }
1228
/// ANDs the shift amount `rhs` with `bit-width - 1` of its own type, so the
/// emitted shift count is always in range (avoids LLVM UB on over-wide shifts;
/// see the masking discussion above).
fn shift_mask_rhs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                              rhs: ValueRef,
                              debug_loc: DebugLoc) -> ValueRef {
    let rhs_llty = val_ty(rhs);
    build::And(bcx, rhs, shift_mask_val(bcx, rhs_llty, rhs_llty, false), debug_loc)
}
1235
1236 pub fn shift_mask_val<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1237 llty: Type,
1238 mask_llty: Type,
1239 invert: bool) -> ValueRef {
1240 let kind = llty.kind();
1241 match kind {
1242 TypeKind::Integer => {
1243 // i8/u8 can shift by at most 7, i16/u16 by at most 15, etc.
1244 let val = llty.int_width() - 1;
1245 if invert {
1246 C_integral(mask_llty, !val, true)
1247 } else {
1248 C_integral(mask_llty, val, false)
1249 }
1250 },
1251 TypeKind::Vector => {
1252 let mask = shift_mask_val(bcx, llty.element_type(), mask_llty.element_type(), invert);
1253 build::VectorSplat(bcx, mask_llty.vector_length(), mask)
1254 },
1255 _ => bug!("shift_mask_val: expected Integer or Vector, found {:?}", kind),
1256 }
1257 }