]> git.proxmox.com Git - rustc.git/blob - src/librustc_trans/common.rs
Imported Upstream version 1.10.0+dfsg1
[rustc.git] / src / librustc_trans / common.rs
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 #![allow(non_camel_case_types, non_snake_case)]
12
13 //! Code that is useful in various trans modules.
14
15 use session::Session;
16 use llvm;
17 use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef, TypeKind};
18 use llvm::{True, False, Bool, OperandBundleDef};
19 use rustc::cfg;
20 use rustc::hir::def::Def;
21 use rustc::hir::def_id::DefId;
22 use rustc::infer::TransNormalize;
23 use rustc::util::common::MemoizationMap;
24 use middle::lang_items::LangItem;
25 use rustc::ty::subst::Substs;
26 use abi::{Abi, FnType};
27 use base;
28 use build;
29 use builder::Builder;
30 use callee::Callee;
31 use cleanup;
32 use consts;
33 use datum;
34 use debuginfo::{self, DebugLoc};
35 use declare;
36 use machine;
37 use mir::CachedMir;
38 use monomorphize;
39 use type_::Type;
40 use value::Value;
41 use rustc::ty::{self, Ty, TyCtxt};
42 use rustc::traits::{self, SelectionContext, ProjectionMode};
43 use rustc::ty::fold::TypeFoldable;
44 use rustc::hir;
45 use util::nodemap::NodeMap;
46
47 use arena::TypedArena;
48 use libc::{c_uint, c_char};
49 use std::ops::Deref;
50 use std::ffi::CString;
51 use std::cell::{Cell, RefCell};
52
53 use syntax::ast;
54 use syntax::codemap::{DUMMY_SP, Span};
55 use syntax::parse::token::InternedString;
56 use syntax::parse::token;
57
58 pub use context::{CrateContext, SharedCrateContext};
59
60 /// Is the type's representation size known at compile time?
61 pub fn type_is_sized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
62 ty.is_sized(tcx, &tcx.empty_parameter_environment(), DUMMY_SP)
63 }
64
65 pub fn type_is_fat_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
66 match ty.sty {
67 ty::TyRawPtr(ty::TypeAndMut{ty, ..}) |
68 ty::TyRef(_, ty::TypeAndMut{ty, ..}) |
69 ty::TyBox(ty) => {
70 !type_is_sized(tcx, ty)
71 }
72 _ => {
73 false
74 }
75 }
76 }
77
78 pub fn type_is_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
79 use machine::llsize_of_alloc;
80 use type_of::sizing_type_of;
81
82 let tcx = ccx.tcx();
83 let simple = ty.is_scalar() ||
84 ty.is_unique() || ty.is_region_ptr() ||
85 ty.is_simd();
86 if simple && !type_is_fat_ptr(tcx, ty) {
87 return true;
88 }
89 if !type_is_sized(tcx, ty) {
90 return false;
91 }
92 match ty.sty {
93 ty::TyStruct(..) | ty::TyEnum(..) | ty::TyTuple(..) | ty::TyArray(_, _) |
94 ty::TyClosure(..) => {
95 let llty = sizing_type_of(ccx, ty);
96 llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type())
97 }
98 _ => type_is_zero_size(ccx, ty)
99 }
100 }
101
102 /// Identify types which have size zero at runtime.
103 pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
104 use machine::llsize_of_alloc;
105 use type_of::sizing_type_of;
106 let llty = sizing_type_of(ccx, ty);
107 llsize_of_alloc(ccx, llty) == 0
108 }
109
110 /// Generates a unique symbol based off the name given. This is used to create
111 /// unique symbols for things like closures.
112 pub fn gensym_name(name: &str) -> ast::Name {
113 let num = token::gensym(name).0;
114 // use one colon which will get translated to a period by the mangler, and
115 // we're guaranteed that `num` is globally unique for this crate.
116 token::gensym(&format!("{}:{}", name, num))
117 }
118
119 /*
120 * A note on nomenclature of linking: "extern", "foreign", and "upcall".
121 *
122 * An "extern" is an LLVM symbol we wind up emitting an undefined external
123 * reference to. This means "we don't have the thing in this compilation unit,
124 * please make sure you link it in at runtime". This could be a reference to
125 * C code found in a C library, or rust code found in a rust crate.
126 *
127 * Most "externs" are implicitly declared (automatically) as a result of a
128 * user declaring an extern _module_ dependency; this causes the rust driver
129 * to locate an extern crate, scan its compilation metadata, and emit extern
130 * declarations for any symbols used by the declaring crate.
131 *
132 * A "foreign" is an extern that references C (or other non-rust ABI) code.
133 * There is no metadata to scan for extern references so in these cases either
134 * a header-digester like bindgen, or manual function prototypes, have to
135 * serve as declarators. So these are usually given explicitly as prototype
136 * declarations, in rust code, with ABI attributes on them noting which ABI to
137 * link via.
138 *
139 * An "upcall" is a foreign call generated by the compiler (not corresponding
140 * to any user-written call in the code) into the runtime library, to perform
141 * some helper task such as bringing a task to life, allocating memory, etc.
142 *
143 */
144
145 use Disr;
146
/// An AST node id paired with its source span; used throughout trans for
/// error reporting and debug-location bookkeeping.
#[derive(Copy, Clone)]
pub struct NodeIdAndSpan {
    pub id: ast::NodeId,
    pub span: Span,
}
152
153 pub fn expr_info(expr: &hir::Expr) -> NodeIdAndSpan {
154 NodeIdAndSpan { id: expr.id, span: expr.span }
155 }
156
/// The concrete version of ty::FieldDef. The name is the field index if
/// the field is numeric.
pub struct Field<'tcx>(pub ast::Name, pub Ty<'tcx>);
160
/// The concrete version of ty::VariantDef: a discriminant value plus the
/// monomorphized types of the variant's fields.
pub struct VariantInfo<'tcx> {
    // Discriminant value of this variant.
    pub discr: Disr,
    // Fields in declaration order, with substitutions already applied.
    pub fields: Vec<Field<'tcx>>
}
166
167 impl<'a, 'tcx> VariantInfo<'tcx> {
168 pub fn from_ty(tcx: TyCtxt<'a, 'tcx, 'tcx>,
169 ty: Ty<'tcx>,
170 opt_def: Option<Def>)
171 -> Self
172 {
173 match ty.sty {
174 ty::TyStruct(adt, substs) | ty::TyEnum(adt, substs) => {
175 let variant = match opt_def {
176 None => adt.struct_variant(),
177 Some(def) => adt.variant_of_def(def)
178 };
179
180 VariantInfo {
181 discr: Disr::from(variant.disr_val),
182 fields: variant.fields.iter().map(|f| {
183 Field(f.name, monomorphize::field_ty(tcx, substs, f))
184 }).collect()
185 }
186 }
187
188 ty::TyTuple(ref v) => {
189 VariantInfo {
190 discr: Disr(0),
191 fields: v.iter().enumerate().map(|(i, &t)| {
192 Field(token::intern(&i.to_string()), t)
193 }).collect()
194 }
195 }
196
197 _ => {
198 bug!("cannot get field types from the type {:?}", ty);
199 }
200 }
201 }
202
203 /// Return the variant corresponding to a given node (e.g. expr)
204 pub fn of_node(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>, id: ast::NodeId) -> Self {
205 let node_def = tcx.def_map.borrow().get(&id).map(|v| v.full_def());
206 Self::from_ty(tcx, ty, node_def)
207 }
208
209 pub fn field_index(&self, name: ast::Name) -> usize {
210 self.fields.iter().position(|&Field(n,_)| n == name).unwrap_or_else(|| {
211 bug!("unknown field `{}`", name)
212 })
213 }
214 }
215
/// Owning wrapper around a raw LLVM `BuilderRef`; the builder handle is
/// disposed when this wrapper is dropped.
pub struct BuilderRef_res {
    pub b: BuilderRef,
}
219
impl Drop for BuilderRef_res {
    fn drop(&mut self) {
        unsafe {
            // Release the LLVM builder owned by this wrapper. Assumes `b`
            // is a live, uniquely-owned handle -- holds as long as the
            // wrapper is the sole owner.
            llvm::LLVMDisposeBuilder(self.b);
        }
    }
}
227
/// Wrap a raw builder handle, transferring ownership to the wrapper.
pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {
    BuilderRef_res {
        b: b
    }
}
233
/// Assert that monomorphization left no type-inference variables behind in
/// the substitutions.
pub fn validate_substs(substs: &Substs) {
    assert!(!substs.types.needs_infer());
}

// work around bizarre resolve errors
// Datum of an rvalue (by-value temporary).
type RvalueDatum<'tcx> = datum::Datum<'tcx, datum::Rvalue>;
// Datum of an lvalue (addressable memory location).
pub type LvalueDatum<'tcx> = datum::Datum<'tcx, datum::Lvalue>;
241
#[derive(Clone, Debug)]
struct HintEntry<'tcx> {
    // The datum for the dropflag-hint itself; note that many
    // source-level Lvalues will be associated with the same
    // dropflag-hint datum.
    datum: cleanup::DropHintDatum<'tcx>,
}
249
pub struct DropFlagHintsMap<'tcx> {
    // Maps NodeId for expressions that read/write unfragmented state
    // to that state's drop-flag "hint." (A stack-local hint
    // indicates either that (1.) it is certain that no-drop is
    // needed, or (2.) inline drop-flag must be consulted.)
    node_map: NodeMap<HintEntry<'tcx>>,
}
257
258 impl<'tcx> DropFlagHintsMap<'tcx> {
259 pub fn new() -> DropFlagHintsMap<'tcx> { DropFlagHintsMap { node_map: NodeMap() } }
260 pub fn has_hint(&self, id: ast::NodeId) -> bool { self.node_map.contains_key(&id) }
261 pub fn insert(&mut self, id: ast::NodeId, datum: cleanup::DropHintDatum<'tcx>) {
262 self.node_map.insert(id, HintEntry { datum: datum });
263 }
264 pub fn hint_datum(&self, id: ast::NodeId) -> Option<cleanup::DropHintDatum<'tcx>> {
265 self.node_map.get(&id).map(|t|t.datum)
266 }
267 }
268
// Function context. Every LLVM function we create will have one of
// these.
pub struct FunctionContext<'a, 'tcx: 'a> {
    // The MIR for this function. At present, this is optional because
    // we only have MIR available for things that are local to the
    // crate.
    pub mir: Option<CachedMir<'a, 'tcx>>,

    // The ValueRef returned from a call to llvm::LLVMAddFunction; the
    // address of the first instruction in the sequence of
    // instructions for this function that will go in the .text
    // section of the executable we're generating.
    pub llfn: ValueRef,

    // always an empty parameter-environment NOTE: @jroesch another use of ParamEnv
    pub param_env: ty::ParameterEnvironment<'tcx>,

    // A pointer to where to store the return value. If the return type is
    // immediate, this points to an alloca in the function. Otherwise, it's a
    // pointer to the hidden first parameter of the function. After function
    // construction, this should always be Some.
    pub llretslotptr: Cell<Option<ValueRef>>,

    // These pub elements: "hoisted basic blocks" containing
    // administrative activities that have to happen in only one place in
    // the function, due to LLVM's quirks.
    // A marker for the place where we want to insert the function's static
    // allocas, so that LLVM will coalesce them into a single alloca call.
    pub alloca_insert_pt: Cell<Option<ValueRef>>,

    // The basic block all `return`s branch to; created lazily by
    // `get_llreturn`.
    pub llreturn: Cell<Option<BasicBlockRef>>,

    // If the function has any nested return's, including something like:
    // fn foo() -> Option<Foo> { Some(Foo { x: return None }) }, then
    // we use a separate alloca for each return
    pub needs_ret_allocas: bool,

    // When working with landingpad-based exceptions this value is alloca'd and
    // later loaded when using the resume instruction. This ends up being
    // critical to chaining landing pads and reusing already-translated
    // cleanups.
    //
    // Note that for cleanuppad-based exceptions this is not used.
    pub landingpad_alloca: Cell<Option<ValueRef>>,

    // Maps the DefId's for local variables to the allocas created for
    // them in llallocas.
    pub lllocals: RefCell<NodeMap<LvalueDatum<'tcx>>>,

    // Same as above, but for closure upvars
    pub llupvars: RefCell<NodeMap<ValueRef>>,

    // Carries info about drop-flags for local bindings (longer term,
    // paths) for the code being compiled.
    pub lldropflag_hints: RefCell<DropFlagHintsMap<'tcx>>,

    // Describes the return/argument LLVM types and their ABI handling.
    pub fn_ty: FnType,

    // If this function is being monomorphized, this contains the type
    // substitutions used.
    pub param_substs: &'tcx Substs<'tcx>,

    // The source span and nesting context where this function comes from, for
    // error reporting and symbol generation.
    pub span: Option<Span>,

    // The arena that blocks are allocated from.
    pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,

    // The arena that landing pads are allocated from.
    pub lpad_arena: TypedArena<LandingPad>,

    // This function's enclosing crate context.
    pub ccx: &'a CrateContext<'a, 'tcx>,

    // Used and maintained by the debuginfo module.
    pub debug_context: debuginfo::FunctionDebugContext,

    // Cleanup scopes.
    pub scopes: RefCell<Vec<cleanup::CleanupScope<'a, 'tcx>>>,

    // Control-flow graph of the function body, when available.
    pub cfg: Option<cfg::CFG>,
}
352
impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
    /// The MIR for this function; panics if none is available.
    pub fn mir(&self) -> CachedMir<'a, 'tcx> {
        self.mir.clone().expect("fcx.mir was empty")
    }

    /// Erase the alloca-insertion marker instruction once translation of
    /// the function is complete.
    pub fn cleanup(&self) {
        unsafe {
            llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt
                                                     .get()
                                                     .unwrap());
        }
    }

    /// Get the shared "return" basic block, creating it on first use.
    pub fn get_llreturn(&self) -> BasicBlockRef {
        if self.llreturn.get().is_none() {

            self.llreturn.set(Some(unsafe {
                llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn,
                                                    "return\0".as_ptr() as *const _)
            }))
        }

        self.llreturn.get().unwrap()
    }

    /// Slot to store the return value into: a fresh alloca when nested
    /// returns force one slot per `return`, otherwise the shared
    /// `llretslotptr` (which must be set by then).
    pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>, name: &str) -> ValueRef {
        if self.needs_ret_allocas {
            base::alloca(bcx, self.fn_ty.ret.memory_ty(self.ccx), name)
        } else {
            self.llretslotptr.get().unwrap()
        }
    }

    /// Append a new basic block named `name` to this function and wrap it
    /// in a `Block` context, optionally tagged with an AST node id.
    pub fn new_block(&'a self,
                     name: &str,
                     opt_node_id: Option<ast::NodeId>)
                     -> Block<'a, 'tcx> {
        unsafe {
            let name = CString::new(name).unwrap();
            let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),
                                                           self.llfn,
                                                           name.as_ptr());
            BlockS::new(llbb, opt_node_id, self)
        }
    }

    /// New block tagged with AST node `node_id` (used for debugging).
    pub fn new_id_block(&'a self,
                        name: &str,
                        node_id: ast::NodeId)
                        -> Block<'a, 'tcx> {
        self.new_block(name, Some(node_id))
    }

    /// New block not associated with any AST node.
    pub fn new_temp_block(&'a self,
                          name: &str)
                          -> Block<'a, 'tcx> {
        self.new_block(name, None)
    }

    /// Create a "join" block and branch every reachable block in `in_cxs`
    /// into it; if none of the inputs is reachable, the join block itself
    /// is marked unreachable.
    pub fn join_blocks(&'a self,
                       id: ast::NodeId,
                       in_cxs: &[Block<'a, 'tcx>])
                       -> Block<'a, 'tcx> {
        let out = self.new_id_block("join", id);
        let mut reachable = false;
        for bcx in in_cxs {
            if !bcx.unreachable.get() {
                build::Br(*bcx, out.llbb, DebugLoc::None);
                reachable = true;
            }
        }
        if !reachable {
            build::Unreachable(out);
        }
        return out;
    }

    /// Apply this function's parameter substitutions to `value`.
    pub fn monomorphize<T>(&self, value: &T) -> T
        where T: TransNormalize<'tcx>
    {
        monomorphize::apply_param_substs(self.ccx.tcx(),
                                         self.param_substs,
                                         value)
    }

    /// This is the same as `common::type_needs_drop`, except that it
    /// may use or update caches within this `FunctionContext`.
    pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
        self.ccx.tcx().type_needs_drop_given_env(ty, &self.param_env)
    }

    /// Return (declaring and caching on first use) the personality
    /// function used for this crate's landing pads.
    pub fn eh_personality(&self) -> ValueRef {
        // The exception handling personality function.
        //
        // If our compilation unit has the `eh_personality` lang item somewhere
        // within it, then we just need to translate that. Otherwise, we're
        // building an rlib which will depend on some upstream implementation of
        // this function, so we just codegen a generic reference to it. We don't
        // specify any of the types for the function, we just make it a symbol
        // that LLVM can later use.
        //
        // Note that MSVC is a little special here in that we don't use the
        // `eh_personality` lang item at all. Currently LLVM has support for
        // both Dwarf and SEH unwind mechanisms for MSVC targets and uses the
        // *name of the personality function* to decide what kind of unwind side
        // tables/landing pads to emit. It looks like Dwarf is used by default,
        // injecting a dependency on the `_Unwind_Resume` symbol for resuming
        // an "exception", but for MSVC we want to force SEH. This means that we
        // can't actually have the personality function be our standard
        // `rust_eh_personality` function, but rather we wired it up to the
        // CRT's custom personality function, which forces LLVM to consider
        // landing pads as "landing pads for SEH".
        let ccx = self.ccx;
        let tcx = ccx.tcx();
        match tcx.lang_items.eh_personality() {
            Some(def_id) if !base::wants_msvc_seh(ccx.sess()) => {
                Callee::def(ccx, def_id, tcx.mk_substs(Substs::empty())).reify(ccx).val
            }
            _ => {
                // Reuse a previously-declared personality, if any.
                if let Some(llpersonality) = ccx.eh_personality().get() {
                    return llpersonality
                }
                let name = if base::wants_msvc_seh(ccx.sess()) {
                    "__CxxFrameHandler3"
                } else {
                    "rust_eh_personality"
                };
                let fty = Type::variadic_func(&[], &Type::i32(ccx));
                let f = declare::declare_cfn(ccx, name, fty);
                // Cache the declaration so subsequent calls reuse it.
                ccx.eh_personality().set(Some(f));
                f
            }
        }
    }

    // Returns a ValueRef of the "eh_unwind_resume" lang item if one is defined,
    // otherwise declares it as an external function.
    pub fn eh_unwind_resume(&self) -> Callee<'tcx> {
        use attributes;
        let ccx = self.ccx;
        let tcx = ccx.tcx();
        assert!(ccx.sess().target.target.options.custom_unwind_resume);
        if let Some(def_id) = tcx.lang_items.eh_unwind_resume() {
            return Callee::def(ccx, def_id, tcx.mk_substs(Substs::empty()));
        }

        // No lang item: declare `rust_eh_unwind_resume` as an external
        // `unsafe extern "C" fn(*mut u8) -> !`.
        let ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy {
            unsafety: hir::Unsafety::Unsafe,
            abi: Abi::C,
            sig: ty::Binder(ty::FnSig {
                inputs: vec![tcx.mk_mut_ptr(tcx.types.u8)],
                output: ty::FnDiverging,
                variadic: false
            }),
        }));

        // Reuse a previously-made declaration when available.
        let unwresume = ccx.eh_unwind_resume();
        if let Some(llfn) = unwresume.get() {
            return Callee::ptr(datum::immediate_rvalue(llfn, ty));
        }
        let llfn = declare::declare_fn(ccx, "rust_eh_unwind_resume", ty);
        attributes::unwind(llfn, true);
        unwresume.set(Some(llfn));
        Callee::ptr(datum::immediate_rvalue(llfn, ty))
    }
}
519
// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
// with many basic blocks per function. All the basic blocks attached to a
// function are organized as a directed graph.
pub struct BlockS<'blk, 'tcx: 'blk> {
    // The BasicBlockRef returned from a call to
    // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
    // block to the function pointed to by llfn. We insert
    // instructions into that block by way of this block context.
    pub llbb: BasicBlockRef,

    // Whether a terminator instruction has been emitted for this block.
    pub terminated: Cell<bool>,
    // Whether this block is known to be unreachable.
    pub unreachable: Cell<bool>,

    // If this block is part of a landing pad, then this is `Some` indicating
    // what kind of landing pad it's in, otherwise this is none.
    pub lpad: Cell<Option<&'blk LandingPad>>,

    // AST node-id associated with this block, if any. Used for
    // debugging purposes only.
    pub opt_node_id: Option<ast::NodeId>,

    // The function context for the function to which this block is
    // attached.
    pub fcx: &'blk FunctionContext<'blk, 'tcx>,
}

// Blocks are arena-allocated and passed around as shared references.
pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;
549
550 impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
551 pub fn new(llbb: BasicBlockRef,
552 opt_node_id: Option<ast::NodeId>,
553 fcx: &'blk FunctionContext<'blk, 'tcx>)
554 -> Block<'blk, 'tcx> {
555 fcx.block_arena.alloc(BlockS {
556 llbb: llbb,
557 terminated: Cell::new(false),
558 unreachable: Cell::new(false),
559 lpad: Cell::new(None),
560 opt_node_id: opt_node_id,
561 fcx: fcx
562 })
563 }
564
565 pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
566 self.fcx.ccx
567 }
568 pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> {
569 self.fcx
570 }
571 pub fn tcx(&self) -> TyCtxt<'blk, 'tcx, 'tcx> {
572 self.fcx.ccx.tcx()
573 }
574 pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }
575
576 pub fn lpad(&self) -> Option<&'blk LandingPad> {
577 self.lpad.get()
578 }
579
580 pub fn mir(&self) -> CachedMir<'blk, 'tcx> {
581 self.fcx.mir()
582 }
583
584 pub fn name(&self, name: ast::Name) -> String {
585 name.to_string()
586 }
587
588 pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
589 self.tcx().map.node_to_string(id).to_string()
590 }
591
592 pub fn def(&self, nid: ast::NodeId) -> Def {
593 match self.tcx().def_map.borrow().get(&nid) {
594 Some(v) => v.full_def(),
595 None => {
596 bug!("no def associated with node id {}", nid);
597 }
598 }
599 }
600
601 pub fn to_str(&self) -> String {
602 format!("[block {:p}]", self)
603 }
604
605 pub fn monomorphize<T>(&self, value: &T) -> T
606 where T: TransNormalize<'tcx>
607 {
608 monomorphize::apply_param_substs(self.tcx(),
609 self.fcx.param_substs,
610 value)
611 }
612
613 pub fn build(&'blk self) -> BlockAndBuilder<'blk, 'tcx> {
614 BlockAndBuilder::new(self, OwnedBuilder::new_with_ccx(self.ccx()))
615 }
616 }
617
/// A `Builder` that owns its underlying LLVM builder handle and disposes
/// it on drop.
pub struct OwnedBuilder<'blk, 'tcx: 'blk> {
    builder: Builder<'blk, 'tcx>
}
621
622 impl<'blk, 'tcx> OwnedBuilder<'blk, 'tcx> {
623 pub fn new_with_ccx(ccx: &'blk CrateContext<'blk, 'tcx>) -> Self {
624 // Create a fresh builder from the crate context.
625 let llbuilder = unsafe {
626 llvm::LLVMCreateBuilderInContext(ccx.llcx())
627 };
628 OwnedBuilder {
629 builder: Builder {
630 llbuilder: llbuilder,
631 ccx: ccx,
632 }
633 }
634 }
635 }
636
impl<'blk, 'tcx> Drop for OwnedBuilder<'blk, 'tcx> {
    fn drop(&mut self) {
        unsafe {
            // Release the LLVM builder handle this wrapper owns.
            llvm::LLVMDisposeBuilder(self.builder.llbuilder);
        }
    }
}
644
/// A block context paired with an owned builder positioned inside it;
/// derefs to the builder for instruction emission.
pub struct BlockAndBuilder<'blk, 'tcx: 'blk> {
    bcx: Block<'blk, 'tcx>,
    owned_builder: OwnedBuilder<'blk, 'tcx>,
}
649
650 impl<'blk, 'tcx> BlockAndBuilder<'blk, 'tcx> {
651 pub fn new(bcx: Block<'blk, 'tcx>, owned_builder: OwnedBuilder<'blk, 'tcx>) -> Self {
652 // Set the builder's position to this block's end.
653 owned_builder.builder.position_at_end(bcx.llbb);
654 BlockAndBuilder {
655 bcx: bcx,
656 owned_builder: owned_builder,
657 }
658 }
659
660 pub fn with_block<F, R>(&self, f: F) -> R
661 where F: FnOnce(Block<'blk, 'tcx>) -> R
662 {
663 let result = f(self.bcx);
664 self.position_at_end(self.bcx.llbb);
665 result
666 }
667
668 pub fn map_block<F>(self, f: F) -> Self
669 where F: FnOnce(Block<'blk, 'tcx>) -> Block<'blk, 'tcx>
670 {
671 let BlockAndBuilder { bcx, owned_builder } = self;
672 let bcx = f(bcx);
673 BlockAndBuilder::new(bcx, owned_builder)
674 }
675
676 pub fn at_start<F, R>(&self, f: F) -> R
677 where F: FnOnce(&BlockAndBuilder<'blk, 'tcx>) -> R
678 {
679 self.position_at_start(self.bcx.llbb);
680 let r = f(self);
681 self.position_at_end(self.bcx.llbb);
682 r
683 }
684
685 // Methods delegated to bcx
686
687 pub fn is_unreachable(&self) -> bool {
688 self.bcx.unreachable.get()
689 }
690
691 pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
692 self.bcx.ccx()
693 }
694 pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> {
695 self.bcx.fcx()
696 }
697 pub fn tcx(&self) -> TyCtxt<'blk, 'tcx, 'tcx> {
698 self.bcx.tcx()
699 }
700 pub fn sess(&self) -> &'blk Session {
701 self.bcx.sess()
702 }
703
704 pub fn llbb(&self) -> BasicBlockRef {
705 self.bcx.llbb
706 }
707
708 pub fn mir(&self) -> CachedMir<'blk, 'tcx> {
709 self.bcx.mir()
710 }
711
712 pub fn monomorphize<T>(&self, value: &T) -> T
713 where T: TransNormalize<'tcx>
714 {
715 self.bcx.monomorphize(value)
716 }
717
718 pub fn set_lpad(&self, lpad: Option<LandingPad>) {
719 self.bcx.lpad.set(lpad.map(|p| &*self.fcx().lpad_arena.alloc(p)))
720 }
721 }
722
// Dereference straight to the builder so instruction-emission methods can
// be called on a `BlockAndBuilder` directly.
impl<'blk, 'tcx> Deref for BlockAndBuilder<'blk, 'tcx> {
    type Target = Builder<'blk, 'tcx>;
    fn deref(&self) -> &Self::Target {
        &self.owned_builder.builder
    }
}
729
/// A structure representing an active landing pad for the duration of a basic
/// block.
///
/// Each `Block` may contain an instance of this, indicating whether the block
/// is part of a landing pad or not. This is used to make decision about whether
/// to emit `invoke` instructions (e.g. in a landing pad we don't continue to
/// use `invoke`) and also about various function call metadata.
///
/// For GNU exceptions (`landingpad` + `resume` instructions) this structure is
/// just a bunch of `None` instances (not too interesting), but for MSVC
/// exceptions (`cleanuppad` + `cleanupret` instructions) this contains data.
/// When inside of a landing pad, each function call in LLVM IR needs to be
/// annotated with which landing pad it's a part of. This is accomplished via
/// the `OperandBundleDef` value created for MSVC landing pads.
pub struct LandingPad {
    // The `cleanuppad` instruction for an MSVC-style pad; `None` for GNU.
    cleanuppad: Option<ValueRef>,
    // The "funclet" operand bundle attached to calls inside an MSVC pad.
    operand: Option<OperandBundleDef>,
}
748
749 impl LandingPad {
750 pub fn gnu() -> LandingPad {
751 LandingPad { cleanuppad: None, operand: None }
752 }
753
754 pub fn msvc(cleanuppad: ValueRef) -> LandingPad {
755 LandingPad {
756 cleanuppad: Some(cleanuppad),
757 operand: Some(OperandBundleDef::new("funclet", &[cleanuppad])),
758 }
759 }
760
761 pub fn bundle(&self) -> Option<&OperandBundleDef> {
762 self.operand.as_ref()
763 }
764 }
765
766 impl Clone for LandingPad {
767 fn clone(&self) -> LandingPad {
768 LandingPad {
769 cleanuppad: self.cleanuppad,
770 operand: self.cleanuppad.map(|p| {
771 OperandBundleDef::new("funclet", &[p])
772 }),
773 }
774 }
775 }
776
/// A block context paired with the LLVM value that was computed in it;
/// the conventional return of expression-translation helpers.
pub struct Result<'blk, 'tcx: 'blk> {
    pub bcx: Block<'blk, 'tcx>,
    pub val: ValueRef
}
781
782 impl<'b, 'tcx> Result<'b, 'tcx> {
783 pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> {
784 Result {
785 bcx: bcx,
786 val: val,
787 }
788 }
789 }
790
/// LLVM type of the value `v`.
pub fn val_ty(v: ValueRef) -> Type {
    unsafe {
        Type::from_ref(llvm::LLVMTypeOf(v))
    }
}
796
// LLVM constant constructors.

/// The all-zeros constant of type `t`.
pub fn C_null(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstNull(t.to_ref())
    }
}

/// The `undef` value of type `t`.
pub fn C_undef(t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMGetUndef(t.to_ref())
    }
}

/// Integer constant of type `t` holding `u`, optionally sign-extended to
/// the type's width.
pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool)
    }
}

/// Floating-point constant of type `t` parsed from the decimal string `s`.
/// Panics if `s` contains an interior NUL byte.
pub fn C_floating(s: &str, t: Type) -> ValueRef {
    unsafe {
        let s = CString::new(s).unwrap();
        llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr())
    }
}

/// Floating-point constant of type `t` holding `f`.
pub fn C_floating_f64(f: f64, t: Type) -> ValueRef {
    unsafe {
        llvm::LLVMConstReal(t.to_ref(), f)
    }
}
828
/// The unit value: an empty, unpacked struct constant.
pub fn C_nil(ccx: &CrateContext) -> ValueRef {
    C_struct(ccx, &[], false)
}

/// An `i1` boolean constant.
pub fn C_bool(ccx: &CrateContext, val: bool) -> ValueRef {
    C_integral(Type::i1(ccx), val as u64, false)
}

/// A signed 32-bit integer constant.
pub fn C_i32(ccx: &CrateContext, i: i32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, true)
}

/// An unsigned 32-bit integer constant.
pub fn C_u32(ccx: &CrateContext, i: u32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, false)
}

/// An unsigned 64-bit integer constant.
pub fn C_u64(ccx: &CrateContext, i: u64) -> ValueRef {
    C_integral(Type::i64(ccx), i, false)
}
848
/// Constant of the target's `isize`/pointer-sized signed integer type.
/// Asserts that `i` fits in the target's integer width.
pub fn C_int<I: AsI64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_i64();

    let bit_size = machine::llbitsize_of_real(ccx, ccx.int_type());

    if bit_size < 64 {
        // make sure it doesn't overflow
        assert!(v < (1<<(bit_size-1)) && v >= -(1<<(bit_size-1)));
    }

    C_integral(ccx.int_type(), v as u64, true)
}

/// Constant of the target's `usize`/pointer-sized unsigned integer type.
/// Asserts that `i` fits in the target's integer width.
pub fn C_uint<I: AsU64>(ccx: &CrateContext, i: I) -> ValueRef {
    let v = i.as_u64();

    let bit_size = machine::llbitsize_of_real(ccx, ccx.int_type());

    if bit_size < 64 {
        // make sure it doesn't overflow
        assert!(v < (1<<bit_size));
    }

    C_integral(ccx.int_type(), v, false)
}
874
/// Widening conversion to `i64`, used by `C_int` to build signed
/// target-sized integer constants.
pub trait AsI64 { fn as_i64(self) -> i64; }
/// Widening conversion to `u64`, used by `C_uint` to build unsigned
/// target-sized integer constants.
pub trait AsU64 { fn as_u64(self) -> u64; }

// FIXME: remove the intptr conversions, because they
// are host-architecture-dependent
impl AsI64 for i64 { fn as_i64(self) -> i64 { self } }
impl AsI64 for i32 { fn as_i64(self) -> i64 { self as i64 } }
impl AsI64 for isize { fn as_i64(self) -> i64 { self as i64 } }

impl AsU64 for u64 { fn as_u64(self) -> u64 { self } }
impl AsU64 for u32 { fn as_u64(self) -> u64 { self as u64 } }
impl AsU64 for usize { fn as_u64(self) -> u64 { self as u64 } }
887
/// An unsigned 8-bit integer constant.
pub fn C_u8(ccx: &CrateContext, i: u8) -> ValueRef {
    C_integral(Type::i8(ccx), i as u64, false)
}
891
892
// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
/// Intern `s` as an internal constant string global, caching the global
/// per crate context so identical strings share one definition.
pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> ValueRef {
    unsafe {
        // Fast path: return the cached global for this string, if any.
        if let Some(&llval) = cx.const_cstr_cache().borrow().get(&s) {
            return llval;
        }

        let sc = llvm::LLVMConstStringInContext(cx.llcx(),
                                                s.as_ptr() as *const c_char,
                                                s.len() as c_uint,
                                                !null_terminated as Bool);

        // Pick a gensym'd global name so separate strings never collide.
        let gsym = token::gensym("str");
        let sym = format!("str{}", gsym.0);
        let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{
            bug!("symbol `{}` is already defined", sym);
        });
        llvm::LLVMSetInitializer(g, sc);
        llvm::LLVMSetGlobalConstant(g, True);
        llvm::SetLinkage(g, llvm::InternalLinkage);

        cx.const_cstr_cache().borrow_mut().insert(s, g);
        g
    }
}
919
// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
/// Build a constant `&str` value: a `str_slice` struct holding a pointer
/// to the (non-NUL-terminated) string data and its byte length.
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
    let len = s.len();
    let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx));
    C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])
}
927
/// Anonymous constant struct in the crate's LLVM context.
pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef {
    C_struct_in_context(cx.llcx(), elts, packed)
}

/// Anonymous constant struct in an explicit LLVM context.
pub fn C_struct_in_context(llcx: ContextRef, elts: &[ValueRef], packed: bool) -> ValueRef {
    unsafe {
        llvm::LLVMConstStructInContext(llcx,
                                       elts.as_ptr(), elts.len() as c_uint,
                                       packed as Bool)
    }
}
939
940 pub fn C_named_struct(t: Type, elts: &[ValueRef]) -> ValueRef {
941 unsafe {
942 llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint)
943 }
944 }
945
946 pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef {
947 unsafe {
948 return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint);
949 }
950 }
951
952 pub fn C_vector(elts: &[ValueRef]) -> ValueRef {
953 unsafe {
954 return llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint);
955 }
956 }
957
/// Constant byte-string (i8 array) in the crate's LLVM context.
pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef {
    C_bytes_in_context(cx.llcx(), bytes)
}

/// Constant byte-string (i8 array, no NUL terminator) in an explicit
/// LLVM context.
pub fn C_bytes_in_context(llcx: ContextRef, bytes: &[u8]) -> ValueRef {
    unsafe {
        let ptr = bytes.as_ptr() as *const c_char;
        return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True);
    }
}
968
/// Extract the element of constant aggregate `v` at the index path `us`.
pub fn const_get_elt(v: ValueRef, us: &[c_uint])
                     -> ValueRef {
    unsafe {
        let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint);

        debug!("const_get_elt(v={:?}, us={:?}, r={:?})",
               Value(v), us, Value(r));

        r
    }
}
980
/// Sign-extended value of the constant integer `v`.
pub fn const_to_int(v: ValueRef) -> i64 {
    unsafe {
        llvm::LLVMConstIntGetSExtValue(v)
    }
}

/// Zero-extended value of the constant integer `v`.
pub fn const_to_uint(v: ValueRef) -> u64 {
    unsafe {
        llvm::LLVMConstIntGetZExtValue(v)
    }
}
992
993 fn is_const_integral(v: ValueRef) -> bool {
994 unsafe {
995 !llvm::LLVMIsAConstantInt(v).is_null()
996 }
997 }
998
999 pub fn const_to_opt_int(v: ValueRef) -> Option<i64> {
1000 unsafe {
1001 if is_const_integral(v) {
1002 Some(llvm::LLVMConstIntGetSExtValue(v))
1003 } else {
1004 None
1005 }
1006 }
1007 }
1008
1009 pub fn const_to_opt_uint(v: ValueRef) -> Option<u64> {
1010 unsafe {
1011 if is_const_integral(v) {
1012 Some(llvm::LLVMConstIntGetZExtValue(v))
1013 } else {
1014 None
1015 }
1016 }
1017 }
1018
/// Is `val` the LLVM `undef` value?
pub fn is_undef(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsUndef(val) != False
    }
}

/// Is `val` an all-zeros/null constant?
#[allow(dead_code)] // potentially useful
pub fn is_null(val: ValueRef) -> bool {
    unsafe {
        llvm::LLVMIsNull(val) != False
    }
}
1031
1032 pub fn monomorphize_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
1033 bcx.fcx.monomorphize(&t)
1034 }
1035
1036 pub fn node_id_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, id: ast::NodeId) -> Ty<'tcx> {
1037 let tcx = bcx.tcx();
1038 let t = tcx.node_id_to_type(id);
1039 monomorphize_type(bcx, t)
1040 }
1041
1042 pub fn expr_ty<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> Ty<'tcx> {
1043 node_id_type(bcx, ex.id)
1044 }
1045
1046 pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> Ty<'tcx> {
1047 monomorphize_type(bcx, bcx.tcx().expr_ty_adjusted(ex))
1048 }
1049
/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
pub fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
                                    span: Span,
                                    trait_ref: ty::PolyTraitRef<'tcx>)
                                    -> traits::Vtable<'tcx, ()>
{
    let tcx = scx.tcx();

    // Remove any references to regions; this helps improve caching.
    let trait_ref = tcx.erase_regions(&trait_ref);

    // Results are memoized per (region-erased) trait-ref: on a cache hit the
    // closure below is never run.
    scx.trait_cache().memoize(trait_ref, || {
        debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}",
               trait_ref, trait_ref.def_id());

        // Do the initial selection for the obligation. This yields the
        // shallow result we are looking for -- that is, what specific impl.
        tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
            let mut selcx = SelectionContext::new(&infcx);

            let obligation_cause = traits::ObligationCause::misc(span,
                                                             ast::DUMMY_NODE_ID);
            let obligation = traits::Obligation::new(obligation_cause,
                                                     trait_ref.to_poly_trait_predicate());

            let selection = match selcx.select(&obligation) {
                Ok(Some(selection)) => selection,
                Ok(None) => {
                    // Ambiguity can happen when monomorphizing during trans
                    // expands to some humongo type that never occurred
                    // statically -- this humongo type can then overflow,
                    // leading to an ambiguous result. So report this as an
                    // overflow bug, since I believe this is the only case
                    // where ambiguity can result.
                    debug!("Encountered ambiguity selecting `{:?}` during trans, \
                            presuming due to overflow",
                           trait_ref);
                    tcx.sess.span_fatal(span,
                        "reached the recursion limit during monomorphization \
                         (selection ambiguity)");
                }
                Err(e) => {
                    span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans",
                              e, trait_ref)
                }
            };

            // Currently, we use a fulfillment context to completely resolve
            // all nested obligations. This is because they can inform the
            // inference of the impl's type parameters.
            let mut fulfill_cx = traits::FulfillmentContext::new();
            let vtable = selection.map(|predicate| {
                fulfill_cx.register_predicate_obligation(&infcx, predicate);
            });
            // Panics if any registered obligation cannot be proven; per the
            // doc comment above, type check should have guaranteed they hold.
            let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);

            info!("Cache miss: {:?} => {:?}", trait_ref, vtable);
            vtable
        })
    })
}
1113
/// Normalizes the predicates and checks whether they hold. If this
/// returns false, then either normalize encountered an error or one
/// of the predicates did not hold. Used when creating vtables to
/// check for unsatisfiable methods.
pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                               predicates: Vec<ty::Predicate<'tcx>>)
                                               -> bool
{
    debug!("normalize_and_test_predicates(predicates={:?})",
           predicates);

    tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
        let mut selcx = SelectionContext::new(&infcx);
        let mut fulfill_cx = traits::FulfillmentContext::new();
        let cause = traits::ObligationCause::dummy();
        // Normalization itself can produce further obligations (from
        // projections); register those alongside the predicates.
        let traits::Normalized { value: predicates, obligations } =
            traits::normalize(&mut selcx, cause.clone(), &predicates);
        for obligation in obligations {
            fulfill_cx.register_predicate_obligation(&infcx, obligation);
        }
        // Register each normalized predicate as an obligation to prove.
        for predicate in predicates {
            let obligation = traits::Obligation::new(cause.clone(), predicate);
            fulfill_cx.register_predicate_obligation(&infcx, obligation);
        }

        // Ok(..) iff every registered obligation could be fulfilled.
        infcx.drain_fulfillment_cx(&mut fulfill_cx, &()).is_ok()
    })
}
1142
1143 pub fn langcall(bcx: Block,
1144 span: Option<Span>,
1145 msg: &str,
1146 li: LangItem)
1147 -> DefId {
1148 match bcx.tcx().lang_items.require(li) {
1149 Ok(id) => id,
1150 Err(s) => {
1151 let msg = format!("{} {}", msg, s);
1152 match span {
1153 Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]),
1154 None => bcx.tcx().sess.fatal(&msg[..]),
1155 }
1156 }
1157 }
1158 }
1159
/// Return the VariantDef corresponding to an inlined variant node
pub fn inlined_variant_def<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                     inlined_vid: ast::NodeId)
                                     -> ty::VariantDef<'tcx>
{

    let ctor_ty = ccx.tcx().node_id_to_type(inlined_vid);
    debug!("inlined_variant_def: ctor_ty={:?} inlined_vid={:?}", ctor_ty,
           inlined_vid);
    // If the node is typed as a function (presumably a variant constructor),
    // use the function's return type instead; either way the resulting type
    // must have an ADT definition or we bail via `unwrap()`.
    let adt_def = match ctor_ty.sty {
        ty::TyFnDef(_, _, &ty::BareFnTy { sig: ty::Binder(ty::FnSig {
            output: ty::FnConverging(ty), ..
        }), ..}) => ty,
        _ => ctor_ty
    }.ty_adt_def().unwrap();
    let inlined_vid_def_id = ccx.tcx().map.local_def_id(inlined_vid);
    // Accept either a variant defined locally under this id, or one whose
    // inlined copy is recorded against this id in the `external` map.
    adt_def.variants.iter().find(|v| {
        inlined_vid_def_id == v.did ||
            ccx.external().borrow().get(&v.did) == Some(&Some(inlined_vid))
    }).unwrap_or_else(|| {
        bug!("no variant for {:?}::{}", adt_def, inlined_vid)
    })
}
1183
1184 // To avoid UB from LLVM, these two functions mask RHS with an
1185 // appropriate mask unconditionally (i.e. the fallback behavior for
1186 // all shifts). For 32- and 64-bit types, this matches the semantics
1187 // of Java. (See related discussion on #1877 and #10183.)
1188
1189 pub fn build_unchecked_lshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1190 lhs: ValueRef,
1191 rhs: ValueRef,
1192 binop_debug_loc: DebugLoc) -> ValueRef {
1193 let rhs = base::cast_shift_expr_rhs(bcx, hir::BinOp_::BiShl, lhs, rhs);
1194 // #1877, #10183: Ensure that input is always valid
1195 let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
1196 build::Shl(bcx, lhs, rhs, binop_debug_loc)
1197 }
1198
1199 pub fn build_unchecked_rshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1200 lhs_t: Ty<'tcx>,
1201 lhs: ValueRef,
1202 rhs: ValueRef,
1203 binop_debug_loc: DebugLoc) -> ValueRef {
1204 let rhs = base::cast_shift_expr_rhs(bcx, hir::BinOp_::BiShr, lhs, rhs);
1205 // #1877, #10183: Ensure that input is always valid
1206 let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
1207 let is_signed = lhs_t.is_signed();
1208 if is_signed {
1209 build::AShr(bcx, lhs, rhs, binop_debug_loc)
1210 } else {
1211 build::LShr(bcx, lhs, rhs, binop_debug_loc)
1212 }
1213 }
1214
1215 fn shift_mask_rhs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1216 rhs: ValueRef,
1217 debug_loc: DebugLoc) -> ValueRef {
1218 let rhs_llty = val_ty(rhs);
1219 build::And(bcx, rhs, shift_mask_val(bcx, rhs_llty, rhs_llty, false), debug_loc)
1220 }
1221
1222 pub fn shift_mask_val<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1223 llty: Type,
1224 mask_llty: Type,
1225 invert: bool) -> ValueRef {
1226 let kind = llty.kind();
1227 match kind {
1228 TypeKind::Integer => {
1229 // i8/u8 can shift by at most 7, i16/u16 by at most 15, etc.
1230 let val = llty.int_width() - 1;
1231 if invert {
1232 C_integral(mask_llty, !val, true)
1233 } else {
1234 C_integral(mask_llty, val, false)
1235 }
1236 },
1237 TypeKind::Vector => {
1238 let mask = shift_mask_val(bcx, llty.element_type(), mask_llty.element_type(), invert);
1239 build::VectorSplat(bcx, mask_llty.vector_length(), mask)
1240 },
1241 _ => bug!("shift_mask_val: expected Integer or Vector, found {:?}", kind),
1242 }
1243 }