// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, ValueRef};
use rustc::ty::{self, Ty};
use rustc::ty::layout::{self, Align, TyLayout, LayoutOf};
use rustc::mir;
use rustc::mir::tcx::PlaceTy;
use rustc_data_structures::indexed_vec::Idx;
use base;
use builder::Builder;
use common::{CodegenCx, C_undef, C_usize, C_u8, C_u32, C_uint, C_null, C_uint_big};
use consts;
use type_of::LayoutLlvmExt;
use type_::Type;
use value::Value;
use glue;

use std::ptr;

use super::{FunctionCx, LocalRef};
use super::operand::{OperandRef, OperandValue};

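/// The backend representation of a MIR "place": a pointer to a location
/// in memory, together with its monomorphized layout and known alignment.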
#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx> {
    /// Pointer to the contents of the place
    pub llval: ValueRef,

    /// This place's extra data if it is unsized, or null
    pub llextra: ValueRef,

    /// Monomorphized type of this place, including variant information
    pub layout: TyLayout<'tcx>,

    /// What alignment we know for this place
    pub align: Align,
}

impl<'a, 'tcx> PlaceRef<'tcx> {
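    /// Construct a `PlaceRef` for a sized value; no extra data is needed,
    /// so `llextra` is left null.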
    pub fn new_sized(llval: ValueRef,
                     layout: TyLayout<'tcx>,
                     align: Align)
                     -> PlaceRef<'tcx> {
        PlaceRef {
            llval,
            llextra: ptr::null_mut(),
            layout,
            align
        }
    }

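    /// Construct a `PlaceRef` backed by a fresh stack allocation
    /// (`alloca`) sized and aligned for the given layout.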
    pub fn alloca(bx: &Builder<'a, 'tcx>, layout: TyLayout<'tcx>, name: &str)
                  -> PlaceRef<'tcx> {
        debug!("alloca({:?}: {:?})", name, layout);
        let tmp = bx.alloca(layout.llvm_type(bx.cx), name, layout.align);
        Self::new_sized(tmp, layout, layout.align)
    }

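    /// Return the number of elements of this array or slice place: a
    /// constant from the layout for arrays, or the length carried in
    /// `llextra` for unsized slices.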
    pub fn len(&self, cx: &CodegenCx<'a, 'tcx>) -> ValueRef {
        if let layout::FieldPlacement::Array { count, .. } = self.layout.fields {
            if self.layout.is_unsized() {
                assert!(self.has_extra());
                assert_eq!(count, 0);
                self.llextra
            } else {
                C_usize(cx, count)
            }
        } else {
            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
        }
    }

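    /// Whether this place carries extra data, i.e. whether it is unsized
    /// (e.g. a slice length or a trait object vtable pointer).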
    pub fn has_extra(&self) -> bool {
        !self.llextra.is_null()
    }

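    /// Load the value of this (sized) place into an `OperandRef`: as an
    /// immediate, as a scalar pair, or by reference, depending on the
    /// layout's ABI.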
    pub fn load(&self, bx: &Builder<'a, 'tcx>) -> OperandRef<'tcx> {
        debug!("PlaceRef::load: {:?}", self);

        assert!(!self.has_extra());

        if self.layout.is_zst() {
            return OperandRef::new_zst(bx.cx, self.layout);
        }

        let scalar_load_metadata = |load, scalar: &layout::Scalar| {
            let vr = scalar.valid_range.clone();
            match scalar.value {
                layout::Int(..) => {
                    let range = scalar.valid_range_exclusive(bx.cx);
                    if range.start != range.end {
                        bx.range_metadata(load, range);
                    }
                }
                layout::Pointer if vr.start() < vr.end() && !vr.contains(&0) => {
                    bx.nonnull_metadata(load);
                }
                _ => {}
            }
        };

        let val = if self.layout.is_llvm_immediate() {
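            // If this place refers to a constant global, read its
            // initializer directly instead of emitting a load.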
            let mut const_llval = ptr::null_mut();
            unsafe {
                let global = llvm::LLVMIsAGlobalVariable(self.llval);
                if !global.is_null() && llvm::LLVMIsGlobalConstant(global) == llvm::True {
                    const_llval = llvm::LLVMGetInitializer(global);
                }
            }

            let llval = if !const_llval.is_null() {
                const_llval
            } else {
                let load = bx.load(self.llval, self.align);
                if let layout::Abi::Scalar(ref scalar) = self.layout.abi {
                    scalar_load_metadata(load, scalar);
                }
                load
            };
            OperandValue::Immediate(base::to_immediate(bx, llval, self.layout))
        } else if let layout::Abi::ScalarPair(ref a, ref b) = self.layout.abi {
            let load = |i, scalar: &layout::Scalar| {
                let mut llptr = bx.struct_gep(self.llval, i as u64);
                // Make sure to always load i1 as i8.
                if scalar.is_bool() {
                    llptr = bx.pointercast(llptr, Type::i8p(bx.cx));
                }
                let load = bx.load(llptr, self.align);
                scalar_load_metadata(load, scalar);
                if scalar.is_bool() {
                    bx.trunc(load, Type::i1(bx.cx))
                } else {
                    load
                }
            };
            OperandValue::Pair(load(0, a), load(1, b))
        } else {
            OperandValue::Ref(self.llval, self.align)
        };

        OperandRef { val, layout: self.layout }
    }

    /// Access a field, at a point when the value's case is known.
    pub fn project_field(self, bx: &Builder<'a, 'tcx>, ix: usize) -> PlaceRef<'tcx> {
        let cx = bx.cx;
        let field = self.layout.field(cx, ix);
        let offset = self.layout.fields.offset(ix);
        let align = self.align.min(self.layout.align).min(field.align);

        let simple = || {
            // Unions and newtypes only use an offset of 0.
            let llval = if offset.bytes() == 0 {
                self.llval
            } else if let layout::Abi::ScalarPair(ref a, ref b) = self.layout.abi {
                // Offsets have to match either first or second field.
                assert_eq!(offset, a.value.size(cx).abi_align(b.value.align(cx)));
                bx.struct_gep(self.llval, 1)
            } else {
                bx.struct_gep(self.llval, self.layout.llvm_field_index(ix))
            };
            PlaceRef {
                // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types.
                llval: bx.pointercast(llval, field.llvm_type(cx).ptr_to()),
                llextra: if cx.type_has_metadata(field.ty) {
                    self.llextra
                } else {
                    ptr::null_mut()
                },
                layout: field,
                align,
            }
        };

        // Simple cases, which don't need DST adjustment:
        //   * no metadata available - just log the case
        //   * known alignment - sized types, [T], str or a foreign type
        //   * packed struct - there is no alignment padding
        match field.ty.sty {
            _ if !self.has_extra() => {
                debug!("unsized field `{}` of `{:?}` has no metadata for adjustment",
                       ix, Value(self.llval));
                return simple();
            }
            _ if !field.is_unsized() => return simple(),
            ty::TySlice(..) | ty::TyStr | ty::TyForeign(..) => return simple(),
            ty::TyAdt(def, _) => {
                if def.repr.packed() {
                    // FIXME(eddyb) generalize the adjustment when we
                    // start supporting packing to larger alignments.
                    assert_eq!(self.layout.align.abi(), 1);
                    return simple();
                }
            }
            _ => {}
        }

        // We need to get the pointer manually now.
        // We do this by casting to a *i8, then offsetting it by the appropriate amount.
        // We do this instead of, say, simply adjusting the pointer from the result of a GEP
        // because the field may have an arbitrary alignment in the LLVM representation
        // anyway.
        //
        // To demonstrate:
        //   struct Foo<T: ?Sized> {
        //       x: u16,
        //       y: T
        //   }
        //
        // The type Foo<Foo<Trait>> is represented in LLVM as { u16, { u16, u8 }}, meaning that
        // the `y` field has 16-bit alignment.

        let meta = self.llextra;

        let unaligned_offset = C_usize(cx, offset.bytes());

        // Get the alignment of the field
        let (_, unsized_align) = glue::size_and_align_of_dst(bx, field.ty, meta);

        // Bump the unaligned offset up to the appropriate alignment using the
        // following expression:
        //
        //   (unaligned offset + (align - 1)) & -align
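        //
        // For example, with an unaligned offset of 2 and a field alignment
        // of 8, this computes (2 + 7) & -8 = 8.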

        // Calculate offset
        let align_sub_1 = bx.sub(unsized_align, C_usize(cx, 1u64));
        let offset = bx.and(bx.add(unaligned_offset, align_sub_1),
                            bx.neg(unsized_align));

        debug!("struct_field_ptr: DST field offset: {:?}", Value(offset));

        // Cast and adjust pointer
        let byte_ptr = bx.pointercast(self.llval, Type::i8p(cx));
        let byte_ptr = bx.gep(byte_ptr, &[offset]);

        // Finally, cast back to the type expected
        let ll_fty = field.llvm_type(cx);
        debug!("struct_field_ptr: Field type is {:?}", ll_fty);

        PlaceRef {
            llval: bx.pointercast(byte_ptr, ll_fty.ptr_to()),
            llextra: self.llextra,
            layout: field,
            align,
        }
    }

    /// Obtain the actual discriminant of a value.
    pub fn trans_get_discr(self, bx: &Builder<'a, 'tcx>, cast_to: Ty<'tcx>) -> ValueRef {
        let cast_to = bx.cx.layout_of(cast_to).immediate_llvm_type(bx.cx);
        if self.layout.abi == layout::Abi::Uninhabited {
            return C_undef(cast_to);
        }
        match self.layout.variants {
            layout::Variants::Single { index } => {
                let discr_val = self.layout.ty.ty_adt_def().map_or(
                    index as u128,
                    |def| def.discriminant_for_variant(bx.cx.tcx, index).val);
                return C_uint_big(cast_to, discr_val);
            }
            layout::Variants::Tagged { .. } |
            layout::Variants::NicheFilling { .. } => {},
        }

        let discr = self.project_field(bx, 0);
        let lldiscr = discr.load(bx).immediate();
        match self.layout.variants {
            layout::Variants::Single { .. } => bug!(),
            layout::Variants::Tagged { ref tag, .. } => {
                let signed = match tag.value {
                    layout::Int(_, signed) => signed,
                    _ => false
                };
                bx.intcast(lldiscr, cast_to, signed)
            }
            layout::Variants::NicheFilling {
                dataful_variant,
                ref niche_variants,
                niche_start,
                ..
            } => {
                let niche_llty = discr.layout.immediate_llvm_type(bx.cx);
                if niche_variants.start() == niche_variants.end() {
                    // FIXME(eddyb) Check the actual primitive type here.
                    let niche_llval = if niche_start == 0 {
                        // HACK(eddyb) Using `C_null` as it works on all types.
                        C_null(niche_llty)
                    } else {
                        C_uint_big(niche_llty, niche_start)
                    };
                    bx.select(bx.icmp(llvm::IntEQ, lldiscr, niche_llval),
                        C_uint(cast_to, *niche_variants.start() as u64),
                        C_uint(cast_to, dataful_variant as u64))
                } else {
                    // Rebase from niche values to discriminant values.
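                    // For example, with niche_variants = 1..=3 and
                    // niche_start = 5, delta is 4, so the loaded niche
                    // values 5, 6, 7 are rebased to the variant indices
                    // 1, 2, 3.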
                    let delta = niche_start.wrapping_sub(*niche_variants.start() as u128);
                    let lldiscr = bx.sub(lldiscr, C_uint_big(niche_llty, delta));
                    let lldiscr_max = C_uint(niche_llty, *niche_variants.end() as u64);
                    bx.select(bx.icmp(llvm::IntULE, lldiscr, lldiscr_max),
                        bx.intcast(lldiscr, cast_to, false),
                        C_uint(cast_to, dataful_variant as u64))
                }
            }
        }
    }

    /// Set the discriminant for a new value of the given case of the given
    /// representation.
    pub fn trans_set_discr(&self, bx: &Builder<'a, 'tcx>, variant_index: usize) {
        if self.layout.for_variant(bx.cx, variant_index).abi == layout::Abi::Uninhabited {
            return;
        }
        match self.layout.variants {
            layout::Variants::Single { index } => {
                assert_eq!(index, variant_index);
            }
            layout::Variants::Tagged { .. } => {
                let ptr = self.project_field(bx, 0);
                let to = self.layout.ty.ty_adt_def().unwrap()
                    .discriminant_for_variant(bx.tcx(), variant_index)
                    .val;
                bx.store(
                    C_uint_big(ptr.layout.llvm_type(bx.cx), to),
                    ptr.llval,
                    ptr.align);
            }
            layout::Variants::NicheFilling {
                dataful_variant,
                ref niche_variants,
                niche_start,
                ..
            } => {
                if variant_index != dataful_variant {
                    if bx.sess().target.target.arch == "arm" ||
                       bx.sess().target.target.arch == "aarch64" {
                        // Issue #34427: As workaround for LLVM bug on ARM,
                        // use memset of 0 before assigning niche value.
                        let llptr = bx.pointercast(self.llval, Type::i8(bx.cx).ptr_to());
                        let fill_byte = C_u8(bx.cx, 0);
                        let (size, align) = self.layout.size_and_align();
                        let size = C_usize(bx.cx, size.bytes());
                        let align = C_u32(bx.cx, align.abi() as u32);
                        base::call_memset(bx, llptr, fill_byte, size, align, false);
                    }

                    let niche = self.project_field(bx, 0);
                    let niche_llty = niche.layout.immediate_llvm_type(bx.cx);
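                    // Map the variant index into the niche's value range;
                    // this is the inverse of the rebase in `trans_get_discr`.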
                    let niche_value = ((variant_index - *niche_variants.start()) as u128)
                        .wrapping_add(niche_start);
                    // FIXME(eddyb) Check the actual primitive type here.
                    let niche_llval = if niche_value == 0 {
                        // HACK(eddyb) Using `C_null` as it works on all types.
                        C_null(niche_llty)
                    } else {
                        C_uint_big(niche_llty, niche_value)
                    };
                    OperandValue::Immediate(niche_llval).store(bx, niche);
                }
            }
        }
    }

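    /// Index into this array place, producing a place for the element
    /// at `llindex`.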
    pub fn project_index(&self, bx: &Builder<'a, 'tcx>, llindex: ValueRef)
                         -> PlaceRef<'tcx> {
        PlaceRef {
            llval: bx.inbounds_gep(self.llval, &[C_usize(bx.cx, 0), llindex]),
            llextra: ptr::null_mut(),
            layout: self.layout.field(bx.cx, 0),
            align: self.align
        }
    }

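    /// Downcast this place to the given enum variant, adjusting the
    /// layout and casting the pointer to the variant's struct type.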
    pub fn project_downcast(&self, bx: &Builder<'a, 'tcx>, variant_index: usize)
                            -> PlaceRef<'tcx> {
        let mut downcast = *self;
        downcast.layout = self.layout.for_variant(bx.cx, variant_index);

        // Cast to the appropriate variant struct type.
        let variant_ty = downcast.layout.llvm_type(bx.cx);
        downcast.llval = bx.pointercast(downcast.llval, variant_ty.ptr_to());

        downcast
    }

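    /// Mark the place's backing memory as live, via a lifetime start
    /// intrinsic.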
    pub fn storage_live(&self, bx: &Builder<'a, 'tcx>) {
        bx.lifetime_start(self.llval, self.layout.size);
    }

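    /// Mark the place's backing memory as dead, via a lifetime end
    /// intrinsic.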
    pub fn storage_dead(&self, bx: &Builder<'a, 'tcx>) {
        bx.lifetime_end(self.llval, self.layout.size);
    }
}

impl<'a, 'tcx> FunctionCx<'a, 'tcx> {
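    /// Translate a MIR place into a `PlaceRef`, handling locals, statics
    /// and each kind of projection element.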
    pub fn trans_place(&mut self,
                       bx: &Builder<'a, 'tcx>,
                       place: &mir::Place<'tcx>)
                       -> PlaceRef<'tcx> {
        debug!("trans_place(place={:?})", place);

        let cx = bx.cx;
        let tcx = cx.tcx;

        if let mir::Place::Local(index) = *place {
            match self.locals[index] {
                LocalRef::Place(place) => {
                    return place;
                }
                LocalRef::Operand(..) => {
                    bug!("using operand local {:?} as place", place);
                }
            }
        }

        let result = match *place {
            mir::Place::Local(_) => bug!(), // handled above
            mir::Place::Static(box mir::Static { def_id, ty }) => {
                let layout = cx.layout_of(self.monomorphize(&ty));
                PlaceRef::new_sized(consts::get_static(cx, def_id), layout, layout.align)
            },
            mir::Place::Projection(box mir::Projection {
                ref base,
                elem: mir::ProjectionElem::Deref
            }) => {
                // Load the pointer from its location.
                self.trans_consume(bx, base).deref(bx.cx)
            }
            mir::Place::Projection(ref projection) => {
                let tr_base = self.trans_place(bx, &projection.base);

                match projection.elem {
                    mir::ProjectionElem::Deref => bug!(),
                    mir::ProjectionElem::Field(ref field, _) => {
                        tr_base.project_field(bx, field.index())
                    }
                    mir::ProjectionElem::Index(index) => {
                        let index = &mir::Operand::Copy(mir::Place::Local(index));
                        let index = self.trans_operand(bx, index);
                        let llindex = index.immediate();
                        tr_base.project_index(bx, llindex)
                    }
                    mir::ProjectionElem::ConstantIndex { offset,
                                                         from_end: false,
                                                         min_length: _ } => {
                        let lloffset = C_usize(bx.cx, offset as u64);
                        tr_base.project_index(bx, lloffset)
                    }
                    mir::ProjectionElem::ConstantIndex { offset,
                                                         from_end: true,
                                                         min_length: _ } => {
                        let lloffset = C_usize(bx.cx, offset as u64);
                        let lllen = tr_base.len(bx.cx);
                        let llindex = bx.sub(lllen, lloffset);
                        tr_base.project_index(bx, llindex)
                    }
                    mir::ProjectionElem::Subslice { from, to } => {
                        let mut subslice = tr_base.project_index(bx,
                            C_usize(bx.cx, from as u64));
                        let projected_ty = PlaceTy::Ty { ty: tr_base.layout.ty }
                            .projection_ty(tcx, &projection.elem).to_ty(bx.tcx());
                        subslice.layout = bx.cx.layout_of(self.monomorphize(&projected_ty));

                        if subslice.layout.is_unsized() {
                            assert!(tr_base.has_extra());
                            subslice.llextra = bx.sub(tr_base.llextra,
                                C_usize(bx.cx, (from as u64) + (to as u64)));
                        }

                        // Cast the place pointer type to the new
                        // array or slice type (*[%_; new_len]).
                        subslice.llval = bx.pointercast(subslice.llval,
                            subslice.layout.llvm_type(bx.cx).ptr_to());

                        subslice
                    }
                    mir::ProjectionElem::Downcast(_, v) => {
                        tr_base.project_downcast(bx, v)
                    }
                }
            }
        };
        debug!("trans_place(place={:?}) => {:?}", place, result);
        result
    }

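    /// Compute the monomorphized type of the given MIR place.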
    pub fn monomorphized_place_ty(&self, place: &mir::Place<'tcx>) -> Ty<'tcx> {
        let tcx = self.cx.tcx;
        let place_ty = place.ty(self.mir, tcx);
        self.monomorphize(&place_ty.to_ty(tcx))
    }
}