]> git.proxmox.com Git - rustc.git/blame - src/librustc/ty/layout.rs
New upstream version 1.28.0~beta.14+dfsg1
[rustc.git] / src / librustc / ty / layout.rs
CommitLineData
54a0048b
SL
1// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
83c7162d
XL
11use session::{self, DataTypeKind};
12use ty::{self, Ty, TyCtxt, TypeFoldable, ReprOptions};
54a0048b 13
94b46f34 14use syntax::ast::{self, IntTy, UintTy};
54a0048b 15use syntax::attr;
3157f602 16use syntax_pos::DUMMY_SP;
54a0048b
SL
17
18use std::cmp;
19use std::fmt;
ff7c6d11 20use std::i128;
94b46f34 21use std::iter;
ea8adc8c 22use std::mem;
54a0048b 23
ea8adc8c
XL
24use ich::StableHashingContext;
25use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
26 StableHasherResult};
27
83c7162d 28pub use rustc_target::abi::*;
ff7c6d11 29
83c7162d
XL
30pub trait IntegerExt {
31 fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, signed: bool) -> Ty<'tcx>;
32 fn from_attr<C: HasDataLayout>(cx: C, ity: attr::IntType) -> Integer;
33 fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
34 ty: Ty<'tcx>,
35 repr: &ReprOptions,
36 min: i128,
37 max: i128)
38 -> (Integer, bool);
54a0048b
SL
39}
40
83c7162d
XL
41impl IntegerExt for Integer {
42 fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, signed: bool) -> Ty<'tcx> {
9e0c209e 43 match (*self, signed) {
9e0c209e
SL
44 (I8, false) => tcx.types.u8,
45 (I16, false) => tcx.types.u16,
46 (I32, false) => tcx.types.u32,
47 (I64, false) => tcx.types.u64,
32a655c1 48 (I128, false) => tcx.types.u128,
9e0c209e
SL
49 (I8, true) => tcx.types.i8,
50 (I16, true) => tcx.types.i16,
51 (I32, true) => tcx.types.i32,
52 (I64, true) => tcx.types.i64,
32a655c1 53 (I128, true) => tcx.types.i128,
9e0c209e
SL
54 }
55 }
56
54a0048b 57 /// Get the Integer type from an attr::IntType.
83c7162d 58 fn from_attr<C: HasDataLayout>(cx: C, ity: attr::IntType) -> Integer {
cc61c64b
XL
59 let dl = cx.data_layout();
60
54a0048b
SL
61 match ity {
62 attr::SignedInt(IntTy::I8) | attr::UnsignedInt(UintTy::U8) => I8,
63 attr::SignedInt(IntTy::I16) | attr::UnsignedInt(UintTy::U16) => I16,
64 attr::SignedInt(IntTy::I32) | attr::UnsignedInt(UintTy::U32) => I32,
65 attr::SignedInt(IntTy::I64) | attr::UnsignedInt(UintTy::U64) => I64,
32a655c1 66 attr::SignedInt(IntTy::I128) | attr::UnsignedInt(UintTy::U128) => I128,
2c00a5a8 67 attr::SignedInt(IntTy::Isize) | attr::UnsignedInt(UintTy::Usize) => {
54a0048b
SL
68 dl.ptr_sized_integer()
69 }
70 }
71 }
72
73 /// Find the appropriate Integer type and signedness for the given
74 /// signed discriminant range and #[repr] attribute.
ff7c6d11 75 /// N.B.: u128 values above i128::MAX will be treated as signed, but
54a0048b 76 /// that shouldn't affect anything, other than maybe debuginfo.
83c7162d 77 fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ff7c6d11
XL
78 ty: Ty<'tcx>,
79 repr: &ReprOptions,
80 min: i128,
81 max: i128)
82 -> (Integer, bool) {
54a0048b
SL
83 // Theoretically, negative values could be larger in unsigned representation
84 // than the unsigned representation of the signed minimum. However, if there
ff7c6d11
XL
85 // are any negative values, the only valid unsigned representation is u128
86 // which can fit all i128 values, so the result remains unaffected.
87 let unsigned_fit = Integer::fit_unsigned(cmp::max(min as u128, max as u128));
54a0048b
SL
88 let signed_fit = cmp::max(Integer::fit_signed(min), Integer::fit_signed(max));
89
476ff2be
SL
90 let mut min_from_extern = None;
91 let min_default = I8;
92
8bb4bdeb 93 if let Some(ity) = repr.int {
cc61c64b 94 let discr = Integer::from_attr(tcx, ity);
8bb4bdeb
XL
95 let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
96 if discr < fit {
97 bug!("Integer::repr_discr: `#[repr]` hint too small for \
98 discriminant range of enum `{}", ty)
99 }
100 return (discr, ity.is_signed());
101 }
102
cc61c64b 103 if repr.c() {
8bb4bdeb
XL
104 match &tcx.sess.target.target.arch[..] {
105 // WARNING: the ARM EABI has two variants; the one corresponding
106 // to `at_least == I32` appears to be used on Linux and NetBSD,
107 // but some systems may use the variant corresponding to no
108 // lower bound. However, we don't run on those yet...?
109 "arm" => min_from_extern = Some(I32),
110 _ => min_from_extern = Some(I32),
54a0048b 111 }
476ff2be
SL
112 }
113
114 let at_least = min_from_extern.unwrap_or(min_default);
54a0048b
SL
115
116 // If there are no negative values, we can use the unsigned fit.
117 if min >= 0 {
118 (cmp::max(unsigned_fit, at_least), false)
119 } else {
120 (cmp::max(signed_fit, at_least), true)
121 }
122 }
123}
124
83c7162d
XL
125pub trait PrimitiveExt {
126 fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx>;
54a0048b
SL
127}
128
83c7162d
XL
129impl PrimitiveExt for Primitive {
130 fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
ff7c6d11
XL
131 match *self {
132 Int(i, signed) => i.to_ty(tcx, signed),
94b46f34
XL
133 Float(FloatTy::F32) => tcx.types.f32,
134 Float(FloatTy::F64) => tcx.types.f64,
ff7c6d11
XL
135 Pointer => tcx.mk_mut_ptr(tcx.mk_nil()),
136 }
137 }
54a0048b
SL
138}
139
ff7c6d11
XL
/// Field index of the data half of a fat (two-word) pointer:
/// the address of the box for a trait object, or the base address
/// for a slice.
pub const FAT_PTR_ADDR: usize = 0;

/// Field index of the metadata half of a fat pointer:
/// the address of the vtable for a trait object, or the length
/// for a slice.
pub const FAT_PTR_EXTRA: usize = 1;
476ff2be 151
83c7162d 152#[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable)]
54a0048b
SL
153pub enum LayoutError<'tcx> {
154 Unknown(Ty<'tcx>),
155 SizeOverflow(Ty<'tcx>)
156}
157
158impl<'tcx> fmt::Display for LayoutError<'tcx> {
159 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
160 match *self {
161 LayoutError::Unknown(ty) => {
162 write!(f, "the type `{:?}` has an unknown layout", ty)
163 }
164 LayoutError::SizeOverflow(ty) => {
165 write!(f, "the type `{:?}` is too big for the current architecture", ty)
166 }
167 }
168 }
169}
170
ff7c6d11
XL
171fn layout_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
172 query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
173 -> Result<&'tcx LayoutDetails, LayoutError<'tcx>>
174{
83c7162d
XL
175 ty::tls::with_related_context(tcx, move |icx| {
176 let rec_limit = *tcx.sess.recursion_limit.get();
177 let (param_env, ty) = query.into_parts();
5bcae85e 178
83c7162d
XL
179 if icx.layout_depth > rec_limit {
180 tcx.sess.fatal(
181 &format!("overflow representing the type `{}`", ty));
182 }
54a0048b 183
83c7162d
XL
184 // Update the ImplicitCtxt to increase the layout_depth
185 let icx = ty::tls::ImplicitCtxt {
186 layout_depth: icx.layout_depth + 1,
187 ..icx.clone()
188 };
ff7c6d11 189
83c7162d
XL
190 ty::tls::enter_context(&icx, |_| {
191 let cx = LayoutCx { tcx, param_env };
192 cx.layout_raw_uncached(ty)
193 })
194 })
ff7c6d11
XL
195}
196
94b46f34
XL
197pub fn provide(providers: &mut ty::query::Providers) {
198 *providers = ty::query::Providers {
ff7c6d11
XL
199 layout_raw,
200 ..*providers
201 };
202}
203
2c00a5a8
XL
204#[derive(Copy, Clone)]
205pub struct LayoutCx<'tcx, C> {
206 pub tcx: C,
207 pub param_env: ty::ParamEnv<'tcx>
208}
209
210impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
211 fn layout_raw_uncached(self, ty: Ty<'tcx>)
212 -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> {
213 let tcx = self.tcx;
214 let param_env = self.param_env;
215 let dl = self.data_layout();
ff7c6d11
XL
216 let scalar_unit = |value: Primitive| {
217 let bits = value.size(dl).bits();
218 assert!(bits <= 128);
219 Scalar {
220 value,
221 valid_range: 0..=(!0 >> (128 - bits))
222 }
223 };
224 let scalar = |value: Primitive| {
2c00a5a8 225 tcx.intern_layout(LayoutDetails::scalar(self, scalar_unit(value)))
ff7c6d11
XL
226 };
227 let scalar_pair = |a: Scalar, b: Scalar| {
228 let align = a.value.align(dl).max(b.value.align(dl)).max(dl.aggregate_align);
229 let b_offset = a.value.size(dl).abi_align(b.value.align(dl));
230 let size = (b_offset + b.value.size(dl)).abi_align(align);
231 LayoutDetails {
232 variants: Variants::Single { index: 0 },
233 fields: FieldPlacement::Arbitrary {
94b46f34 234 offsets: vec![Size::ZERO, b_offset],
ff7c6d11
XL
235 memory_index: vec![0, 1]
236 },
237 abi: Abi::ScalarPair(a, b),
238 align,
239 size
240 }
241 };
242
243 #[derive(Copy, Clone, Debug)]
244 enum StructKind {
245 /// A tuple, closure, or univariant which cannot be coerced to unsized.
246 AlwaysSized,
247 /// A univariant, the last field of which may be coerced to unsized.
248 MaybeUnsized,
249 /// A univariant, but with a prefix of an arbitrary size & alignment (e.g. enum tag).
250 Prefixed(Size, Align),
251 }
252 let univariant_uninterned = |fields: &[TyLayout], repr: &ReprOptions, kind| {
253 let packed = repr.packed();
254 if packed && repr.align > 0 {
255 bug!("struct cannot be packed and aligned");
256 }
257
83c7162d
XL
258 let pack = {
259 let pack = repr.pack as u64;
260 Align::from_bytes(pack, pack).unwrap()
261 };
262
ff7c6d11
XL
263 let mut align = if packed {
264 dl.i8_align
265 } else {
266 dl.aggregate_align
267 };
268
269 let mut sized = true;
94b46f34 270 let mut offsets = vec![Size::ZERO; fields.len()];
ff7c6d11
XL
271 let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
272
83c7162d 273 let mut optimize = !repr.inhibit_struct_field_reordering_opt();
ff7c6d11
XL
274 if let StructKind::Prefixed(_, align) = kind {
275 optimize &= align.abi() == 1;
276 }
277
278 if optimize {
279 let end = if let StructKind::MaybeUnsized = kind {
280 fields.len() - 1
281 } else {
282 fields.len()
283 };
284 let optimizing = &mut inverse_memory_index[..end];
83c7162d
XL
285 let field_align = |f: &TyLayout| {
286 if packed { f.align.min(pack).abi() } else { f.align.abi() }
287 };
ff7c6d11
XL
288 match kind {
289 StructKind::AlwaysSized |
290 StructKind::MaybeUnsized => {
291 optimizing.sort_by_key(|&x| {
292 // Place ZSTs first to avoid "interesting offsets",
293 // especially with only one or two non-ZST fields.
294 let f = &fields[x as usize];
83c7162d
XL
295 (!f.is_zst(), cmp::Reverse(field_align(f)))
296 });
ff7c6d11
XL
297 }
298 StructKind::Prefixed(..) => {
83c7162d 299 optimizing.sort_by_key(|&x| field_align(&fields[x as usize]));
ea8adc8c
XL
300 }
301 }
ff7c6d11 302 }
ea8adc8c 303
ff7c6d11
XL
304 // inverse_memory_index holds field indices by increasing memory offset.
305 // That is, if field 5 has offset 0, the first element of inverse_memory_index is 5.
306 // We now write field offsets to the corresponding offset slot;
307 // field 5 with offset 0 puts 0 in offsets[5].
308 // At the bottom of this function, we use inverse_memory_index to produce memory_index.
309
94b46f34 310 let mut offset = Size::ZERO;
ff7c6d11
XL
311
312 if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
83c7162d
XL
313 if packed {
314 let prefix_align = prefix_align.min(pack);
315 align = align.max(prefix_align);
316 } else {
ff7c6d11
XL
317 align = align.max(prefix_align);
318 }
319 offset = prefix_size.abi_align(prefix_align);
320 }
321
322 for &i in &inverse_memory_index {
323 let field = fields[i as usize];
324 if !sized {
325 bug!("univariant: field #{} of `{}` comes after unsized field",
326 offsets.len(), ty);
cc61c64b 327 }
ff7c6d11 328
ff7c6d11
XL
329 if field.is_unsized() {
330 sized = false;
331 }
332
333 // Invariant: offset < dl.obj_size_bound() <= 1<<61
83c7162d
XL
334 if packed {
335 let field_pack = field.align.min(pack);
336 offset = offset.abi_align(field_pack);
337 align = align.max(field_pack);
338 }
339 else {
ff7c6d11
XL
340 offset = offset.abi_align(field.align);
341 align = align.max(field.align);
342 }
343
344 debug!("univariant offset: {:?} field: {:#?}", offset, field);
345 offsets[i as usize] = offset;
346
347 offset = offset.checked_add(field.size, dl)
348 .ok_or(LayoutError::SizeOverflow(ty))?;
54a0048b 349 }
ff7c6d11
XL
350
351 if repr.align > 0 {
352 let repr_align = repr.align as u64;
353 align = align.max(Align::from_bytes(repr_align, repr_align).unwrap());
354 debug!("univariant repr_align: {:?}", repr_align);
355 }
356
357 debug!("univariant min_size: {:?}", offset);
358 let min_size = offset;
359
360 // As stated above, inverse_memory_index holds field indices by increasing offset.
361 // This makes it an already-sorted view of the offsets vec.
362 // To invert it, consider:
363 // If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0.
364 // Field 5 would be the first element, so memory_index is i:
365 // Note: if we didn't optimize, it's already right.
366
367 let mut memory_index;
368 if optimize {
369 memory_index = vec![0; inverse_memory_index.len()];
370
371 for i in 0..inverse_memory_index.len() {
372 memory_index[inverse_memory_index[i] as usize] = i as u32;
373 }
374 } else {
375 memory_index = inverse_memory_index;
376 }
377
378 let size = min_size.abi_align(align);
379 let mut abi = Abi::Aggregate { sized };
380
381 // Unpack newtype ABIs and find scalar pairs.
382 if sized && size.bytes() > 0 {
383 // All other fields must be ZSTs, and we need them to all start at 0.
384 let mut zst_offsets =
385 offsets.iter().enumerate().filter(|&(i, _)| fields[i].is_zst());
386 if zst_offsets.all(|(_, o)| o.bytes() == 0) {
387 let mut non_zst_fields =
388 fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
389
390 match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
391 // We have exactly one non-ZST field.
392 (Some((i, field)), None, None) => {
393 // Field fills the struct and it has a scalar or scalar pair ABI.
394 if offsets[i].bytes() == 0 &&
395 align.abi() == field.align.abi() &&
396 size == field.size {
397 match field.abi {
398 // For plain scalars, or vectors of them, we can't unpack
399 // newtypes for `#[repr(C)]`, as that affects C ABIs.
400 Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
401 abi = field.abi.clone();
402 }
403 // But scalar pairs are Rust-specific and get
404 // treated as aggregates by C ABIs anyway.
405 Abi::ScalarPair(..) => {
406 abi = field.abi.clone();
407 }
408 _ => {}
409 }
410 }
411 }
412
413 // Two non-ZST fields, and they're both scalars.
414 (Some((i, &TyLayout {
415 details: &LayoutDetails { abi: Abi::Scalar(ref a), .. }, ..
416 })), Some((j, &TyLayout {
417 details: &LayoutDetails { abi: Abi::Scalar(ref b), .. }, ..
418 })), None) => {
419 // Order by the memory placement, not source order.
420 let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
421 ((i, a), (j, b))
422 } else {
423 ((j, b), (i, a))
424 };
425 let pair = scalar_pair(a.clone(), b.clone());
426 let pair_offsets = match pair.fields {
427 FieldPlacement::Arbitrary {
428 ref offsets,
429 ref memory_index
430 } => {
431 assert_eq!(memory_index, &[0, 1]);
432 offsets
433 }
434 _ => bug!()
435 };
436 if offsets[i] == pair_offsets[0] &&
437 offsets[j] == pair_offsets[1] &&
438 align == pair.align &&
439 size == pair.size {
440 // We can use `ScalarPair` only when it matches our
441 // already computed layout (including `#[repr(C)]`).
442 abi = pair.abi;
443 }
444 }
445
446 _ => {}
447 }
448 }
449 }
450
83c7162d
XL
451 if sized && fields.iter().any(|f| f.abi == Abi::Uninhabited) {
452 abi = Abi::Uninhabited;
453 }
454
ff7c6d11
XL
455 Ok(LayoutDetails {
456 variants: Variants::Single { index: 0 },
457 fields: FieldPlacement::Arbitrary {
458 offsets,
459 memory_index
460 },
461 abi,
462 align,
463 size
464 })
465 };
466 let univariant = |fields: &[TyLayout], repr: &ReprOptions, kind| {
467 Ok(tcx.intern_layout(univariant_uninterned(fields, repr, kind)?))
468 };
469 assert!(!ty.has_infer_types());
470
471 Ok(match ty.sty {
472 // Basic scalars.
473 ty::TyBool => {
2c00a5a8 474 tcx.intern_layout(LayoutDetails::scalar(self, Scalar {
ff7c6d11
XL
475 value: Int(I8, false),
476 valid_range: 0..=1
477 }))
478 }
479 ty::TyChar => {
2c00a5a8 480 tcx.intern_layout(LayoutDetails::scalar(self, Scalar {
ff7c6d11
XL
481 value: Int(I32, false),
482 valid_range: 0..=0x10FFFF
483 }))
484 }
485 ty::TyInt(ity) => {
486 scalar(Int(Integer::from_attr(dl, attr::SignedInt(ity)), true))
487 }
488 ty::TyUint(ity) => {
489 scalar(Int(Integer::from_attr(dl, attr::UnsignedInt(ity)), false))
490 }
94b46f34 491 ty::TyFloat(fty) => scalar(Float(fty)),
ff7c6d11
XL
492 ty::TyFnPtr(_) => {
493 let mut ptr = scalar_unit(Pointer);
83c7162d 494 ptr.valid_range = 1..=*ptr.valid_range.end();
2c00a5a8 495 tcx.intern_layout(LayoutDetails::scalar(self, ptr))
ff7c6d11
XL
496 }
497
498 // The never type.
499 ty::TyNever => {
83c7162d
XL
500 tcx.intern_layout(LayoutDetails {
501 variants: Variants::Single { index: 0 },
502 fields: FieldPlacement::Union(0),
503 abi: Abi::Uninhabited,
504 align: dl.i8_align,
94b46f34 505 size: Size::ZERO
83c7162d 506 })
ff7c6d11
XL
507 }
508
509 // Potentially-fat pointers.
94b46f34 510 ty::TyRef(_, pointee, _) |
ff7c6d11
XL
511 ty::TyRawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
512 let mut data_ptr = scalar_unit(Pointer);
513 if !ty.is_unsafe_ptr() {
83c7162d 514 data_ptr.valid_range = 1..=*data_ptr.valid_range.end();
ff7c6d11
XL
515 }
516
0531ce1d
XL
517 let pointee = tcx.normalize_erasing_regions(param_env, pointee);
518 if pointee.is_sized(tcx.at(DUMMY_SP), param_env) {
2c00a5a8 519 return Ok(tcx.intern_layout(LayoutDetails::scalar(self, data_ptr)));
54a0048b 520 }
ff7c6d11
XL
521
522 let unsized_part = tcx.struct_tail(pointee);
523 let metadata = match unsized_part.sty {
524 ty::TyForeign(..) => {
2c00a5a8 525 return Ok(tcx.intern_layout(LayoutDetails::scalar(self, data_ptr)));
ff7c6d11
XL
526 }
527 ty::TySlice(_) | ty::TyStr => {
528 scalar_unit(Int(dl.ptr_sized_integer(), false))
529 }
530 ty::TyDynamic(..) => {
531 let mut vtable = scalar_unit(Pointer);
83c7162d 532 vtable.valid_range = 1..=*vtable.valid_range.end();
ff7c6d11
XL
533 vtable
534 }
535 _ => return Err(LayoutError::Unknown(unsized_part))
536 };
537
538 // Effectively a (ptr, meta) tuple.
539 tcx.intern_layout(scalar_pair(data_ptr, metadata))
540 }
541
542 // Arrays and slices.
543 ty::TyArray(element, mut count) => {
544 if count.has_projections() {
0531ce1d 545 count = tcx.normalize_erasing_regions(param_env, count);
ff7c6d11
XL
546 if count.has_projections() {
547 return Err(LayoutError::Unknown(ty));
548 }
549 }
550
2c00a5a8 551 let element = self.layout_of(element)?;
94b46f34 552 let count = count.unwrap_usize(tcx);
ff7c6d11
XL
553 let size = element.size.checked_mul(count, dl)
554 .ok_or(LayoutError::SizeOverflow(ty))?;
555
556 tcx.intern_layout(LayoutDetails {
557 variants: Variants::Single { index: 0 },
558 fields: FieldPlacement::Array {
559 stride: element.size,
560 count
561 },
562 abi: Abi::Aggregate { sized: true },
563 align: element.align,
564 size
565 })
566 }
567 ty::TySlice(element) => {
2c00a5a8 568 let element = self.layout_of(element)?;
ff7c6d11
XL
569 tcx.intern_layout(LayoutDetails {
570 variants: Variants::Single { index: 0 },
571 fields: FieldPlacement::Array {
572 stride: element.size,
573 count: 0
574 },
575 abi: Abi::Aggregate { sized: false },
576 align: element.align,
94b46f34 577 size: Size::ZERO
ff7c6d11 578 })
54a0048b
SL
579 }
580 ty::TyStr => {
ff7c6d11
XL
581 tcx.intern_layout(LayoutDetails {
582 variants: Variants::Single { index: 0 },
583 fields: FieldPlacement::Array {
584 stride: Size::from_bytes(1),
585 count: 0
586 },
587 abi: Abi::Aggregate { sized: false },
54a0048b 588 align: dl.i8_align,
94b46f34 589 size: Size::ZERO
ff7c6d11 590 })
54a0048b
SL
591 }
592
593 // Odd unit types.
594 ty::TyFnDef(..) => {
ff7c6d11 595 univariant(&[], &ReprOptions::default(), StructKind::AlwaysSized)?
54a0048b 596 }
abe05a73 597 ty::TyDynamic(..) | ty::TyForeign(..) => {
ff7c6d11
XL
598 let mut unit = univariant_uninterned(&[], &ReprOptions::default(),
599 StructKind::AlwaysSized)?;
600 match unit.abi {
601 Abi::Aggregate { ref mut sized } => *sized = false,
602 _ => bug!()
603 }
604 tcx.intern_layout(unit)
54a0048b
SL
605 }
606
ea8adc8c
XL
607 // Tuples, generators and closures.
608 ty::TyGenerator(def_id, ref substs, _) => {
609 let tys = substs.field_tys(def_id, tcx);
2c00a5a8 610 univariant(&tys.map(|ty| self.layout_of(ty)).collect::<Result<Vec<_>, _>>()?,
ea8adc8c 611 &ReprOptions::default(),
ff7c6d11 612 StructKind::AlwaysSized)?
ea8adc8c
XL
613 }
614
476ff2be
SL
615 ty::TyClosure(def_id, ref substs) => {
616 let tys = substs.upvar_tys(def_id, tcx);
2c00a5a8 617 univariant(&tys.map(|ty| self.layout_of(ty)).collect::<Result<Vec<_>, _>>()?,
8bb4bdeb 618 &ReprOptions::default(),
ff7c6d11 619 StructKind::AlwaysSized)?
476ff2be
SL
620 }
621
0531ce1d 622 ty::TyTuple(tys) => {
041b39d2 623 let kind = if tys.len() == 0 {
ff7c6d11 624 StructKind::AlwaysSized
041b39d2 625 } else {
ff7c6d11 626 StructKind::MaybeUnsized
041b39d2
XL
627 };
628
2c00a5a8 629 univariant(&tys.iter().map(|ty| self.layout_of(ty)).collect::<Result<Vec<_>, _>>()?,
ff7c6d11 630 &ReprOptions::default(), kind)?
54a0048b
SL
631 }
632
9e0c209e 633 // SIMD vector types.
cc61c64b 634 ty::TyAdt(def, ..) if def.repr.simd() => {
2c00a5a8 635 let element = self.layout_of(ty.simd_type(tcx))?;
ff7c6d11
XL
636 let count = ty.simd_size(tcx) as u64;
637 assert!(count > 0);
638 let scalar = match element.abi {
639 Abi::Scalar(ref scalar) => scalar.clone(),
9e0c209e
SL
640 _ => {
641 tcx.sess.fatal(&format!("monomorphising SIMD type `{}` with \
642 a non-machine element type `{}`",
ff7c6d11 643 ty, element.ty));
54a0048b 644 }
ff7c6d11
XL
645 };
646 let size = element.size.checked_mul(count, dl)
647 .ok_or(LayoutError::SizeOverflow(ty))?;
648 let align = dl.vector_align(size);
649 let size = size.abi_align(align);
650
651 tcx.intern_layout(LayoutDetails {
652 variants: Variants::Single { index: 0 },
653 fields: FieldPlacement::Array {
654 stride: element.size,
655 count
656 },
657 abi: Abi::Vector {
658 element: scalar,
659 count
660 },
661 size,
662 align,
663 })
54a0048b 664 }
9e0c209e
SL
665
666 // ADTs.
667 ty::TyAdt(def, substs) => {
ff7c6d11
XL
668 // Cache the field layouts.
669 let variants = def.variants.iter().map(|v| {
670 v.fields.iter().map(|field| {
2c00a5a8 671 self.layout_of(field.ty(tcx, substs))
ff7c6d11
XL
672 }).collect::<Result<Vec<_>, _>>()
673 }).collect::<Result<Vec<_>, _>>()?;
54a0048b 674
ff7c6d11
XL
675 if def.is_union() {
676 let packed = def.repr.packed();
677 if packed && def.repr.align > 0 {
678 bug!("Union cannot be packed and aligned");
679 }
680
83c7162d
XL
681 let pack = {
682 let pack = def.repr.pack as u64;
683 Align::from_bytes(pack, pack).unwrap()
684 };
685
686 let mut align = if packed {
ff7c6d11
XL
687 dl.i8_align
688 } else {
689 dl.aggregate_align
690 };
54a0048b 691
ff7c6d11
XL
692 if def.repr.align > 0 {
693 let repr_align = def.repr.align as u64;
694 align = align.max(
695 Align::from_bytes(repr_align, repr_align).unwrap());
54a0048b
SL
696 }
697
94b46f34 698 let mut size = Size::ZERO;
ff7c6d11
XL
699 for field in &variants[0] {
700 assert!(!field.is_unsized());
701
83c7162d
XL
702 if packed {
703 let field_pack = field.align.min(pack);
704 align = align.max(field_pack);
705 } else {
ff7c6d11
XL
706 align = align.max(field.align);
707 }
708 size = cmp::max(size, field.size);
709 }
710
711 return Ok(tcx.intern_layout(LayoutDetails {
712 variants: Variants::Single { index: 0 },
713 fields: FieldPlacement::Union(variants[0].len()),
714 abi: Abi::Aggregate { sized: true },
715 align,
716 size: size.abi_align(align)
717 }));
718 }
719
83c7162d
XL
720 // A variant is absent if it's uninhabited and only has ZST fields.
721 // Present uninhabited variants only require space for their fields,
722 // but *not* an encoding of the discriminant (e.g. a tag value).
723 // See issue #49298 for more details on the need to leave space
724 // for non-ZST uninhabited data (mostly partial initialization).
725 let absent = |fields: &[TyLayout]| {
726 let uninhabited = fields.iter().any(|f| f.abi == Abi::Uninhabited);
727 let is_zst = fields.iter().all(|f| f.is_zst());
728 uninhabited && is_zst
729 };
730 let (present_first, present_second) = {
731 let mut present_variants = (0..variants.len()).filter(|&v| {
732 !absent(&variants[v])
54a0048b 733 });
83c7162d 734 (present_variants.next(), present_variants.next())
ff7c6d11 735 };
83c7162d
XL
736 if present_first.is_none() {
737 // Uninhabited because it has no variants, or only absent ones.
738 return tcx.layout_raw(param_env.and(tcx.types.never));
54a0048b
SL
739 }
740
ff7c6d11 741 let is_struct = !def.is_enum() ||
83c7162d
XL
742 // Only one variant is present.
743 (present_second.is_none() &&
ff7c6d11 744 // Representation optimizations are allowed.
83c7162d 745 !def.repr.inhibit_enum_layout_opt());
ff7c6d11
XL
746 if is_struct {
747 // Struct, or univariant enum equivalent to a struct.
9e0c209e
SL
748 // (Typechecking will reject discriminant-sizing attrs.)
749
83c7162d 750 let v = present_first.unwrap();
ff7c6d11
XL
751 let kind = if def.is_enum() || variants[v].len() == 0 {
752 StructKind::AlwaysSized
476ff2be 753 } else {
7cac9316 754 let param_env = tcx.param_env(def.did);
ff7c6d11 755 let last_field = def.variants[v].fields.last().unwrap();
7cac9316 756 let always_sized = tcx.type_of(last_field.did)
0531ce1d 757 .is_sized(tcx.at(DUMMY_SP), param_env);
ff7c6d11
XL
758 if !always_sized { StructKind::MaybeUnsized }
759 else { StructKind::AlwaysSized }
9e0c209e 760 };
9e0c209e 761
ff7c6d11
XL
762 let mut st = univariant_uninterned(&variants[v], &def.repr, kind)?;
763 st.variants = Variants::Single { index: v };
764 // Exclude 0 from the range of a newtype ABI NonZero<T>.
2c00a5a8 765 if Some(def.did) == self.tcx.lang_items().non_zero() {
ff7c6d11
XL
766 match st.abi {
767 Abi::Scalar(ref mut scalar) |
768 Abi::ScalarPair(ref mut scalar, _) => {
83c7162d
XL
769 if *scalar.valid_range.start() == 0 {
770 scalar.valid_range = 1..=*scalar.valid_range.end();
ff7c6d11
XL
771 }
772 }
773 _ => {}
774 }
54a0048b 775 }
ff7c6d11 776 return Ok(tcx.intern_layout(st));
54a0048b
SL
777 }
778
83c7162d
XL
779 // The current code for niche-filling relies on variant indices
780 // instead of actual discriminants, so dataful enums with
781 // explicit discriminants (RFC #2363) would misbehave.
ff7c6d11
XL
782 let no_explicit_discriminants = def.variants.iter().enumerate()
783 .all(|(i, v)| v.discr == ty::VariantDiscr::Relative(i));
784
785 // Niche-filling enum optimization.
786 if !def.repr.inhibit_enum_layout_opt() && no_explicit_discriminants {
787 let mut dataful_variant = None;
788 let mut niche_variants = usize::max_value()..=0;
789
790 // Find one non-ZST variant.
791 'variants: for (v, fields) in variants.iter().enumerate() {
83c7162d
XL
792 if absent(fields) {
793 continue 'variants;
794 }
ff7c6d11 795 for f in fields {
ff7c6d11
XL
796 if !f.is_zst() {
797 if dataful_variant.is_none() {
798 dataful_variant = Some(v);
799 continue 'variants;
800 } else {
801 dataful_variant = None;
802 break 'variants;
803 }
804 }
54a0048b 805 }
83c7162d 806 niche_variants = *niche_variants.start().min(&v)..=v;
ff7c6d11
XL
807 }
808
83c7162d 809 if niche_variants.start() > niche_variants.end() {
ff7c6d11
XL
810 dataful_variant = None;
811 }
812
813 if let Some(i) = dataful_variant {
83c7162d
XL
814 let count = (niche_variants.end() - niche_variants.start() + 1) as u128;
815 for (field_index, &field) in variants[i].iter().enumerate() {
94b46f34
XL
816 let niche = match self.find_niche(field)? {
817 Some(niche) => niche,
818 _ => continue,
819 };
820 let (niche_start, niche_scalar) = match niche.reserve(self, count) {
821 Some(pair) => pair,
822 None => continue,
823 };
824
ff7c6d11
XL
825 let mut align = dl.aggregate_align;
826 let st = variants.iter().enumerate().map(|(j, v)| {
827 let mut st = univariant_uninterned(v,
828 &def.repr, StructKind::AlwaysSized)?;
829 st.variants = Variants::Single { index: j };
830
831 align = align.max(st.align);
832
833 Ok(st)
834 }).collect::<Result<Vec<_>, _>>()?;
835
94b46f34 836 let offset = st[i].fields.offset(field_index) + niche.offset;
ff7c6d11
XL
837 let size = st[i].size;
838
83c7162d 839 let mut abi = match st[i].abi {
94b46f34 840 Abi::Scalar(_) => Abi::Scalar(niche_scalar.clone()),
0531ce1d
XL
841 Abi::ScalarPair(ref first, ref second) => {
842 // We need to use scalar_unit to reset the
843 // valid range to the maximal one for that
844 // primitive, because only the niche is
845 // guaranteed to be initialised, not the
846 // other primitive.
847 if offset.bytes() == 0 {
94b46f34
XL
848 Abi::ScalarPair(
849 niche_scalar.clone(),
850 scalar_unit(second.value),
851 )
0531ce1d 852 } else {
94b46f34
XL
853 Abi::ScalarPair(
854 scalar_unit(first.value),
855 niche_scalar.clone(),
856 )
0531ce1d
XL
857 }
858 }
859 _ => Abi::Aggregate { sized: true },
c30ab7b3 860 };
ff7c6d11 861
83c7162d
XL
862 if st.iter().all(|v| v.abi == Abi::Uninhabited) {
863 abi = Abi::Uninhabited;
864 }
865
ff7c6d11
XL
866 return Ok(tcx.intern_layout(LayoutDetails {
867 variants: Variants::NicheFilling {
868 dataful_variant: i,
869 niche_variants,
94b46f34 870 niche: niche_scalar,
ff7c6d11
XL
871 niche_start,
872 variants: st,
873 },
874 fields: FieldPlacement::Arbitrary {
875 offsets: vec![offset],
876 memory_index: vec![0]
877 },
878 abi,
879 size,
880 align,
881 }));
54a0048b 882 }
ff7c6d11
XL
883 }
884 }
54a0048b 885
ff7c6d11 886 let (mut min, mut max) = (i128::max_value(), i128::min_value());
0531ce1d
XL
887 let discr_type = def.repr.discr_type();
888 let bits = Integer::from_attr(tcx, discr_type).size().bits();
ff7c6d11
XL
889 for (i, discr) in def.discriminants(tcx).enumerate() {
890 if variants[i].iter().any(|f| f.abi == Abi::Uninhabited) {
891 continue;
54a0048b 892 }
0531ce1d
XL
893 let mut x = discr.val as i128;
894 if discr_type.is_signed() {
895 // sign extend the raw representation to be an i128
896 x = (x << (128 - bits)) >> (128 - bits);
897 }
ff7c6d11
XL
898 if x < min { min = x; }
899 if x > max { max = x; }
54a0048b 900 }
83c7162d
XL
901 // We might have no inhabited variants, so pretend there's at least one.
902 if (min, max) == (i128::max_value(), i128::min_value()) {
903 min = 0;
904 max = 0;
905 }
ff7c6d11
XL
906 assert!(min <= max, "discriminant range is {}...{}", min, max);
907 let (min_ity, signed) = Integer::repr_discr(tcx, ty, &def.repr, min, max);
54a0048b 908
54a0048b 909 let mut align = dl.aggregate_align;
94b46f34 910 let mut size = Size::ZERO;
54a0048b
SL
911
912 // We're interested in the smallest alignment, so start large.
913 let mut start_align = Align::from_bytes(256, 256).unwrap();
ff7c6d11
XL
914 assert_eq!(Integer::for_abi_align(dl, start_align), None);
915
916 // repr(C) on an enum tells us to make a (tag, union) layout,
917 // so we need to grow the prefix alignment to be at least
918 // the alignment of the union. (This value is used both for
919 // determining the alignment of the overall enum, and the
920 // determining the alignment of the payload after the tag.)
921 let mut prefix_align = min_ity.align(dl);
922 if def.repr.c() {
923 for fields in &variants {
924 for field in fields {
925 prefix_align = prefix_align.max(field.align);
926 }
927 }
928 }
54a0048b 929
ff7c6d11 930 // Create the set of structs that represent each variant.
83c7162d 931 let mut layout_variants = variants.iter().enumerate().map(|(i, field_layouts)| {
ff7c6d11
XL
932 let mut st = univariant_uninterned(&field_layouts,
933 &def.repr, StructKind::Prefixed(min_ity.size(), prefix_align))?;
934 st.variants = Variants::Single { index: i };
476ff2be
SL
935 // Find the first field we can't move later
936 // to make room for a larger discriminant.
ff7c6d11
XL
937 for field in st.fields.index_by_increasing_offset().map(|j| field_layouts[j]) {
938 if !field.is_zst() || field.align.abi() != 1 {
939 start_align = start_align.min(field.align);
476ff2be 940 break;
54a0048b 941 }
476ff2be 942 }
ff7c6d11 943 size = cmp::max(size, st.size);
54a0048b
SL
944 align = align.max(st.align);
945 Ok(st)
946 }).collect::<Result<Vec<_>, _>>()?;
947
948 // Align the maximum variant size to the largest alignment.
949 size = size.abi_align(align);
950
951 if size.bytes() >= dl.obj_size_bound() {
952 return Err(LayoutError::SizeOverflow(ty));
953 }
954
8bb4bdeb
XL
955 let typeck_ity = Integer::from_attr(dl, def.repr.discr_type());
956 if typeck_ity < min_ity {
957 // It is a bug if Layout decided on a greater discriminant size than typeck for
958 // some reason at this point (based on values discriminant can take on). Mostly
959 // because this discriminant will be loaded, and then stored into variable of
960 // type calculated by typeck. Consider such case (a bug): typeck decided on
961 // byte-sized discriminant, but layout thinks we need a 16-bit to store all
94b46f34 962 // discriminant values. That would be a bug, because then, in codegen, in order
8bb4bdeb
XL
963 // to store this 16-bit discriminant into 8-bit sized temporary some of the
964 // space necessary to represent would have to be discarded (or layout is wrong
965 // on thinking it needs 16 bits)
966 bug!("layout decided on a larger discriminant type ({:?}) than typeck ({:?})",
967 min_ity, typeck_ity);
968 // However, it is fine to make discr type however large (as an optimisation)
94b46f34 969 // after this point – we’ll just truncate the value we load in codegen.
8bb4bdeb
XL
970 }
971
54a0048b
SL
972 // Check to see if we should use a different type for the
973 // discriminant. We can safely use a type with the same size
974 // as the alignment of the first field of each variant.
975 // We increase the size of the discriminant to avoid LLVM copying
976 // padding when it doesn't need to. This normally causes unaligned
977 // load/stores and excessive memcpy/memset operations. By using a
83c7162d 978 // bigger integer size, LLVM can be sure about its contents and
54a0048b
SL
979 // won't be so conservative.
980
981 // Use the initial field alignment
83c7162d
XL
982 let mut ity = if def.repr.c() || def.repr.int.is_some() {
983 min_ity
984 } else {
985 Integer::for_abi_align(dl, start_align).unwrap_or(min_ity)
986 };
54a0048b
SL
987
988 // If the alignment is not larger than the chosen discriminant size,
989 // don't use the alignment as the final size.
990 if ity <= min_ity {
991 ity = min_ity;
992 } else {
993 // Patch up the variants' first few fields.
ff7c6d11
XL
994 let old_ity_size = min_ity.size();
995 let new_ity_size = ity.size();
83c7162d 996 for variant in &mut layout_variants {
ff7c6d11
XL
997 match variant.fields {
998 FieldPlacement::Arbitrary { ref mut offsets, .. } => {
999 for i in offsets {
1000 if *i <= old_ity_size {
1001 assert_eq!(*i, old_ity_size);
1002 *i = new_ity_size;
1003 }
1004 }
1005 // We might be making the struct larger.
1006 if variant.size <= old_ity_size {
1007 variant.size = new_ity_size;
1008 }
1009 }
1010 _ => bug!()
c30ab7b3 1011 }
54a0048b
SL
1012 }
1013 }
1014
0531ce1d
XL
1015 let tag_mask = !0u128 >> (128 - ity.size().bits());
1016 let tag = Scalar {
ff7c6d11 1017 value: Int(ity, signed),
0531ce1d 1018 valid_range: (min as u128 & tag_mask)..=(max as u128 & tag_mask),
ff7c6d11 1019 };
83c7162d
XL
1020 let mut abi = Abi::Aggregate { sized: true };
1021 if tag.value.size(dl) == size {
1022 abi = Abi::Scalar(tag.clone());
1023 } else if !tag.is_bool() {
1024 // HACK(nox): Blindly using ScalarPair for all tagged enums
1025 // where applicable leads to Option<u8> being handled as {i1, i8},
1026 // which later confuses SROA and some loop optimisations,
1027 // ultimately leading to the repeat-trusted-len test
1028 // failing. We make the trade-off of using ScalarPair only
1029 // for types where the tag isn't a boolean.
1030 let mut common_prim = None;
1031 for (field_layouts, layout_variant) in variants.iter().zip(&layout_variants) {
1032 let offsets = match layout_variant.fields {
1033 FieldPlacement::Arbitrary { ref offsets, .. } => offsets,
1034 _ => bug!(),
1035 };
1036 let mut fields = field_layouts
1037 .iter()
1038 .zip(offsets)
1039 .filter(|p| !p.0.is_zst());
1040 let (field, offset) = match (fields.next(), fields.next()) {
1041 (None, None) => continue,
1042 (Some(pair), None) => pair,
1043 _ => {
1044 common_prim = None;
1045 break;
1046 }
1047 };
1048 let prim = match field.details.abi {
1049 Abi::Scalar(ref scalar) => scalar.value,
1050 _ => {
1051 common_prim = None;
1052 break;
1053 }
1054 };
1055 if let Some(pair) = common_prim {
1056 // This is pretty conservative. We could go fancier
1057 // by conflating things like i32 and u32, or even
1058 // realising that (u8, u8) could just cohabit with
1059 // u16 or even u32.
1060 if pair != (prim, offset) {
1061 common_prim = None;
1062 break;
1063 }
1064 } else {
1065 common_prim = Some((prim, offset));
1066 }
1067 }
1068 if let Some((prim, offset)) = common_prim {
1069 let pair = scalar_pair(tag.clone(), scalar_unit(prim));
1070 let pair_offsets = match pair.fields {
1071 FieldPlacement::Arbitrary {
1072 ref offsets,
1073 ref memory_index
1074 } => {
1075 assert_eq!(memory_index, &[0, 1]);
1076 offsets
1077 }
1078 _ => bug!()
1079 };
94b46f34 1080 if pair_offsets[0] == Size::ZERO &&
83c7162d
XL
1081 pair_offsets[1] == *offset &&
1082 align == pair.align &&
1083 size == pair.size {
1084 // We can use `ScalarPair` only when it matches our
1085 // already computed layout (including `#[repr(C)]`).
1086 abi = pair.abi;
1087 }
1088 }
1089 }
1090
1091 if layout_variants.iter().all(|v| v.abi == Abi::Uninhabited) {
1092 abi = Abi::Uninhabited;
1093 }
1094
ff7c6d11
XL
1095 tcx.intern_layout(LayoutDetails {
1096 variants: Variants::Tagged {
83c7162d
XL
1097 tag,
1098 variants: layout_variants,
ff7c6d11
XL
1099 },
1100 fields: FieldPlacement::Arbitrary {
94b46f34 1101 offsets: vec![Size::ZERO],
ff7c6d11
XL
1102 memory_index: vec![0]
1103 },
1104 abi,
041b39d2 1105 align,
ff7c6d11
XL
1106 size
1107 })
54a0048b
SL
1108 }
1109
1110 // Types with no meaningful known layout.
5bcae85e 1111 ty::TyProjection(_) | ty::TyAnon(..) => {
0531ce1d 1112 let normalized = tcx.normalize_erasing_regions(param_env, ty);
5bcae85e
SL
1113 if ty == normalized {
1114 return Err(LayoutError::Unknown(ty));
1115 }
ff7c6d11 1116 tcx.layout_raw(param_env.and(normalized))?
5bcae85e
SL
1117 }
1118 ty::TyParam(_) => {
54a0048b
SL
1119 return Err(LayoutError::Unknown(ty));
1120 }
2c00a5a8 1121 ty::TyGeneratorWitness(..) | ty::TyInfer(_) | ty::TyError => {
ff7c6d11 1122 bug!("LayoutDetails::compute: unexpected type `{}`", ty)
cc61c64b 1123 }
ff7c6d11 1124 })
cc61c64b 1125 }
7cac9316
XL
1126
1127 /// This is invoked by the `layout_raw` query to record the final
1128 /// layout of each type.
1129 #[inline]
2c00a5a8 1130 fn record_layout_for_printing(self, layout: TyLayout<'tcx>) {
7cac9316
XL
1131 // If we are running with `-Zprint-type-sizes`, record layouts for
1132 // dumping later. Ignore layouts that are done with non-empty
1133 // environments or non-monomorphic layouts, as the user only wants
94b46f34 1134 // to see the stuff resulting from the final codegen session.
7cac9316 1135 if
2c00a5a8
XL
1136 !self.tcx.sess.opts.debugging_opts.print_type_sizes ||
1137 layout.ty.has_param_types() ||
1138 layout.ty.has_self_ty() ||
1139 !self.param_env.caller_bounds.is_empty()
7cac9316
XL
1140 {
1141 return;
1142 }
1143
2c00a5a8 1144 self.record_layout_for_printing_outlined(layout)
7cac9316
XL
1145 }
1146
2c00a5a8 1147 fn record_layout_for_printing_outlined(self, layout: TyLayout<'tcx>) {
7cac9316 1148 // (delay format until we actually need it)
83c7162d 1149 let record = |kind, packed, opt_discr_size, variants| {
2c00a5a8
XL
1150 let type_desc = format!("{:?}", layout.ty);
1151 self.tcx.sess.code_stats.borrow_mut().record_type_size(kind,
1152 type_desc,
1153 layout.align,
1154 layout.size,
83c7162d 1155 packed,
2c00a5a8
XL
1156 opt_discr_size,
1157 variants);
7cac9316
XL
1158 };
1159
2c00a5a8 1160 let adt_def = match layout.ty.sty {
ff7c6d11 1161 ty::TyAdt(ref adt_def, _) => {
2c00a5a8 1162 debug!("print-type-size t: `{:?}` process adt", layout.ty);
ff7c6d11 1163 adt_def
7cac9316
XL
1164 }
1165
1166 ty::TyClosure(..) => {
2c00a5a8 1167 debug!("print-type-size t: `{:?}` record closure", layout.ty);
83c7162d 1168 record(DataTypeKind::Closure, false, None, vec![]);
7cac9316
XL
1169 return;
1170 }
1171
1172 _ => {
2c00a5a8 1173 debug!("print-type-size t: `{:?}` skip non-nominal", layout.ty);
7cac9316
XL
1174 return;
1175 }
1176 };
1177
1178 let adt_kind = adt_def.adt_kind();
83c7162d 1179 let adt_packed = adt_def.repr.packed();
7cac9316 1180
ff7c6d11
XL
1181 let build_variant_info = |n: Option<ast::Name>,
1182 flds: &[ast::Name],
1183 layout: TyLayout<'tcx>| {
94b46f34 1184 let mut min_size = Size::ZERO;
ff7c6d11 1185 let field_info: Vec<_> = flds.iter().enumerate().map(|(i, &name)| {
2c00a5a8 1186 match layout.field(self, i) {
ff7c6d11
XL
1187 Err(err) => {
1188 bug!("no layout found for field {}: `{:?}`", name, err);
1189 }
1190 Ok(field_layout) => {
1191 let offset = layout.fields.offset(i);
1192 let field_end = offset + field_layout.size;
1193 if min_size < field_end {
1194 min_size = field_end;
1195 }
1196 session::FieldInfo {
1197 name: name.to_string(),
1198 offset: offset.bytes(),
1199 size: field_layout.size.bytes(),
1200 align: field_layout.align.abi(),
1201 }
7cac9316
XL
1202 }
1203 }
ff7c6d11 1204 }).collect();
7cac9316
XL
1205
1206 session::VariantInfo {
1207 name: n.map(|n|n.to_string()),
ff7c6d11
XL
1208 kind: if layout.is_unsized() {
1209 session::SizeKind::Min
1210 } else {
7cac9316 1211 session::SizeKind::Exact
ff7c6d11
XL
1212 },
1213 align: layout.align.abi(),
1214 size: if min_size.bytes() == 0 {
1215 layout.size.bytes()
7cac9316 1216 } else {
ff7c6d11 1217 min_size.bytes()
7cac9316 1218 },
7cac9316
XL
1219 fields: field_info,
1220 }
1221 };
1222
ff7c6d11
XL
1223 match layout.variants {
1224 Variants::Single { index } => {
1225 debug!("print-type-size `{:#?}` variant {}",
1226 layout, adt_def.variants[index].name);
1227 if !adt_def.variants.is_empty() {
1228 let variant_def = &adt_def.variants[index];
7cac9316 1229 let fields: Vec<_> =
94b46f34 1230 variant_def.fields.iter().map(|f| f.ident.name).collect();
7cac9316 1231 record(adt_kind.into(),
83c7162d 1232 adt_packed,
7cac9316
XL
1233 None,
1234 vec![build_variant_info(Some(variant_def.name),
1235 &fields,
ff7c6d11 1236 layout)]);
7cac9316
XL
1237 } else {
1238 // (This case arises for *empty* enums; so give it
1239 // zero variants.)
83c7162d 1240 record(adt_kind.into(), adt_packed, None, vec![]);
7cac9316
XL
1241 }
1242 }
1243
ff7c6d11
XL
1244 Variants::NicheFilling { .. } |
1245 Variants::Tagged { .. } => {
1246 debug!("print-type-size `{:#?}` adt general variants def {}",
2c00a5a8 1247 layout.ty, adt_def.variants.len());
7cac9316 1248 let variant_infos: Vec<_> =
ff7c6d11
XL
1249 adt_def.variants.iter().enumerate().map(|(i, variant_def)| {
1250 let fields: Vec<_> =
94b46f34 1251 variant_def.fields.iter().map(|f| f.ident.name).collect();
ff7c6d11
XL
1252 build_variant_info(Some(variant_def.name),
1253 &fields,
2c00a5a8 1254 layout.for_variant(self, i))
ff7c6d11
XL
1255 })
1256 .collect();
83c7162d
XL
1257 record(adt_kind.into(), adt_packed, match layout.variants {
1258 Variants::Tagged { ref tag, .. } => Some(tag.value.size(self)),
ff7c6d11
XL
1259 _ => None
1260 }, variant_infos);
7cac9316
XL
1261 }
1262 }
1263 }
54a0048b
SL
1264}
1265
1266/// Type size "skeleton", i.e. the only information determining a type's size.
1267/// While this is conservative, (aside from constant sizes, only pointers,
1268/// newtypes thereof and null pointer optimized enums are allowed), it is
1269/// enough to statically check common usecases of transmute.
1270#[derive(Copy, Clone, Debug)]
1271pub enum SizeSkeleton<'tcx> {
1272 /// Any statically computable Layout.
1273 Known(Size),
1274
1275 /// A potentially-fat pointer.
1276 Pointer {
3b2f2976 1277 /// If true, this pointer is never null.
54a0048b 1278 non_zero: bool,
3b2f2976
XL
1279 /// The type which determines the unsized metadata, if any,
1280 /// of this pointer. Either a type parameter or a projection
1281 /// depending on one, with regions erased.
54a0048b
SL
1282 tail: Ty<'tcx>
1283 }
1284}
1285
7cac9316
XL
1286impl<'a, 'tcx> SizeSkeleton<'tcx> {
1287 pub fn compute(ty: Ty<'tcx>,
1288 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1289 param_env: ty::ParamEnv<'tcx>)
1290 -> Result<SizeSkeleton<'tcx>, LayoutError<'tcx>> {
54a0048b
SL
1291 assert!(!ty.has_infer_types());
1292
1293 // First try computing a static layout.
2c00a5a8 1294 let err = match tcx.layout_of(param_env.and(ty)) {
54a0048b 1295 Ok(layout) => {
ff7c6d11 1296 return Ok(SizeSkeleton::Known(layout.size));
54a0048b
SL
1297 }
1298 Err(err) => err
1299 };
1300
1301 match ty.sty {
94b46f34 1302 ty::TyRef(_, pointee, _) |
54a0048b 1303 ty::TyRawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
ff7c6d11
XL
1304 let non_zero = !ty.is_unsafe_ptr();
1305 let tail = tcx.struct_tail(pointee);
1306 match tail.sty {
1307 ty::TyParam(_) | ty::TyProjection(_) => {
1308 assert!(tail.has_param_types() || tail.has_self_ty());
1309 Ok(SizeSkeleton::Pointer {
1310 non_zero,
1311 tail: tcx.erase_regions(&tail)
1312 })
1313 }
1314 _ => {
1315 bug!("SizeSkeleton::compute({}): layout errored ({}), yet \
1316 tail `{}` is not a type parameter or a projection",
1317 ty, err, tail)
1318 }
1319 }
54a0048b
SL
1320 }
1321
9e0c209e 1322 ty::TyAdt(def, substs) => {
54a0048b 1323 // Only newtypes and enums w/ nullable pointer optimization.
9e0c209e 1324 if def.is_union() || def.variants.is_empty() || def.variants.len() > 2 {
54a0048b
SL
1325 return Err(err);
1326 }
1327
1328 // Get a zero-sized variant or a pointer newtype.
1329 let zero_or_ptr_variant = |i: usize| {
1330 let fields = def.variants[i].fields.iter().map(|field| {
7cac9316 1331 SizeSkeleton::compute(field.ty(tcx, substs), tcx, param_env)
54a0048b
SL
1332 });
1333 let mut ptr = None;
1334 for field in fields {
1335 let field = field?;
1336 match field {
1337 SizeSkeleton::Known(size) => {
1338 if size.bytes() > 0 {
1339 return Err(err);
1340 }
1341 }
1342 SizeSkeleton::Pointer {..} => {
1343 if ptr.is_some() {
1344 return Err(err);
1345 }
1346 ptr = Some(field);
1347 }
1348 }
1349 }
1350 Ok(ptr)
1351 };
1352
1353 let v0 = zero_or_ptr_variant(0)?;
1354 // Newtype.
1355 if def.variants.len() == 1 {
1356 if let Some(SizeSkeleton::Pointer { non_zero, tail }) = v0 {
1357 return Ok(SizeSkeleton::Pointer {
1358 non_zero: non_zero ||
ea8adc8c 1359 Some(def.did) == tcx.lang_items().non_zero(),
041b39d2 1360 tail,
54a0048b
SL
1361 });
1362 } else {
1363 return Err(err);
1364 }
1365 }
1366
1367 let v1 = zero_or_ptr_variant(1)?;
1368 // Nullable pointer enum optimization.
1369 match (v0, v1) {
1370 (Some(SizeSkeleton::Pointer { non_zero: true, tail }), None) |
1371 (None, Some(SizeSkeleton::Pointer { non_zero: true, tail })) => {
1372 Ok(SizeSkeleton::Pointer {
1373 non_zero: false,
041b39d2 1374 tail,
54a0048b
SL
1375 })
1376 }
1377 _ => Err(err)
1378 }
1379 }
1380
5bcae85e 1381 ty::TyProjection(_) | ty::TyAnon(..) => {
0531ce1d 1382 let normalized = tcx.normalize_erasing_regions(param_env, ty);
5bcae85e
SL
1383 if ty == normalized {
1384 Err(err)
1385 } else {
7cac9316 1386 SizeSkeleton::compute(normalized, tcx, param_env)
5bcae85e
SL
1387 }
1388 }
1389
54a0048b
SL
1390 _ => Err(err)
1391 }
1392 }
1393
1394 pub fn same_size(self, other: SizeSkeleton) -> bool {
1395 match (self, other) {
1396 (SizeSkeleton::Known(a), SizeSkeleton::Known(b)) => a == b,
1397 (SizeSkeleton::Pointer { tail: a, .. },
1398 SizeSkeleton::Pointer { tail: b, .. }) => a == b,
1399 _ => false
1400 }
1401 }
1402}
cc61c64b 1403
ff7c6d11 1404pub trait HasTyCtxt<'tcx>: HasDataLayout {
cc61c64b
XL
1405 fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>;
1406}
1407
ff7c6d11
XL
1408impl<'a, 'gcx, 'tcx> HasDataLayout for TyCtxt<'a, 'gcx, 'tcx> {
1409 fn data_layout(&self) -> &TargetDataLayout {
1410 &self.data_layout
1411 }
cc61c64b
XL
1412}
1413
ff7c6d11
XL
1414impl<'a, 'gcx, 'tcx> HasTyCtxt<'gcx> for TyCtxt<'a, 'gcx, 'tcx> {
1415 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'gcx> {
1416 self.global_tcx()
cc61c64b
XL
1417 }
1418}
1419
2c00a5a8 1420impl<'tcx, T: HasDataLayout> HasDataLayout for LayoutCx<'tcx, T> {
cc61c64b 1421 fn data_layout(&self) -> &TargetDataLayout {
2c00a5a8 1422 self.tcx.data_layout()
cc61c64b
XL
1423 }
1424}
1425
2c00a5a8 1426impl<'gcx, 'tcx, T: HasTyCtxt<'gcx>> HasTyCtxt<'gcx> for LayoutCx<'tcx, T> {
ff7c6d11 1427 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'gcx> {
2c00a5a8 1428 self.tcx.tcx()
ff7c6d11
XL
1429 }
1430}
1431
1432pub trait MaybeResult<T> {
1433 fn from_ok(x: T) -> Self;
1434 fn map_same<F: FnOnce(T) -> T>(self, f: F) -> Self;
1435}
1436
1437impl<T> MaybeResult<T> for T {
1438 fn from_ok(x: T) -> Self {
1439 x
1440 }
1441 fn map_same<F: FnOnce(T) -> T>(self, f: F) -> Self {
1442 f(self)
1443 }
1444}
cc61c64b 1445
ff7c6d11
XL
1446impl<T, E> MaybeResult<T> for Result<T, E> {
1447 fn from_ok(x: T) -> Self {
1448 Ok(x)
1449 }
1450 fn map_same<F: FnOnce(T) -> T>(self, f: F) -> Self {
1451 self.map(f)
7cac9316 1452 }
ff7c6d11
XL
1453}
1454
83c7162d 1455pub type TyLayout<'tcx> = ::rustc_target::abi::TyLayout<'tcx, Ty<'tcx>>;
cc61c64b 1456
83c7162d
XL
1457impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
1458 type Ty = Ty<'tcx>;
ff7c6d11
XL
1459 type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
1460
1461 /// Computes the layout of a type. Note that this implicitly
1462 /// executes in "reveal all" mode.
7cac9316 1463 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
0531ce1d
XL
1464 let param_env = self.param_env.with_reveal_all();
1465 let ty = self.tcx.normalize_erasing_regions(param_env, ty);
2c00a5a8 1466 let details = self.tcx.layout_raw(param_env.and(ty))?;
ff7c6d11 1467 let layout = TyLayout {
041b39d2 1468 ty,
ff7c6d11
XL
1469 details
1470 };
cc61c64b 1471
ff7c6d11
XL
1472 // NB: This recording is normally disabled; when enabled, it
1473 // can however trigger recursive invocations of `layout_of`.
1474 // Therefore, we execute it *after* the main query has
1475 // completed, to avoid problems around recursive structures
0531ce1d 1476 // and the like. (Admittedly, I wasn't able to reproduce a problem
ff7c6d11 1477 // here, but it seems like the right thing to do. -nmatsakis)
2c00a5a8 1478 self.record_layout_for_printing(layout);
ff7c6d11
XL
1479
1480 Ok(layout)
cc61c64b
XL
1481 }
1482}
1483
94b46f34 1484impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, ty::query::TyCtxtAt<'a, 'tcx, 'tcx>> {
83c7162d 1485 type Ty = Ty<'tcx>;
ff7c6d11 1486 type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
cc61c64b 1487
ff7c6d11
XL
1488 /// Computes the layout of a type. Note that this implicitly
1489 /// executes in "reveal all" mode.
ff7c6d11 1490 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
0531ce1d
XL
1491 let param_env = self.param_env.with_reveal_all();
1492 let ty = self.tcx.normalize_erasing_regions(param_env, ty);
1493 let details = self.tcx.layout_raw(param_env.and(ty))?;
ff7c6d11
XL
1494 let layout = TyLayout {
1495 ty,
1496 details
1497 };
cc61c64b 1498
ff7c6d11
XL
1499 // NB: This recording is normally disabled; when enabled, it
1500 // can however trigger recursive invocations of `layout_of`.
1501 // Therefore, we execute it *after* the main query has
1502 // completed, to avoid problems around recursive structures
0531ce1d 1503 // and the like. (Admittedly, I wasn't able to reproduce a problem
ff7c6d11 1504 // here, but it seems like the right thing to do. -nmatsakis)
2c00a5a8
XL
1505 let cx = LayoutCx {
1506 tcx: *self.tcx,
1507 param_env: self.param_env
1508 };
1509 cx.record_layout_for_printing(layout);
cc61c64b 1510
ff7c6d11
XL
1511 Ok(layout)
1512 }
1513}
cc61c64b 1514
2c00a5a8 1515// Helper (inherent) `layout_of` methods to avoid pushing `LayoutCx` to users.
94b46f34 1516impl TyCtxt<'a, 'tcx, '_> {
2c00a5a8
XL
1517 /// Computes the layout of a type. Note that this implicitly
1518 /// executes in "reveal all" mode.
1519 #[inline]
1520 pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
1521 -> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
1522 let cx = LayoutCx {
94b46f34 1523 tcx: self.global_tcx(),
2c00a5a8
XL
1524 param_env: param_env_and_ty.param_env
1525 };
1526 cx.layout_of(param_env_and_ty.value)
1527 }
1528}
1529
94b46f34 1530impl ty::query::TyCtxtAt<'a, 'tcx, '_> {
2c00a5a8
XL
1531 /// Computes the layout of a type. Note that this implicitly
1532 /// executes in "reveal all" mode.
1533 #[inline]
1534 pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
1535 -> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
1536 let cx = LayoutCx {
94b46f34 1537 tcx: self.global_tcx().at(self.span),
2c00a5a8
XL
1538 param_env: param_env_and_ty.param_env
1539 };
1540 cx.layout_of(param_env_and_ty.value)
1541 }
1542}
1543
83c7162d
XL
1544impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
1545 where C: LayoutOf<Ty = Ty<'tcx>> + HasTyCtxt<'tcx>,
1546 C::TyLayout: MaybeResult<TyLayout<'tcx>>
1547{
1548 fn for_variant(this: TyLayout<'tcx>, cx: C, variant_index: usize) -> TyLayout<'tcx> {
1549 let details = match this.variants {
1550 Variants::Single { index } if index == variant_index => this.details,
ff7c6d11
XL
1551
1552 Variants::Single { index } => {
1553 // Deny calling for_variant more than once for non-Single enums.
83c7162d 1554 cx.layout_of(this.ty).map_same(|layout| {
ff7c6d11
XL
1555 assert_eq!(layout.variants, Variants::Single { index });
1556 layout
1557 });
1558
83c7162d 1559 let fields = match this.ty.sty {
ff7c6d11
XL
1560 ty::TyAdt(def, _) => def.variants[variant_index].fields.len(),
1561 _ => bug!()
1562 };
83c7162d
XL
1563 let tcx = cx.tcx();
1564 tcx.intern_layout(LayoutDetails {
1565 variants: Variants::Single { index: variant_index },
1566 fields: FieldPlacement::Union(fields),
1567 abi: Abi::Uninhabited,
1568 align: tcx.data_layout.i8_align,
94b46f34 1569 size: Size::ZERO
83c7162d 1570 })
ff7c6d11 1571 }
cc61c64b 1572
ff7c6d11
XL
1573 Variants::NicheFilling { ref variants, .. } |
1574 Variants::Tagged { ref variants, .. } => {
1575 &variants[variant_index]
cc61c64b 1576 }
ff7c6d11
XL
1577 };
1578
1579 assert_eq!(details.variants, Variants::Single { index: variant_index });
cc61c64b 1580
ff7c6d11 1581 TyLayout {
83c7162d 1582 ty: this.ty,
ff7c6d11 1583 details
cc61c64b
XL
1584 }
1585 }
1586
83c7162d 1587 fn field(this: TyLayout<'tcx>, cx: C, i: usize) -> C::TyLayout {
cc61c64b 1588 let tcx = cx.tcx();
83c7162d 1589 cx.layout_of(match this.ty.sty {
cc61c64b
XL
1590 ty::TyBool |
1591 ty::TyChar |
1592 ty::TyInt(_) |
1593 ty::TyUint(_) |
1594 ty::TyFloat(_) |
1595 ty::TyFnPtr(_) |
1596 ty::TyNever |
1597 ty::TyFnDef(..) |
2c00a5a8
XL
1598 ty::TyGeneratorWitness(..) |
1599 ty::TyForeign(..) |
1600 ty::TyDynamic(..) => {
83c7162d 1601 bug!("TyLayout::field_type({:?}): not applicable", this)
cc61c64b
XL
1602 }
1603
1604 // Potentially-fat pointers.
94b46f34 1605 ty::TyRef(_, pointee, _) |
cc61c64b 1606 ty::TyRawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
ff7c6d11
XL
1607 assert!(i < 2);
1608
1609 // Reuse the fat *T type as its own thin pointer data field.
1610 // This provides information about e.g. DST struct pointees
1611 // (which may have no non-DST form), and will work as long
1612 // as the `Abi` or `FieldPlacement` is checked by users.
1613 if i == 0 {
1614 let nil = tcx.mk_nil();
83c7162d 1615 let ptr_ty = if this.ty.is_unsafe_ptr() {
ff7c6d11
XL
1616 tcx.mk_mut_ptr(nil)
1617 } else {
1618 tcx.mk_mut_ref(tcx.types.re_static, nil)
1619 };
1620 return cx.layout_of(ptr_ty).map_same(|mut ptr_layout| {
83c7162d 1621 ptr_layout.ty = this.ty;
ff7c6d11
XL
1622 ptr_layout
1623 });
1624 }
1625
1626 match tcx.struct_tail(pointee).sty {
1627 ty::TySlice(_) |
1628 ty::TyStr => tcx.types.usize,
1629 ty::TyDynamic(..) => {
1630 // FIXME(eddyb) use an usize/fn() array with
1631 // the correct number of vtables slots.
1632 tcx.mk_imm_ref(tcx.types.re_static, tcx.mk_nil())
1633 }
83c7162d 1634 _ => bug!("TyLayout::field_type({:?}): not applicable", this)
ff7c6d11 1635 }
cc61c64b
XL
1636 }
1637
1638 // Arrays and slices.
1639 ty::TyArray(element, _) |
1640 ty::TySlice(element) => element,
1641 ty::TyStr => tcx.types.u8,
1642
ea8adc8c 1643 // Tuples, generators and closures.
cc61c64b
XL
1644 ty::TyClosure(def_id, ref substs) => {
1645 substs.upvar_tys(def_id, tcx).nth(i).unwrap()
1646 }
1647
ea8adc8c
XL
1648 ty::TyGenerator(def_id, ref substs, _) => {
1649 substs.field_tys(def_id, tcx).nth(i).unwrap()
1650 }
1651
0531ce1d 1652 ty::TyTuple(tys) => tys[i],
cc61c64b
XL
1653
1654 // SIMD vector types.
1655 ty::TyAdt(def, ..) if def.repr.simd() => {
83c7162d 1656 this.ty.simd_type(tcx)
cc61c64b
XL
1657 }
1658
1659 // ADTs.
1660 ty::TyAdt(def, substs) => {
83c7162d 1661 match this.variants {
ff7c6d11
XL
1662 Variants::Single { index } => {
1663 def.variants[index].fields[i].ty(tcx, substs)
1664 }
1665
1666 // Discriminant field for enums (where applicable).
83c7162d 1667 Variants::Tagged { tag: ref discr, .. } |
ff7c6d11
XL
1668 Variants::NicheFilling { niche: ref discr, .. } => {
1669 assert_eq!(i, 0);
1670 let layout = LayoutDetails::scalar(tcx, discr.clone());
1671 return MaybeResult::from_ok(TyLayout {
1672 details: tcx.intern_layout(layout),
1673 ty: discr.value.to_ty(tcx)
1674 });
1675 }
1676 }
cc61c64b
XL
1677 }
1678
1679 ty::TyProjection(_) | ty::TyAnon(..) | ty::TyParam(_) |
1680 ty::TyInfer(_) | ty::TyError => {
83c7162d 1681 bug!("TyLayout::field_type: unexpected type `{}`", this.ty)
cc61c64b 1682 }
ff7c6d11
XL
1683 })
1684 }
83c7162d 1685}
ff7c6d11 1686
94b46f34
XL
1687struct Niche {
1688 offset: Size,
1689 scalar: Scalar,
1690 available: u128,
1691}
1692
1693impl Niche {
1694 fn reserve<'a, 'tcx>(
1695 &self,
1696 cx: LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>>,
1697 count: u128,
1698 ) -> Option<(u128, Scalar)> {
1699 if count > self.available {
1700 return None;
1701 }
1702 let Scalar { value, valid_range: ref v } = self.scalar;
1703 let bits = value.size(cx).bits();
1704 assert!(bits <= 128);
1705 let max_value = !0u128 >> (128 - bits);
1706 let start = v.end().wrapping_add(1) & max_value;
1707 let end = v.end().wrapping_add(count) & max_value;
1708 Some((start, Scalar { value, valid_range: *v.start()..=end }))
1709 }
1710}
1711
83c7162d 1712impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
ff7c6d11 1713 /// Find the offset of a niche leaf field, starting from
94b46f34 1714 /// the given type and recursing through aggregates.
ff7c6d11 1715 // FIXME(eddyb) traverse already optimized enums.
94b46f34
XL
1716 fn find_niche(self, layout: TyLayout<'tcx>) -> Result<Option<Niche>, LayoutError<'tcx>> {
1717 let scalar_niche = |scalar: &Scalar, offset| {
ff7c6d11
XL
1718 let Scalar { value, valid_range: ref v } = *scalar;
1719
83c7162d 1720 let bits = value.size(self).bits();
ff7c6d11
XL
1721 assert!(bits <= 128);
1722 let max_value = !0u128 >> (128 - bits);
1723
1724 // Find out how many values are outside the valid range.
94b46f34 1725 let available = if v.start() <= v.end() {
83c7162d 1726 v.start() + (max_value - v.end())
ff7c6d11 1727 } else {
83c7162d 1728 v.start() - v.end() - 1
ff7c6d11
XL
1729 };
1730
94b46f34
XL
1731 // Give up if there is no niche value available.
1732 if available == 0 {
ff7c6d11
XL
1733 return None;
1734 }
1735
94b46f34 1736 Some(Niche { offset, scalar: scalar.clone(), available })
ff7c6d11
XL
1737 };
1738
2c00a5a8
XL
1739 // Locals variables which live across yields are stored
1740 // in the generator type as fields. These may be uninitialized
1741 // so we don't look for niches there.
83c7162d 1742 if let ty::TyGenerator(..) = layout.ty.sty {
2c00a5a8
XL
1743 return Ok(None);
1744 }
1745
83c7162d 1746 match layout.abi {
ff7c6d11 1747 Abi::Scalar(ref scalar) => {
94b46f34 1748 return Ok(scalar_niche(scalar, Size::ZERO));
ff7c6d11
XL
1749 }
1750 Abi::ScalarPair(ref a, ref b) => {
94b46f34
XL
1751 // HACK(nox): We iter on `b` and then `a` because `max_by_key`
1752 // returns the last maximum.
1753 let niche = iter::once((b, a.value.size(self).abi_align(b.value.align(self))))
1754 .chain(iter::once((a, Size::ZERO)))
1755 .filter_map(|(scalar, offset)| scalar_niche(scalar, offset))
1756 .max_by_key(|niche| niche.available);
1757 return Ok(niche);
ff7c6d11
XL
1758 }
1759 Abi::Vector { ref element, .. } => {
94b46f34 1760 return Ok(scalar_niche(element, Size::ZERO));
ff7c6d11
XL
1761 }
1762 _ => {}
1763 }
1764
1765 // Perhaps one of the fields is non-zero, let's recurse and find out.
83c7162d 1766 if let FieldPlacement::Union(_) = layout.fields {
ff7c6d11
XL
1767 // Only Rust enums have safe-to-inspect fields
1768 // (a discriminant), other unions are unsafe.
83c7162d 1769 if let Variants::Single { .. } = layout.variants {
ff7c6d11
XL
1770 return Ok(None);
1771 }
1772 }
83c7162d
XL
1773 if let FieldPlacement::Array { .. } = layout.fields {
1774 if layout.fields.count() > 0 {
94b46f34
XL
1775 return self.find_niche(layout.field(self, 0)?);
1776 } else {
1777 return Ok(None);
ff7c6d11
XL
1778 }
1779 }
94b46f34
XL
1780 let mut niche = None;
1781 let mut available = 0;
83c7162d 1782 for i in 0..layout.fields.count() {
94b46f34
XL
1783 if let Some(mut c) = self.find_niche(layout.field(self, i)?)? {
1784 if c.available > available {
1785 available = c.available;
1786 c.offset += layout.fields.offset(i);
1787 niche = Some(c);
1788 }
ff7c6d11
XL
1789 }
1790 }
94b46f34 1791 Ok(niche)
cc61c64b
XL
1792 }
1793}
ea8adc8c 1794
0531ce1d 1795impl<'a> HashStable<StableHashingContext<'a>> for Variants {
ea8adc8c 1796 fn hash_stable<W: StableHasherResult>(&self,
0531ce1d 1797 hcx: &mut StableHashingContext<'a>,
ea8adc8c 1798 hasher: &mut StableHasher<W>) {
ff7c6d11 1799 use ty::layout::Variants::*;
ea8adc8c
XL
1800 mem::discriminant(self).hash_stable(hcx, hasher);
1801
1802 match *self {
ff7c6d11
XL
1803 Single { index } => {
1804 index.hash_stable(hcx, hasher);
ea8adc8c 1805 }
ff7c6d11 1806 Tagged {
83c7162d 1807 ref tag,
ff7c6d11
XL
1808 ref variants,
1809 } => {
83c7162d 1810 tag.hash_stable(hcx, hasher);
ff7c6d11 1811 variants.hash_stable(hcx, hasher);
ea8adc8c 1812 }
ff7c6d11
XL
1813 NicheFilling {
1814 dataful_variant,
83c7162d 1815 ref niche_variants,
ff7c6d11
XL
1816 ref niche,
1817 niche_start,
1818 ref variants,
1819 } => {
1820 dataful_variant.hash_stable(hcx, hasher);
83c7162d
XL
1821 niche_variants.start().hash_stable(hcx, hasher);
1822 niche_variants.end().hash_stable(hcx, hasher);
ff7c6d11
XL
1823 niche.hash_stable(hcx, hasher);
1824 niche_start.hash_stable(hcx, hasher);
1825 variants.hash_stable(hcx, hasher);
ea8adc8c 1826 }
ff7c6d11
XL
1827 }
1828 }
1829}
1830
0531ce1d 1831impl<'a> HashStable<StableHashingContext<'a>> for FieldPlacement {
ff7c6d11 1832 fn hash_stable<W: StableHasherResult>(&self,
0531ce1d 1833 hcx: &mut StableHashingContext<'a>,
ff7c6d11
XL
1834 hasher: &mut StableHasher<W>) {
1835 use ty::layout::FieldPlacement::*;
1836 mem::discriminant(self).hash_stable(hcx, hasher);
1837
1838 match *self {
1839 Union(count) => {
1840 count.hash_stable(hcx, hasher);
ea8adc8c 1841 }
ff7c6d11
XL
1842 Array { count, stride } => {
1843 count.hash_stable(hcx, hasher);
1844 stride.hash_stable(hcx, hasher);
ea8adc8c 1845 }
ff7c6d11
XL
1846 Arbitrary { ref offsets, ref memory_index } => {
1847 offsets.hash_stable(hcx, hasher);
1848 memory_index.hash_stable(hcx, hasher);
ea8adc8c 1849 }
ff7c6d11
XL
1850 }
1851 }
1852}
1853
0531ce1d 1854impl<'a> HashStable<StableHashingContext<'a>> for Abi {
ff7c6d11 1855 fn hash_stable<W: StableHasherResult>(&self,
0531ce1d 1856 hcx: &mut StableHashingContext<'a>,
ff7c6d11
XL
1857 hasher: &mut StableHasher<W>) {
1858 use ty::layout::Abi::*;
1859 mem::discriminant(self).hash_stable(hcx, hasher);
1860
1861 match *self {
1862 Uninhabited => {}
1863 Scalar(ref value) => {
1864 value.hash_stable(hcx, hasher);
ea8adc8c 1865 }
ff7c6d11
XL
1866 ScalarPair(ref a, ref b) => {
1867 a.hash_stable(hcx, hasher);
1868 b.hash_stable(hcx, hasher);
ea8adc8c 1869 }
ff7c6d11
XL
1870 Vector { ref element, count } => {
1871 element.hash_stable(hcx, hasher);
1872 count.hash_stable(hcx, hasher);
ea8adc8c 1873 }
ff7c6d11
XL
1874 Aggregate { sized } => {
1875 sized.hash_stable(hcx, hasher);
ea8adc8c
XL
1876 }
1877 }
1878 }
1879}
1880
0531ce1d 1881impl<'a> HashStable<StableHashingContext<'a>> for Scalar {
ff7c6d11 1882 fn hash_stable<W: StableHasherResult>(&self,
0531ce1d 1883 hcx: &mut StableHashingContext<'a>,
ff7c6d11 1884 hasher: &mut StableHasher<W>) {
83c7162d 1885 let Scalar { value, ref valid_range } = *self;
ff7c6d11 1886 value.hash_stable(hcx, hasher);
83c7162d
XL
1887 valid_range.start().hash_stable(hcx, hasher);
1888 valid_range.end().hash_stable(hcx, hasher);
ff7c6d11
XL
1889 }
1890}
1891
1892impl_stable_hash_for!(struct ::ty::layout::LayoutDetails {
1893 variants,
1894 fields,
1895 abi,
1896 size,
1897 align
1898});
1899
ea8adc8c 1900impl_stable_hash_for!(enum ::ty::layout::Integer {
ea8adc8c
XL
1901 I8,
1902 I16,
1903 I32,
1904 I64,
1905 I128
1906});
1907
1908impl_stable_hash_for!(enum ::ty::layout::Primitive {
ff7c6d11 1909 Int(integer, signed),
94b46f34 1910 Float(fty),
ea8adc8c
XL
1911 Pointer
1912});
1913
83c7162d
XL
1914impl<'gcx> HashStable<StableHashingContext<'gcx>> for Align {
1915 fn hash_stable<W: StableHasherResult>(&self,
1916 hcx: &mut StableHashingContext<'gcx>,
1917 hasher: &mut StableHasher<W>) {
1918 self.abi().hash_stable(hcx, hasher);
1919 self.pref().hash_stable(hcx, hasher);
1920 }
1921}
ea8adc8c 1922
83c7162d
XL
1923impl<'gcx> HashStable<StableHashingContext<'gcx>> for Size {
1924 fn hash_stable<W: StableHasherResult>(&self,
1925 hcx: &mut StableHashingContext<'gcx>,
1926 hasher: &mut StableHasher<W>) {
1927 self.bytes().hash_stable(hcx, hasher);
1928 }
1929}
ea8adc8c 1930
0531ce1d 1931impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for LayoutError<'gcx>
ea8adc8c
XL
1932{
1933 fn hash_stable<W: StableHasherResult>(&self,
0531ce1d 1934 hcx: &mut StableHashingContext<'a>,
ea8adc8c
XL
1935 hasher: &mut StableHasher<W>) {
1936 use ty::layout::LayoutError::*;
1937 mem::discriminant(self).hash_stable(hcx, hasher);
1938
1939 match *self {
1940 Unknown(t) |
1941 SizeOverflow(t) => t.hash_stable(hcx, hasher)
1942 }
1943 }
1944}