use crate::abi::FnAbi;
use crate::common::*;
use crate::type_::Type;
use log::debug;
use rustc_codegen_ssa::traits::*;
use rustc_middle::bug;
use rustc_middle::ty::layout::{FnAbiExt, TyAndLayout};
use rustc_middle::ty::print::obsolete::DefPathBasedNames;
use rustc_middle::ty::{self, Ty, TypeFoldable};
use rustc_target::abi::{Abi, Align, FieldsShape};
use rustc_target::abi::{Int, Pointer, F32, F64};
use rustc_target::abi::{LayoutOf, PointeeInfo, Scalar, Size, TyAndLayoutMethods, Variants};

use std::fmt::Write;

fn uncached_llvm_type<'a, 'tcx>(
    cx: &CodegenCx<'a, 'tcx>,
    layout: TyAndLayout<'tcx>,
    defer: &mut Option<(&'a Type, TyAndLayout<'tcx>)>,
) -> &'a Type {
    match layout.abi {
        Abi::Scalar(_) => bug!("handled elsewhere"),
        Abi::Vector { ref element, count } => {
            // LLVM has a separate type for 64-bit SIMD vectors on X86 called
            // `x86_mmx` which is needed for some SIMD operations. As a bit of a
            // hack (all SIMD definitions are super unstable anyway) we
            // recognize any one-element SIMD vector as "this should be an
            // x86_mmx" type. In general there shouldn't be a need for other
            // one-element SIMD vectors, so it's assumed this won't clash with
            // much else.
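            // E.g. a hypothetical one-element `#[repr(simd)] struct U64x1(u64);`
            // is a one-element, 64-bit vector, so it would become `x86_mmx` on
            // x86/x86_64 and roughly `<1 x i64>` elsewhere.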
            let use_x86_mmx = count == 1
                && layout.size.bits() == 64
                && (cx.sess().target.target.arch == "x86"
                    || cx.sess().target.target.arch == "x86_64");
            if use_x86_mmx {
                return cx.type_x86_mmx();
            } else {
                let element = layout.scalar_llvm_type_at(cx, element, Size::ZERO);
                return cx.type_vector(element, count);
            }
        }
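        // A `ScalarPair` is lowered to a two-field LLVM struct; roughly, on a
        // 64-bit target, `&str` comes out as `{ i8*, i64 }` (data pointer
        // plus length).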
        Abi::ScalarPair(..) => {
            return cx.type_struct(
                &[
                    layout.scalar_pair_element_llvm_type(cx, 0, false),
                    layout.scalar_pair_element_llvm_type(cx, 1, false),
                ],
                false,
            );
        }
        Abi::Uninhabited | Abi::Aggregate { .. } => {}
    }

    let name = match layout.ty.kind {
        ty::Closure(..) |
        ty::Generator(..) |
        ty::Adt(..) |
        // FIXME(eddyb) producing readable type names for trait objects can result
        // in problematically distinct types due to HRTB and subtyping (see #47638).
        // ty::Dynamic(..) |
        ty::Foreign(..) |
        ty::Str => {
            let mut name = String::with_capacity(32);
            let printer = DefPathBasedNames::new(cx.tcx, true, true);
            printer.push_type_name(layout.ty, &mut name, false);
            if let (&ty::Adt(def, _), &Variants::Single { index })
                = (&layout.ty.kind, &layout.variants)
            {
                if def.is_enum() && !def.variants.is_empty() {
                    write!(&mut name, "::{}", def.variants[index].ident).unwrap();
                }
            }
            if let (&ty::Generator(_, _, _), &Variants::Single { index })
                = (&layout.ty.kind, &layout.variants)
            {
                write!(&mut name, "::{}", ty::GeneratorSubsts::variant_name(index)).unwrap();
            }
            Some(name)
        }
        _ => None
    };

    match layout.fields {
        FieldsShape::Primitive | FieldsShape::Union(_) => {
            let fill = cx.type_padding_filler(layout.size, layout.align.abi);
            let packed = false;
            match name {
                None => cx.type_struct(&[fill], packed),
                Some(ref name) => {
                    let llty = cx.type_named_struct(name);
                    cx.set_struct_body(llty, &[fill], packed);
                    llty
                }
            }
        }
        FieldsShape::Array { count, .. } => cx.type_array(layout.field(cx, 0).llvm_type(cx), count),
        FieldsShape::Arbitrary { .. } => match name {
            None => {
                let (llfields, packed) = struct_llfields(cx, layout);
                cx.type_struct(&llfields, packed)
            }
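            // The body of a named struct is filled in later (see `llvm_type`),
            // once the opaque type has been cached; this deferral is what lets
            // recursive types terminate instead of recursing forever.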
            Some(ref name) => {
                let llty = cx.type_named_struct(name);
                *defer = Some((llty, layout));
                llty
            }
        },
    }
}

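/// Builds the list of LLVM field types for a struct-like layout, interleaving
/// a (possibly zero-sized) padding filler before every field and, for sized
/// layouts with at least one field, a final filler up to `layout.size`, for
/// `1 + field_count * 2` entries in total. Roughly, a
/// `#[repr(C)] struct S { a: u8, b: u32 }` becomes something like
/// `{ [0 x i8], i8, [3 x i8], i32, [0 x i32] }`.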
fn struct_llfields<'a, 'tcx>(
    cx: &CodegenCx<'a, 'tcx>,
    layout: TyAndLayout<'tcx>,
) -> (Vec<&'a Type>, bool) {
    debug!("struct_llfields: {:#?}", layout);
    let field_count = layout.fields.count();

    let mut packed = false;
    let mut offset = Size::ZERO;
    let mut prev_effective_align = layout.align.abi;
    let mut result: Vec<_> = Vec::with_capacity(1 + field_count * 2);
    for i in layout.fields.index_by_increasing_offset() {
        let target_offset = layout.fields.offset(i as usize);
        let field = layout.field(cx, i);
        let effective_field_align =
            layout.align.abi.min(field.align.abi).restrict_for_offset(target_offset);
        packed |= effective_field_align < field.align.abi;

        debug!(
            "struct_llfields: {}: {:?} offset: {:?} target_offset: {:?} \
             effective_field_align: {}",
            i,
            field,
            offset,
            target_offset,
            effective_field_align.bytes()
        );
        assert!(target_offset >= offset);
        let padding = target_offset - offset;
        let padding_align = prev_effective_align.min(effective_field_align);
        assert_eq!(offset.align_to(padding_align) + padding, target_offset);
        result.push(cx.type_padding_filler(padding, padding_align));
        debug!("    padding before: {:?}", padding);

        result.push(field.llvm_type(cx));
        offset = target_offset + field.size;
        prev_effective_align = effective_field_align;
    }
    if !layout.is_unsized() && field_count > 0 {
        if offset > layout.size {
            bug!("layout: {:#?} stride: {:?} offset: {:?}", layout, layout.size, offset);
        }
        let padding = layout.size - offset;
        let padding_align = prev_effective_align;
        assert_eq!(offset.align_to(padding_align) + padding, layout.size);
        debug!(
            "struct_llfields: pad_bytes: {:?} offset: {:?} stride: {:?}",
            padding, offset, layout.size
        );
        result.push(cx.type_padding_filler(padding, padding_align));
        assert_eq!(result.len(), 1 + field_count * 2);
    } else {
        debug!("struct_llfields: offset: {:?} stride: {:?}", offset, layout.size);
    }

    (result, packed)
}

impl<'a, 'tcx> CodegenCx<'a, 'tcx> {
    pub fn align_of(&self, ty: Ty<'tcx>) -> Align {
        self.layout_of(ty).align.abi
    }

    pub fn size_of(&self, ty: Ty<'tcx>) -> Size {
        self.layout_of(ty).size
    }

    pub fn size_and_align_of(&self, ty: Ty<'tcx>) -> (Size, Align) {
        let layout = self.layout_of(ty);
        (layout.size, layout.align.abi)
    }
}

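/// Extension methods for mapping a `TyAndLayout` to its LLVM representation.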
pub trait LayoutLlvmExt<'tcx> {
    fn is_llvm_immediate(&self) -> bool;
    fn is_llvm_scalar_pair(&self) -> bool;
    fn llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type;
    fn immediate_llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type;
    fn scalar_llvm_type_at<'a>(
        &self,
        cx: &CodegenCx<'a, 'tcx>,
        scalar: &Scalar,
        offset: Size,
    ) -> &'a Type;
    fn scalar_pair_element_llvm_type<'a>(
        &self,
        cx: &CodegenCx<'a, 'tcx>,
        index: usize,
        immediate: bool,
    ) -> &'a Type;
    fn llvm_field_index(&self, index: usize) -> u64;
    fn pointee_info_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>, offset: Size) -> Option<PointeeInfo>;
}

impl<'tcx> LayoutLlvmExt<'tcx> for TyAndLayout<'tcx> {
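    // Whether a value of this layout is passed around as a single LLVM SSA
    // value (e.g. `u32` or a SIMD vector; ZSTs count trivially). A
    // `ScalarPair` such as `&str` is instead handled as two immediates.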
    fn is_llvm_immediate(&self) -> bool {
        match self.abi {
            Abi::Scalar(_) | Abi::Vector { .. } => true,
            Abi::ScalarPair(..) => false,
            Abi::Uninhabited | Abi::Aggregate { .. } => self.is_zst(),
        }
    }

    fn is_llvm_scalar_pair(&self) -> bool {
        match self.abi {
            Abi::ScalarPair(..) => true,
            Abi::Uninhabited | Abi::Scalar(_) | Abi::Vector { .. } | Abi::Aggregate { .. } => false,
        }
    }

    /// Gets the LLVM type corresponding to a Rust type, i.e., `rustc_middle::ty::Ty`.
    /// The pointee type of the pointer in `PlaceRef` is always this type.
    /// For sized types, it is also the right LLVM type for an `alloca`
    /// containing a value of that type, and most immediates (except `bool`).
    /// Unsized types, however, are represented by a "minimal unit", e.g.
    /// `[T]` becomes `T`, while `str` and `Trait` turn into `i8` - this
    /// is useful for indexing slices, as `&[T]`'s data pointer is `T*`.
    /// If the type is an unsized struct, the regular layout is generated,
    /// with the inner-most trailing unsized field using the "minimal unit"
    /// of that field's type - this is useful for taking the address of
    /// that field and ensuring the struct has the right alignment.
    fn llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type {
        if let Abi::Scalar(ref scalar) = self.abi {
            // Use a different cache for scalars because pointers to DSTs
            // can be either fat or thin (data pointers of fat pointers).
            if let Some(&llty) = cx.scalar_lltypes.borrow().get(&self.ty) {
                return llty;
            }
            let llty = match self.ty.kind {
                ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => {
                    cx.type_ptr_to(cx.layout_of(ty).llvm_type(cx))
                }
                ty::Adt(def, _) if def.is_box() => {
                    cx.type_ptr_to(cx.layout_of(self.ty.boxed_ty()).llvm_type(cx))
                }
                ty::FnPtr(sig) => cx.fn_ptr_backend_type(&FnAbi::of_fn_ptr(cx, sig, &[])),
                _ => self.scalar_llvm_type_at(cx, scalar, Size::ZERO),
            };
            cx.scalar_lltypes.borrow_mut().insert(self.ty, llty);
            return llty;
        }

        // Check the cache.
        let variant_index = match self.variants {
            Variants::Single { index } => Some(index),
            _ => None,
        };
        if let Some(&llty) = cx.lltypes.borrow().get(&(self.ty, variant_index)) {
            return llty;
        }

        debug!("llvm_type({:#?})", self);

        assert!(!self.ty.has_escaping_bound_vars(), "{:?} has escaping bound vars", self.ty);

        // Make sure lifetimes are erased, to avoid generating distinct LLVM
        // types for Rust types that only differ in the choice of lifetimes.
        let normal_ty = cx.tcx.erase_regions(&self.ty);

        let mut defer = None;
        let llty = if self.ty != normal_ty {
            let mut layout = cx.layout_of(normal_ty);
            if let Some(v) = variant_index {
                layout = layout.for_variant(cx, v);
            }
            layout.llvm_type(cx)
        } else {
            uncached_llvm_type(cx, *self, &mut defer)
        };
        debug!("--> mapped {:#?} to llty={:?}", self, llty);

        cx.lltypes.borrow_mut().insert((self.ty, variant_index), llty);

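        // Fill in a deferred named struct's body only now, after the (still
        // opaque) type has been cached above, so that recursing into field
        // types that mention `self.ty` hits the cache instead of looping.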
        if let Some((llty, layout)) = defer {
            let (llfields, packed) = struct_llfields(cx, layout);
            cx.set_struct_body(llty, &llfields, packed)
        }

        llty
    }

    fn immediate_llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type {
        if let Abi::Scalar(ref scalar) = self.abi {
            if scalar.is_bool() {
                return cx.type_i1();
            }
        }
        self.llvm_type(cx)
    }

    fn scalar_llvm_type_at<'a>(
        &self,
        cx: &CodegenCx<'a, 'tcx>,
        scalar: &Scalar,
        offset: Size,
    ) -> &'a Type {
        match scalar.value {
            Int(i, _) => cx.type_from_integer(i),
            F32 => cx.type_f32(),
            F64 => cx.type_f64(),
            Pointer => {
                // If we know the alignment, pick something better than i8.
                let pointee = if let Some(pointee) = self.pointee_info_at(cx, offset) {
                    cx.type_pointee_for_align(pointee.align)
                } else {
                    cx.type_i8()
                };
                cx.type_ptr_to(pointee)
            }
        }
    }

    fn scalar_pair_element_llvm_type<'a>(
        &self,
        cx: &CodegenCx<'a, 'tcx>,
        index: usize,
        immediate: bool,
    ) -> &'a Type {
        // HACK(eddyb) special-case fat pointers until LLVM removes
        // pointee types, to avoid bitcasting every `OperandRef::deref`.
        match self.ty.kind {
            ty::Ref(..) | ty::RawPtr(_) => {
                return self.field(cx, index).llvm_type(cx);
            }
            ty::Adt(def, _) if def.is_box() => {
                let ptr_ty = cx.tcx.mk_mut_ptr(self.ty.boxed_ty());
                return cx.layout_of(ptr_ty).scalar_pair_element_llvm_type(cx, index, immediate);
            }
            _ => {}
        }

        let (a, b) = match self.abi {
            Abi::ScalarPair(ref a, ref b) => (a, b),
            _ => bug!("TyAndLayout::scalar_pair_element_llty({:?}): not applicable", self),
        };
        let scalar = [a, b][index];

        // Make sure to return the same type `immediate_llvm_type` would when
        // dealing with an immediate pair. This means that `(bool, bool)` is
        // effectively represented as `{i8, i8}` in memory and two `i1`s as an
        // immediate, just like `bool` is typically `i8` in memory and only `i1`
        // when immediate. We need to load/store `bool` as `i8` to avoid
        // crippling LLVM optimizations or triggering other LLVM bugs with `i1`.
        if immediate && scalar.is_bool() {
            return cx.type_i1();
        }

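        // The second element starts where the first ends, rounded up to the
        // second's alignment; e.g. in a pair of `u8` and `u32`, the `u32`
        // lives at offset 4 (1 byte rounded up to a 4-byte alignment).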
        let offset =
            if index == 0 { Size::ZERO } else { a.value.size(cx).align_to(b.value.align(cx).abi) };
        self.scalar_llvm_type_at(cx, scalar, offset)
    }

    fn llvm_field_index(&self, index: usize) -> u64 {
        match self.abi {
            Abi::Scalar(_) | Abi::ScalarPair(..) => {
                bug!("TyAndLayout::llvm_field_index({:?}): not applicable", self)
            }
            _ => {}
        }
        match self.fields {
            FieldsShape::Primitive | FieldsShape::Union(_) => {
                bug!("TyAndLayout::llvm_field_index({:?}): not applicable", self)
            }

            FieldsShape::Array { .. } => index as u64,

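            // `struct_llfields` emits a padding filler before every field, so
            // the field at memory position `i` is LLVM struct field `1 + i * 2`.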
            FieldsShape::Arbitrary { .. } => 1 + (self.fields.memory_index(index) as u64) * 2,
        }
    }

    fn pointee_info_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>, offset: Size) -> Option<PointeeInfo> {
        if let Some(&pointee) = cx.pointee_infos.borrow().get(&(self.ty, offset)) {
            return pointee;
        }

        let result = Ty::pointee_info_at(*self, cx, offset);

        cx.pointee_infos.borrow_mut().insert((self.ty, offset), result);
        result
    }
}