]> git.proxmox.com Git - rustc.git/blame - compiler/rustc_codegen_gcc/src/type_of.rs
New upstream version 1.62.1+dfsg1
[rustc.git] / compiler / rustc_codegen_gcc / src / type_of.rs
CommitLineData
c295e0f8
XL
1use std::fmt::Write;
2
3use gccjit::{Struct, Type};
4use crate::rustc_codegen_ssa::traits::{BaseTypeMethods, DerivedTypeMethods, LayoutTypeMethods};
5use rustc_middle::bug;
6use rustc_middle::ty::{self, Ty, TypeFoldable};
7use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
8use rustc_middle::ty::print::with_no_trimmed_paths;
9use rustc_target::abi::{self, Abi, F32, F64, FieldsShape, Int, Integer, Pointer, PointeeInfo, Size, TyAbiInterface, Variants};
10use rustc_target::abi::call::{CastTarget, FnAbi, Reg};
11
12use crate::abi::{FnAbiGccExt, GccType};
13use crate::context::CodegenCx;
14use crate::type_::struct_fields;
15
16impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
17 fn type_from_unsigned_integer(&self, i: Integer) -> Type<'gcc> {
18 use Integer::*;
19 match i {
20 I8 => self.type_u8(),
21 I16 => self.type_u16(),
22 I32 => self.type_u32(),
23 I64 => self.type_u64(),
24 I128 => self.type_u128(),
25 }
26 }
27}
28
/// Lowers a Rust `TyAndLayout` to a GCC type, without consulting the type caches
/// (callers cache the result; see `LayoutGccExt::gcc_type`).
///
/// Scalars are expected to have been handled by the caller (`bug!` otherwise);
/// vectors and scalar pairs are lowered directly here. For aggregates, a struct
/// type is produced. When a *named* struct with arbitrary fields is needed, only
/// the opaque named struct is created and `(struct, layout)` is stored in `defer`
/// so the caller can fill in the fields later — this breaks cycles when a type
/// (transitively) contains itself behind a pointer.
pub fn uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout<'tcx>, defer: &mut Option<(Struct<'gcc>, TyAndLayout<'tcx>)>) -> Type<'gcc> {
    match layout.abi {
        Abi::Scalar(_) => bug!("handled elsewhere"),
        Abi::Vector { ref element, count } => {
            let element = layout.scalar_gcc_type_at(cx, element, Size::ZERO);
            return cx.context.new_vector_type(element, count);
        },
        Abi::ScalarPair(..) => {
            // A scalar pair becomes a two-field struct; `false` = in-memory
            // (not immediate) representation for each element, and the struct
            // itself is not packed.
            return cx.type_struct(
                &[
                    layout.scalar_pair_element_gcc_type(cx, 0, false),
                    layout.scalar_pair_element_gcc_type(cx, 1, false),
                ],
                false,
            );
        }
        Abi::Uninhabited | Abi::Aggregate { .. } => {}
    }

    // Compute a debug-friendly name for the aggregate, or `None` for an
    // anonymous struct. With `-Zfewer-names` (or equivalent), skip the pretty
    // name to save work.
    let name = match layout.ty.kind() {
        // FIXME(eddyb) producing readable type names for trait objects can result
        // in problematically distinct types due to HRTB and subtyping (see #47638).
        // ty::Dynamic(..) |
        ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Generator(..) | ty::Str
            if !cx.sess().fewer_names() =>
        {
            let mut name = with_no_trimmed_paths!(layout.ty.to_string());
            // For a single-variant enum layout, append the variant name so
            // distinct variant layouts get distinct type names.
            if let (&ty::Adt(def, _), &Variants::Single { index }) =
                (layout.ty.kind(), &layout.variants)
            {
                if def.is_enum() && !def.variants().is_empty() {
                    write!(&mut name, "::{}", def.variant(index).name).unwrap();
                }
            }
            // Likewise for a single generator variant (suspend point).
            if let (&ty::Generator(_, _, _), &Variants::Single { index }) =
                (layout.ty.kind(), &layout.variants)
            {
                write!(&mut name, "::{}", ty::GeneratorSubsts::variant_name(index)).unwrap();
            }
            Some(name)
        }
        ty::Adt(..) => {
            // If `Some` is returned then a named struct is created in LLVM. Name collisions are
            // avoided by LLVM (with increasing suffixes). If rustc doesn't generate names then that
            // can improve perf.
            // FIXME(antoyo): I don't think that's true for libgccjit.
            Some(String::new())
        }
        _ => None,
    };

    match layout.fields {
        FieldsShape::Primitive | FieldsShape::Union(_) => {
            // No individually-addressable fields: represent the whole thing as
            // a correctly sized and aligned padding filler.
            let fill = cx.type_padding_filler(layout.size, layout.align.abi);
            let packed = false;
            match name {
                None => cx.type_struct(&[fill], packed),
                Some(ref name) => {
                    let gcc_type = cx.type_named_struct(name);
                    cx.set_struct_body(gcc_type, &[fill], packed);
                    gcc_type.as_type()
                },
            }
        }
        FieldsShape::Array { count, .. } => cx.type_array(layout.field(cx, 0).gcc_type(cx, true), count),
        FieldsShape::Arbitrary { .. } =>
            match name {
                None => {
                    let (gcc_fields, packed) = struct_fields(cx, layout);
                    cx.type_struct(&gcc_fields, packed)
                },
                Some(ref name) => {
                    // Create the named struct opaque and defer filling in its
                    // fields to the caller, to handle recursive types.
                    let gcc_type = cx.type_named_struct(name);
                    *defer = Some((gcc_type, layout));
                    gcc_type.as_type()
                },
            },
    }
}
108
/// Extension trait adding GCC type-lowering helpers to `TyAndLayout`.
pub trait LayoutGccExt<'tcx> {
    /// Whether a value of this layout is passed/returned as a single immediate.
    fn is_gcc_immediate(&self) -> bool;
    /// Whether a value of this layout is passed/returned as a pair of immediates.
    fn is_gcc_scalar_pair(&self) -> bool;
    /// The full GCC type for this layout (cached). See the impl for details.
    fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, set_fields: bool) -> Type<'gcc>;
    /// The GCC type used when the value is an immediate (e.g. `bool` as `i1`).
    fn immediate_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
    /// The GCC type for a single scalar component at `offset` within this layout.
    fn scalar_gcc_type_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, scalar: &abi::Scalar, offset: Size) -> Type<'gcc>;
    /// The GCC type of element `index` (0 or 1) of a scalar-pair layout.
    fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize, immediate: bool) -> Type<'gcc>;
    /// Maps a Rust field index to the corresponding GCC struct field index.
    fn gcc_field_index(&self, index: usize) -> u64;
    /// Pointee alignment/validity info for a pointer found at `offset` (cached).
    fn pointee_info_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, offset: Size) -> Option<PointeeInfo>;
}
119
impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
    // A value is an "immediate" if it is a single scalar or vector, or a ZST.
    fn is_gcc_immediate(&self) -> bool {
        match self.abi {
            Abi::Scalar(_) | Abi::Vector { .. } => true,
            Abi::ScalarPair(..) => false,
            Abi::Uninhabited | Abi::Aggregate { .. } => self.is_zst(),
        }
    }

    fn is_gcc_scalar_pair(&self) -> bool {
        match self.abi {
            Abi::ScalarPair(..) => true,
            Abi::Uninhabited | Abi::Scalar(_) | Abi::Vector { .. } | Abi::Aggregate { .. } => false,
        }
    }

    /// Gets the GCC type corresponding to a Rust type, i.e., `rustc_middle::ty::Ty`.
    /// The pointee type of the pointer in `PlaceRef` is always this type.
    /// For sized types, it is also the right LLVM type for an `alloca`
    /// containing a value of that type, and most immediates (except `bool`).
    /// Unsized types, however, are represented by a "minimal unit", e.g.
    /// `[T]` becomes `T`, while `str` and `Trait` turn into `i8` - this
    /// is useful for indexing slices, as `&[T]`'s data pointer is `T*`.
    /// If the type is an unsized struct, the regular layout is generated,
    /// with the inner-most trailing unsized field using the "minimal unit"
    /// of that field's type - this is useful for taking the address of
    /// that field and ensuring the struct has the right alignment.
    //TODO(antoyo): do we still need the set_fields parameter?
    fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, set_fields: bool) -> Type<'gcc> {
        if let Abi::Scalar(ref scalar) = self.abi {
            // Use a different cache for scalars because pointers to DSTs
            // can be either fat or thin (data pointers of fat pointers).
            if let Some(&ty) = cx.scalar_types.borrow().get(&self.ty) {
                return ty;
            }
            let ty =
                match *self.ty.kind() {
                    // References and raw pointers lower to a pointer to the
                    // pointee's GCC type.
                    ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => {
                        cx.type_ptr_to(cx.layout_of(ty).gcc_type(cx, set_fields))
                    }
                    // `Box<T>` lowers like `*mut T`.
                    ty::Adt(def, _) if def.is_box() => {
                        cx.type_ptr_to(cx.layout_of(self.ty.boxed_ty()).gcc_type(cx, true))
                    }
                    ty::FnPtr(sig) => cx.fn_ptr_backend_type(&cx.fn_abi_of_fn_ptr(sig, ty::List::empty())),
                    _ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO),
                };
            cx.scalar_types.borrow_mut().insert(self.ty, ty);
            return ty;
        }

        // Check the cache. The cache key includes the variant index so
        // per-variant layouts of the same type get distinct entries.
        let variant_index =
            match self.variants {
                Variants::Single { index } => Some(index),
                _ => None,
            };
        let cached_type = cx.types.borrow().get(&(self.ty, variant_index)).cloned();
        if let Some(ty) = cached_type {
            let type_to_set_fields = cx.types_with_fields_to_set.borrow_mut().remove(&ty);
            if let Some((struct_type, layout)) = type_to_set_fields {
                // Since we might be trying to generate a type containing another type which is not
                // completely generated yet, we deferred setting the fields until now.
                let (fields, packed) = struct_fields(cx, layout);
                cx.set_struct_body(struct_type, &fields, packed);
            }
            return ty;
        }

        assert!(!self.ty.has_escaping_bound_vars(), "{:?} has escaping bound vars", self.ty);

        // Make sure lifetimes are erased, to avoid generating distinct LLVM
        // types for Rust types that only differ in the choice of lifetimes.
        let normal_ty = cx.tcx.erase_regions(self.ty);

        let mut defer = None;
        let ty =
            if self.ty != normal_ty {
                // Lower via the region-erased type (and re-select the
                // variant if needed) so equivalent types share one GCC type.
                let mut layout = cx.layout_of(normal_ty);
                if let Some(v) = variant_index {
                    layout = layout.for_variant(cx, v);
                }
                layout.gcc_type(cx, true)
            }
            else {
                uncached_gcc_type(cx, *self, &mut defer)
            };

        cx.types.borrow_mut().insert((self.ty, variant_index), ty);

        // Fill in fields of a struct that was deferred by `uncached_gcc_type`
        // (done after inserting into the cache, so recursion terminates).
        if let Some((ty, layout)) = defer {
            let (fields, packed) = struct_fields(cx, layout);
            cx.set_struct_body(ty, &fields, packed);
        }

        ty
    }

    fn immediate_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
        // Immediate `bool`s use the 1-bit type; everything else matches the
        // in-memory type.
        if let Abi::Scalar(ref scalar) = self.abi {
            if scalar.is_bool() {
                return cx.type_i1();
            }
        }
        self.gcc_type(cx, true)
    }

    fn scalar_gcc_type_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, scalar: &abi::Scalar, offset: Size) -> Type<'gcc> {
        match scalar.primitive() {
            Int(i, true) => cx.type_from_integer(i),
            Int(i, false) => cx.type_from_unsigned_integer(i),
            F32 => cx.type_f32(),
            F64 => cx.type_f64(),
            Pointer => {
                // If we know the alignment, pick something better than i8.
                let pointee =
                    if let Some(pointee) = self.pointee_info_at(cx, offset) {
                        cx.type_pointee_for_align(pointee.align)
                    }
                    else {
                        cx.type_i8()
                    };
                cx.type_ptr_to(pointee)
            }
        }
    }

    fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize, immediate: bool) -> Type<'gcc> {
        // TODO(antoyo): remove llvm hack:
        // HACK(eddyb) special-case fat pointers until LLVM removes
        // pointee types, to avoid bitcasting every `OperandRef::deref`.
        match self.ty.kind() {
            ty::Ref(..) | ty::RawPtr(_) => {
                return self.field(cx, index).gcc_type(cx, true);
            }
            // only wide pointer boxes are handled as pointers
            // thin pointer boxes with scalar allocators are handled by the general logic below
            ty::Adt(def, substs) if def.is_box() && cx.layout_of(substs.type_at(1)).is_zst() => {
                let ptr_ty = cx.tcx.mk_mut_ptr(self.ty.boxed_ty());
                return cx.layout_of(ptr_ty).scalar_pair_element_gcc_type(cx, index, immediate);
            }
            _ => {}
        }

        let (a, b) = match self.abi {
            Abi::ScalarPair(ref a, ref b) => (a, b),
            _ => bug!("TyAndLayout::scalar_pair_element_llty({:?}): not applicable", self),
        };
        let scalar = [a, b][index];

        // Make sure to return the same type `immediate_gcc_type` would when
        // dealing with an immediate pair. This means that `(bool, bool)` is
        // effectively represented as `{i8, i8}` in memory and two `i1`s as an
        // immediate, just like `bool` is typically `i8` in memory and only `i1`
        // when immediate. We need to load/store `bool` as `i8` to avoid
        // crippling LLVM optimizations or triggering other LLVM bugs with `i1`.
        // TODO(antoyo): these bugs certainly don't happen in this case since the bool type is used instead of i1.
        if scalar.is_bool() {
            return cx.type_i1();
        }

        // The second element starts after `a`, aligned for `b`.
        let offset =
            if index == 0 {
                Size::ZERO
            }
            else {
                a.size(cx).align_to(b.align(cx).abi)
            };
        self.scalar_gcc_type_at(cx, scalar, offset)
    }

    fn gcc_field_index(&self, index: usize) -> u64 {
        // Scalars and scalar pairs have no addressable struct fields.
        match self.abi {
            Abi::Scalar(_) | Abi::ScalarPair(..) => {
                bug!("TyAndLayout::gcc_field_index({:?}): not applicable", self)
            }
            _ => {}
        }
        match self.fields {
            FieldsShape::Primitive | FieldsShape::Union(_) => {
                bug!("TyAndLayout::gcc_field_index({:?}): not applicable", self)
            }

            FieldsShape::Array { .. } => index as u64,

            // NOTE(review): `1 + memory_index * 2` presumably accounts for
            // interleaved padding fields emitted by `struct_fields` — confirm
            // against `crate::type_::struct_fields`.
            FieldsShape::Arbitrary { .. } => 1 + (self.fields.memory_index(index) as u64) * 2,
        }
    }

    fn pointee_info_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>, offset: Size) -> Option<PointeeInfo> {
        // Memoized per (type, offset) pair.
        if let Some(&pointee) = cx.pointee_infos.borrow().get(&(self.ty, offset)) {
            return pointee;
        }

        let result = Ty::ty_and_layout_pointee_info_at(*self, cx, offset);

        cx.pointee_infos.borrow_mut().insert((self.ty, offset), result);
        result
    }
}
319
// The backend-agnostic `LayoutTypeMethods` entry points; each delegates to the
// corresponding `LayoutGccExt`/ABI helper defined above or in `crate::abi`.
impl<'gcc, 'tcx> LayoutTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
    fn backend_type(&self, layout: TyAndLayout<'tcx>) -> Type<'gcc> {
        layout.gcc_type(self, true)
    }

    fn immediate_backend_type(&self, layout: TyAndLayout<'tcx>) -> Type<'gcc> {
        layout.immediate_gcc_type(self)
    }

    fn is_backend_immediate(&self, layout: TyAndLayout<'tcx>) -> bool {
        layout.is_gcc_immediate()
    }

    fn is_backend_scalar_pair(&self, layout: TyAndLayout<'tcx>) -> bool {
        layout.is_gcc_scalar_pair()
    }

    fn backend_field_index(&self, layout: TyAndLayout<'tcx>, index: usize) -> u64 {
        layout.gcc_field_index(index)
    }

    fn scalar_pair_element_backend_type(&self, layout: TyAndLayout<'tcx>, index: usize, immediate: bool) -> Type<'gcc> {
        layout.scalar_pair_element_gcc_type(self, index, immediate)
    }

    fn cast_backend_type(&self, ty: &CastTarget) -> Type<'gcc> {
        ty.gcc_type(self)
    }

    fn fn_ptr_backend_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Type<'gcc> {
        fn_abi.ptr_to_gcc_type(self)
    }

    // Not needed by this backend so far.
    fn reg_backend_type(&self, _ty: &Reg) -> Type<'gcc> {
        unimplemented!();
    }

    fn fn_decl_backend_type(&self, _fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Type<'gcc> {
        // FIXME(antoyo): return correct type.
        self.type_void()
    }
}