//! Definition of [`CValue`] and [`CPlace`]

use crate::prelude::*;

use cranelift_codegen::ir::immediates::Offset32;

fn codegen_field<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    base: Pointer,
    extra: Option<Value>,
    layout: TyAndLayout<'tcx>,
    field: mir::Field,
) -> (Pointer, TyAndLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_layout = layout.field(&*fx, field.index());

    let simple = |fx: &mut FunctionCx<'_, '_, '_>| {
        (base.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap()), field_layout)
    };

    if let Some(extra) = extra {
        if field_layout.is_sized() {
            return simple(fx);
        }
        match field_layout.ty.kind() {
            ty::Slice(..) | ty::Str | ty::Foreign(..) => simple(fx),
            ty::Adt(def, _) if def.repr().packed() => {
                assert_eq!(layout.align.abi.bytes(), 1);
                simple(fx)
            }
            _ => {
                // We have to align the offset for DSTs
                let unaligned_offset = field_offset.bytes();
                let (_, unsized_align) =
                    crate::unsize::size_and_align_of_dst(fx, field_layout, extra);

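                // The instructions below round `unaligned_offset` up to the runtime
                // alignment computed above, using the usual power-of-two trick
                // `aligned = (offset + align - 1) & -align`; e.g. an offset of 5 with
                // an alignment of 4 becomes (5 + 3) & !3 = 8.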
                let one = fx.bcx.ins().iconst(fx.pointer_type, 1);
                let align_sub_1 = fx.bcx.ins().isub(unsized_align, one);
                let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64);
                let zero = fx.bcx.ins().iconst(fx.pointer_type, 0);
                let and_rhs = fx.bcx.ins().isub(zero, unsized_align);
                let offset = fx.bcx.ins().band(and_lhs, and_rhs);

                (base.offset_value(fx, offset), field_layout)
            }
        }
    } else {
        simple(fx)
    }
}

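/// Compute the offset of the second element of a scalar pair: the size of the
/// first scalar rounded up to the ABI alignment of the second. For example, if
/// scalar `a` is one byte and scalar `b` is a 4-byte-aligned `u32`, `b` is
/// placed at offset 4.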
fn scalar_pair_calculate_b_offset(tcx: TyCtxt<'_>, a_scalar: Scalar, b_scalar: Scalar) -> Offset32 {
    let b_offset = a_scalar.size(&tcx).align_to(b_scalar.align(&tcx).abi);
    Offset32::new(b_offset.bytes().try_into().unwrap())
}

/// A read-only value
#[derive(Debug, Copy, Clone)]
pub(crate) struct CValue<'tcx>(CValueInner, TyAndLayout<'tcx>);

#[derive(Debug, Copy, Clone)]
enum CValueInner {
    ByRef(Pointer, Option<Value>),
    ByVal(Value),
    ByValPair(Value, Value),
}

impl<'tcx> CValue<'tcx> {
    pub(crate) fn by_ref(ptr: Pointer, layout: TyAndLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(ptr, None), layout)
    }

    pub(crate) fn by_ref_unsized(
        ptr: Pointer,
        meta: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(ptr, Some(meta)), layout)
    }

    pub(crate) fn by_val(value: Value, layout: TyAndLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    pub(crate) fn by_val_pair(
        value: Value,
        extra: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    pub(crate) fn layout(&self) -> TyAndLayout<'tcx> {
        self.1
    }

    // FIXME remove
    pub(crate) fn force_stack(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> (Pointer, Option<Value>) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, meta) => (ptr, meta),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout);
                cplace.write_cvalue(fx, self);
                (cplace.to_ptr(), None)
            }
        }
    }

    // FIXME remove
    /// Forces the data value of a dyn* value to the stack and returns a pointer to it as well as the
    /// vtable pointer.
    pub(crate) fn dyn_star_force_data_on_stack(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
    ) -> (Value, Value) {
        assert!(self.1.ty.is_dyn_star());

        match self.0 {
            CValueInner::ByRef(ptr, None) => {
                let (a_scalar, b_scalar) = match self.1.abi {
                    Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("dyn_star_force_data_on_stack({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar);
                let mut flags = MemFlags::new();
                flags.set_notrap();
                let vtable = ptr.offset(fx, b_offset).load(fx, clif_ty2, flags);
                (ptr.get_addr(fx), vtable)
            }
            CValueInner::ByValPair(data, vtable) => {
                let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData {
                    kind: StackSlotKind::ExplicitSlot,
                    // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
                    // specify stack slot alignment.
                    size: (u32::try_from(fx.target_config.pointer_type().bytes()).unwrap() + 15)
                        / 16
                        * 16,
                });
                let data_ptr = Pointer::stack_slot(stack_slot);
                let mut flags = MemFlags::new();
                flags.set_notrap();
                data_ptr.store(fx, data, flags);

                (data_ptr.get_addr(fx), vtable)
            }
            CValueInner::ByRef(_, Some(_)) | CValueInner::ByVal(_) => {
                unreachable!("dyn_star_force_data_on_stack({:?})", self)
            }
        }
    }

    pub(crate) fn try_to_ptr(self) -> Option<(Pointer, Option<Value>)> {
        match self.0 {
            CValueInner::ByRef(ptr, meta) => Some((ptr, meta)),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => None,
        }
    }

    /// Load a value whose `layout.abi` is `Abi::Scalar`.
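    ///
    /// For example, integers, `bool`, `char` and thin pointers are all
    /// represented by a single Cranelift `Value`.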
    pub(crate) fn load_scalar(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, None) => {
                let clif_ty = match layout.abi {
                    Abi::Scalar(scalar) => scalar_to_clif_type(fx.tcx, scalar),
                    Abi::Vector { element, count } => scalar_to_clif_type(fx.tcx, element)
                        .by(u32::try_from(count).unwrap())
                        .unwrap(),
                    _ => unreachable!("{:?}", layout.ty),
                };
                let mut flags = MemFlags::new();
                flags.set_notrap();
                ptr.load(fx, clif_ty, flags)
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByRef(_, Some(_)) => bug!("load_scalar for unsized value not allowed"),
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }

    /// Load a value pair whose `layout.abi` is `Abi::ScalarPair`.
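    ///
    /// For example, a `&[u8]` is a scalar pair of data pointer and length, so
    /// loading it yields those two Cranelift `Value`s.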
    pub(crate) fn load_scalar_pair(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, None) => {
                let (a_scalar, b_scalar) = match layout.abi {
                    Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("load_scalar_pair({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar);
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar);
                let mut flags = MemFlags::new();
                flags.set_notrap();
                let val1 = ptr.load(fx, clif_ty1, flags);
                let val2 = ptr.offset(fx, b_offset).load(fx, clif_ty2, flags);
                (val1, val2)
            }
            CValueInner::ByRef(_, Some(_)) => {
                bug!("load_scalar_pair for unsized value not allowed")
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }

    pub(crate) fn value_field(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        match self.0 {
            CValueInner::ByVal(val) => match layout.abi {
                Abi::Vector { element: _, count } => {
                    let count = u8::try_from(count).expect("SIMD type with more than 255 lanes???");
                    let field = u8::try_from(field.index()).unwrap();
                    assert!(field < count);
                    let lane = fx.bcx.ins().extractlane(val, field);
                    let field_layout = layout.field(&*fx, usize::from(field));
                    CValue::by_val(lane, field_layout)
                }
                _ => unreachable!("value_field for ByVal with abi {:?}", layout.abi),
            },
            CValueInner::ByValPair(val1, val2) => match layout.abi {
                Abi::ScalarPair(_, _) => {
                    let val = match field.as_u32() {
                        0 => val1,
                        1 => val2,
                        _ => bug!("field should be 0 or 1"),
                    };
                    let field_layout = layout.field(&*fx, usize::from(field));
                    CValue::by_val(val, field_layout)
                }
                _ => unreachable!("value_field for ByValPair with abi {:?}", layout.abi),
            },
            CValueInner::ByRef(ptr, None) => {
                let (field_ptr, field_layout) = codegen_field(fx, ptr, None, layout, field);
                CValue::by_ref(field_ptr, field_layout)
            }
            CValueInner::ByRef(_, Some(_)) => todo!(),
        }
    }

    /// Like [`CValue::value_field`], except that it handles ADTs containing a
    /// single array field in a way that lets you access the individual lanes.
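    ///
    /// For example, for a `#[repr(simd)]` newtype around `[f32; 4]`,
    /// `value_lane(fx, 2)` yields the third `f32` lane.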
    pub(crate) fn value_lane(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        lane_idx: u64,
    ) -> CValue<'tcx> {
        let layout = self.1;
        assert!(layout.ty.is_simd());
        let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
        let lane_layout = fx.layout_of(lane_ty);
        assert!(lane_idx < lane_count);
        match self.0 {
            CValueInner::ByVal(val) => match layout.abi {
                Abi::Vector { element: _, count: _ } => {
                    assert!(lane_count <= u8::MAX.into(), "SIMD type with more than 255 lanes???");
                    let lane_idx = u8::try_from(lane_idx).unwrap();
                    let lane = fx.bcx.ins().extractlane(val, lane_idx);
                    CValue::by_val(lane, lane_layout)
                }
                _ => unreachable!("value_lane for ByVal with abi {:?}", layout.abi),
            },
            CValueInner::ByValPair(_, _) => unreachable!(),
            CValueInner::ByRef(ptr, None) => {
                let field_offset = lane_layout.size * lane_idx;
                let field_ptr = ptr.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap());
                CValue::by_ref(field_ptr, lane_layout)
            }
            CValueInner::ByRef(_, Some(_)) => unreachable!(),
        }
    }

    pub(crate) fn unsize_value(self, fx: &mut FunctionCx<'_, '_, 'tcx>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }

    pub(crate) fn coerce_dyn_star(self, fx: &mut FunctionCx<'_, '_, 'tcx>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_dyn_star(fx, self, dest);
    }

    /// If `ty` is signed, `const_val` must already be sign extended.
    pub(crate) fn const_val(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
        const_val: ty::ScalarInt,
    ) -> CValue<'tcx> {
        assert_eq!(const_val.size(), layout.size, "{:#?}: {:?}", const_val, layout);
        use cranelift_codegen::ir::immediates::{Ieee32, Ieee64};

        let clif_ty = fx.clif_type(layout.ty).unwrap();

        if let ty::Bool = layout.ty.kind() {
            assert!(
                const_val == ty::ScalarInt::FALSE || const_val == ty::ScalarInt::TRUE,
                "Invalid bool 0x{:032X}",
                const_val
            );
        }

        let val = match layout.ty.kind() {
            ty::Uint(UintTy::U128) | ty::Int(IntTy::I128) => {
                let const_val = const_val.to_bits(layout.size).unwrap();
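                // Cranelift's `iconst` only takes a 64-bit immediate, so the 128-bit
                // constant is built from its low and high halves with `iconcat`;
                // e.g. (1u128 << 64) | 0xff gives lsb = 0xff and msb = 1.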
                let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
                let msb = fx.bcx.ins().iconst(types::I64, (const_val >> 64) as u64 as i64);
                fx.bcx.ins().iconcat(lsb, msb)
            }
            ty::Bool | ty::Char | ty::Uint(_) | ty::Int(_) | ty::Ref(..) | ty::RawPtr(..) => {
                fx.bcx.ins().iconst(clif_ty, const_val.to_bits(layout.size).unwrap() as i64)
            }
            ty::Float(FloatTy::F32) => {
                fx.bcx.ins().f32const(Ieee32::with_bits(u32::try_from(const_val).unwrap()))
            }
            ty::Float(FloatTy::F64) => {
                fx.bcx.ins().f64const(Ieee64::with_bits(u64::try_from(const_val).unwrap()))
            }
            _ => panic!(
                "CValue::const_val for non bool/char/float/integer/pointer type {:?} is not allowed",
                layout.ty
            ),
        };

        CValue::by_val(val, layout)
    }

    pub(crate) fn cast_pointer_to(self, layout: TyAndLayout<'tcx>) -> Self {
        assert!(matches!(self.layout().ty.kind(), ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..)));
        assert!(matches!(layout.ty.kind(), ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..)));
        assert_eq!(self.layout().abi, layout.abi);
        CValue(self.0, layout)
    }
}

/// A place where you can write a value to or read a value from
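///
/// A `CPlace` is backed either by one or more Cranelift variables (`Var`,
/// `VarPair`, `VarLane`) or by a possibly unsized memory location (`Addr`);
/// see `CPlaceInner` below.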
#[derive(Debug, Copy, Clone)]
pub(crate) struct CPlace<'tcx> {
    inner: CPlaceInner,
    layout: TyAndLayout<'tcx>,
}

#[derive(Debug, Copy, Clone)]
pub(crate) enum CPlaceInner {
    Var(Local, Variable),
    VarPair(Local, Variable, Variable),
    VarLane(Local, Variable, u8),
    Addr(Pointer, Option<Value>),
}

impl<'tcx> CPlace<'tcx> {
    pub(crate) fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    pub(crate) fn inner(&self) -> &CPlaceInner {
        &self.inner
    }

    pub(crate) fn new_stack_slot(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        assert!(layout.is_sized());
        if layout.size.bytes() == 0 {
            return CPlace {
                inner: CPlaceInner::Addr(Pointer::dangling(layout.align.pref), None),
                layout,
            };
        }

        if layout.size.bytes() >= u64::from(u32::MAX - 16) {
            fx.tcx
                .sess
                .fatal(&format!("values of type {} are too big to store on the stack", layout.ty));
        }

        let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
            // specify stack slot alignment.
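            // (The rounding below gives e.g. a 32-byte slot for a 20-byte layout:
            // (20 + 15) / 16 * 16 == 32.)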
            size: (u32::try_from(layout.size.bytes()).unwrap() + 15) / 16 * 16,
        });
        CPlace { inner: CPlaceInner::Addr(Pointer::stack_slot(stack_slot), None), layout }
    }

    pub(crate) fn new_var(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        local: Local,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        let var = Variable::with_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;
        fx.bcx.declare_var(var, fx.clif_type(layout.ty).unwrap());
        CPlace { inner: CPlaceInner::Var(local, var), layout }
    }

    pub(crate) fn new_var_pair(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        local: Local,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        let var1 = Variable::with_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;
        let var2 = Variable::with_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;

        let (ty1, ty2) = fx.clif_pair_type(layout.ty).unwrap();
        fx.bcx.declare_var(var1, ty1);
        fx.bcx.declare_var(var2, ty2);
        CPlace { inner: CPlaceInner::VarPair(local, var1, var2), layout }
    }

    pub(crate) fn for_ptr(ptr: Pointer, layout: TyAndLayout<'tcx>) -> CPlace<'tcx> {
        CPlace { inner: CPlaceInner::Addr(ptr, None), layout }
    }

    pub(crate) fn for_ptr_with_extra(
        ptr: Pointer,
        extra: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        CPlace { inner: CPlaceInner::Addr(ptr, Some(extra)), layout }
    }

    pub(crate) fn to_cvalue(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> CValue<'tcx> {
        let layout = self.layout();
        match self.inner {
            CPlaceInner::Var(_local, var) => {
                let val = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(val, cranelift_codegen::ir::ValueLabel::new(var.index()));
                CValue::by_val(val, layout)
            }
            CPlaceInner::VarPair(_local, var1, var2) => {
                let val1 = fx.bcx.use_var(var1);
                //fx.bcx.set_val_label(val1, cranelift_codegen::ir::ValueLabel::new(var1.index()));
                let val2 = fx.bcx.use_var(var2);
                //fx.bcx.set_val_label(val2, cranelift_codegen::ir::ValueLabel::new(var2.index()));
                CValue::by_val_pair(val1, val2, layout)
            }
            CPlaceInner::VarLane(_local, var, lane) => {
                let val = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(val, cranelift_codegen::ir::ValueLabel::new(var.index()));
                let val = fx.bcx.ins().extractlane(val, lane);
                CValue::by_val(val, layout)
            }
            CPlaceInner::Addr(ptr, extra) => {
                if let Some(extra) = extra {
                    CValue::by_ref_unsized(ptr, extra, layout)
                } else {
                    CValue::by_ref(ptr, layout)
                }
            }
        }
    }

    pub(crate) fn to_ptr(self) -> Pointer {
        match self.to_ptr_maybe_unsized() {
            (ptr, None) => ptr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    pub(crate) fn to_ptr_maybe_unsized(self) -> (Pointer, Option<Value>) {
        match self.inner {
            CPlaceInner::Addr(ptr, extra) => (ptr, extra),
            CPlaceInner::Var(_, _)
            | CPlaceInner::VarPair(_, _, _)
            | CPlaceInner::VarLane(_, _, _) => bug!("Expected CPlace::Addr, found {:?}", self),
        }
    }

    pub(crate) fn write_cvalue(self, fx: &mut FunctionCx<'_, '_, 'tcx>, from: CValue<'tcx>) {
        assert_assignable(fx, from.layout().ty, self.layout().ty, 16);

        self.write_cvalue_maybe_transmute(fx, from, "write_cvalue");
    }

    pub(crate) fn write_cvalue_transmute(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        from: CValue<'tcx>,
    ) {
        self.write_cvalue_maybe_transmute(fx, from, "write_cvalue_transmute");
    }

    fn write_cvalue_maybe_transmute(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        from: CValue<'tcx>,
        method: &'static str,
    ) {
        fn transmute_value<'tcx>(
            fx: &mut FunctionCx<'_, '_, 'tcx>,
            var: Variable,
            data: Value,
            dst_ty: Type,
        ) {
            let src_ty = fx.bcx.func.dfg.value_type(data);
            assert_eq!(
                src_ty.bytes(),
                dst_ty.bytes(),
                "write_cvalue_transmute: {:?} -> {:?}",
                src_ty,
                dst_ty,
            );
            let data = match (src_ty, dst_ty) {
                (_, _) if src_ty == dst_ty => data,

                // This is a `write_cvalue_transmute`.
                (types::I32, types::F32)
                | (types::F32, types::I32)
                | (types::I64, types::F64)
                | (types::F64, types::I64) => fx.bcx.ins().bitcast(dst_ty, data),
                _ if src_ty.is_vector() && dst_ty.is_vector() => {
                    fx.bcx.ins().raw_bitcast(dst_ty, data)
                }
                _ if src_ty.is_vector() || dst_ty.is_vector() => {
                    // FIXME do something more efficient for transmutes between vectors and integers.
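                    // What happens below: the value is spilled to a stack slot and
                    // reloaded with the destination Cranelift type, reinterpreting
                    // the same bytes under the new type.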
                    let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData {
                        kind: StackSlotKind::ExplicitSlot,
                        // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
                        // specify stack slot alignment.
                        size: (src_ty.bytes() + 15) / 16 * 16,
                    });
                    let ptr = Pointer::stack_slot(stack_slot);
                    ptr.store(fx, data, MemFlags::trusted());
                    ptr.load(fx, dst_ty, MemFlags::trusted())
                }

                // `CValue`s should never contain SSA-only types, so if you ended
                // up here having seen an error like `B1 -> I8`, then before
                // calling `write_cvalue` you need to add a `bint` instruction.
                _ => unreachable!("write_cvalue_transmute: {:?} -> {:?}", src_ty, dst_ty),
            };
            //fx.bcx.set_val_label(data, cranelift_codegen::ir::ValueLabel::new(var.index()));
            fx.bcx.def_var(var, data);
        }

        assert_eq!(self.layout().size, from.layout().size);

        if fx.clif_comments.enabled() {
            use cranelift_codegen::cursor::{Cursor, CursorPosition};
            let cur_block = match fx.bcx.cursor().position() {
                CursorPosition::After(block) => block,
                _ => unreachable!(),
            };
            fx.add_comment(
                fx.bcx.func.layout.last_inst(cur_block).unwrap(),
                format!(
                    "{}: {:?}: {:?} <- {:?}: {:?}",
                    method,
                    self.inner(),
                    self.layout().ty,
                    from.0,
                    from.layout().ty
                ),
            );
        }

        let dst_layout = self.layout();
        let to_ptr = match self.inner {
            CPlaceInner::Var(_local, var) => {
                if let ty::Array(element, len) = dst_layout.ty.kind() {
                    // Can only happen for vector types
                    let len =
                        u32::try_from(len.eval_usize(fx.tcx, ParamEnv::reveal_all())).unwrap();
                    let vector_ty = fx.clif_type(*element).unwrap().by(len).unwrap();

                    let data = match from.0 {
                        CValueInner::ByRef(ptr, None) => {
                            let mut flags = MemFlags::new();
                            flags.set_notrap();
                            ptr.load(fx, vector_ty, flags)
                        }
                        CValueInner::ByVal(_)
                        | CValueInner::ByValPair(_, _)
                        | CValueInner::ByRef(_, Some(_)) => bug!("array should be ByRef"),
                    };

                    fx.bcx.def_var(var, data);
                    return;
                }
                let data = CValue(from.0, dst_layout).load_scalar(fx);
                let dst_ty = fx.clif_type(self.layout().ty).unwrap();
                transmute_value(fx, var, data, dst_ty);
                return;
            }
            CPlaceInner::VarPair(_local, var1, var2) => {
                let (data1, data2) = CValue(from.0, dst_layout).load_scalar_pair(fx);
                let (dst_ty1, dst_ty2) = fx.clif_pair_type(self.layout().ty).unwrap();
                transmute_value(fx, var1, data1, dst_ty1);
                transmute_value(fx, var2, data2, dst_ty2);
                return;
            }
            CPlaceInner::VarLane(_local, var, lane) => {
                let data = from.load_scalar(fx);

                // First get the old vector
                let vector = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(vector, cranelift_codegen::ir::ValueLabel::new(var.index()));

                // Next insert the written lane into the vector
                let vector = fx.bcx.ins().insertlane(vector, data, lane);

                // Finally write the new vector
                //fx.bcx.set_val_label(vector, cranelift_codegen::ir::ValueLabel::new(var.index()));
                fx.bcx.def_var(var, vector);

                return;
            }
            CPlaceInner::Addr(ptr, None) => {
                if dst_layout.size == Size::ZERO || dst_layout.abi == Abi::Uninhabited {
                    return;
                }
                ptr
            }
            CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
        };

        let mut flags = MemFlags::new();
        flags.set_notrap();
        match from.layout().abi {
            // FIXME make Abi::Vector work too
            Abi::Scalar(_) => {
                let val = from.load_scalar(fx);
                to_ptr.store(fx, val, flags);
                return;
            }
            Abi::ScalarPair(a_scalar, b_scalar) => {
                let (value, extra) = from.load_scalar_pair(fx);
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                to_ptr.store(fx, value, flags);
                to_ptr.offset(fx, b_offset).store(fx, extra, flags);
                return;
            }
            _ => {}
        }

        match from.0 {
            CValueInner::ByVal(val) => {
                to_ptr.store(fx, val, flags);
            }
            CValueInner::ByValPair(_, _) => {
                bug!("Non ScalarPair abi {:?} for ByValPair CValue", dst_layout.abi);
            }
            CValueInner::ByRef(from_ptr, None) => {
                let from_addr = from_ptr.get_addr(fx);
                let to_addr = to_ptr.get_addr(fx);
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memory_copy(
                    fx.target_config,
                    to_addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                    true,
                    flags,
                );
            }
            CValueInner::ByRef(_, Some(_)) => todo!(),
        }
    }

    pub(crate) fn place_opaque_cast(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        ty: Ty<'tcx>,
    ) -> CPlace<'tcx> {
        CPlace { inner: self.inner, layout: fx.layout_of(ty) }
    }

    pub(crate) fn place_field(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();

        match self.inner {
            CPlaceInner::Var(local, var) => match layout.ty.kind() {
                ty::Array(_, _) => {
                    // Can only happen for vector types
                    return CPlace {
                        inner: CPlaceInner::VarLane(local, var, field.as_u32().try_into().unwrap()),
                        layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                    };
                }
                ty::Adt(adt_def, substs) if layout.ty.is_simd() => {
                    let f0_ty = adt_def.non_enum_variant().fields[0].ty(fx.tcx, substs);

                    match f0_ty.kind() {
                        ty::Array(_, _) => {
                            assert_eq!(field.as_u32(), 0);
                            return CPlace {
                                inner: CPlaceInner::Var(local, var),
                                layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                            };
                        }
                        _ => {
                            return CPlace {
                                inner: CPlaceInner::VarLane(
                                    local,
                                    var,
                                    field.as_u32().try_into().unwrap(),
                                ),
                                layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                            };
                        }
                    }
                }
                _ => {}
            },
            CPlaceInner::VarPair(local, var1, var2) => {
                let layout = layout.field(&*fx, field.index());

                match field.as_u32() {
                    0 => return CPlace { inner: CPlaceInner::Var(local, var1), layout },
                    1 => return CPlace { inner: CPlaceInner::Var(local, var2), layout },
                    _ => unreachable!("field should be 0 or 1"),
                }
            }
            _ => {}
        }

        let (base, extra) = self.to_ptr_maybe_unsized();

        let (field_ptr, field_layout) = codegen_field(fx, base, extra, layout, field);
        if field_layout.is_unsized() {
            if let ty::Foreign(_) = field_layout.ty.kind() {
                assert!(extra.is_none());
                CPlace::for_ptr(field_ptr, field_layout)
            } else {
                CPlace::for_ptr_with_extra(field_ptr, extra.unwrap(), field_layout)
            }
        } else {
            CPlace::for_ptr(field_ptr, field_layout)
        }
    }

    /// Like [`CPlace::place_field`], except that it handles ADTs containing a
    /// single array field in a way that lets you access the individual lanes.
    pub(crate) fn place_lane(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        lane_idx: u64,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        assert!(layout.ty.is_simd());
        let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
        let lane_layout = fx.layout_of(lane_ty);
        assert!(lane_idx < lane_count);

        match self.inner {
            CPlaceInner::Var(local, var) => {
                assert!(matches!(layout.abi, Abi::Vector { .. }));
                CPlace {
                    inner: CPlaceInner::VarLane(local, var, lane_idx.try_into().unwrap()),
                    layout: lane_layout,
                }
            }
            CPlaceInner::VarPair(_, _, _) => unreachable!(),
            CPlaceInner::VarLane(_, _, _) => unreachable!(),
            CPlaceInner::Addr(ptr, None) => {
                let field_offset = lane_layout.size * lane_idx;
                let field_ptr = ptr.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap());
                CPlace::for_ptr(field_ptr, lane_layout)
            }
            CPlaceInner::Addr(_, Some(_)) => unreachable!(),
        }
    }

    pub(crate) fn place_index(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, ptr) = match self.layout().ty.kind() {
            ty::Array(elem_ty, _) => (fx.layout_of(*elem_ty), self.to_ptr()),
            ty::Slice(elem_ty) => (fx.layout_of(*elem_ty), self.to_ptr_maybe_unsized().0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

        let offset = fx.bcx.ins().imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::for_ptr(ptr.offset_value(fx, offset), elem_layout)
    }

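    /// Dereference this place. For a thin pointer the loaded address simply
    /// becomes the new place; for a fat pointer (e.g. a place of type `&str`)
    /// the metadata is carried along as the `extra` value of the resulting
    /// unsized place.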
    pub(crate) fn place_deref(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if has_ptr_meta(fx.tcx, inner_layout.ty) {
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::for_ptr_with_extra(Pointer::new(addr), extra, inner_layout)
        } else {
            CPlace::for_ptr(Pointer::new(self.to_cvalue(fx).load_scalar(fx)), inner_layout)
        }
    }

    pub(crate) fn place_ref(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        if has_ptr_meta(fx.tcx, self.layout().ty) {
            let (ptr, extra) = self.to_ptr_maybe_unsized();
            CValue::by_val_pair(
                ptr.get_addr(fx),
                extra.expect("unsized type without metadata"),
                layout,
            )
        } else {
            CValue::by_val(self.to_ptr().get_addr(fx), layout)
        }
    }

    pub(crate) fn downcast_variant(
        self,
        fx: &FunctionCx<'_, '_, 'tcx>,
        variant: VariantIdx,
    ) -> Self {
        assert!(self.layout().is_sized());
        let layout = self.layout().for_variant(fx, variant);
        CPlace { inner: self.inner, layout }
    }
}

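/// Asserts that a value of type `from_ty` is assignable to a place of type
/// `to_ty`. This only exists to catch bugs in cg_clif itself; it is not needed
/// for soundness, which is why the check gives up once the recursion `limit`
/// is reached.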
#[track_caller]
pub(crate) fn assert_assignable<'tcx>(
    fx: &FunctionCx<'_, '_, 'tcx>,
    from_ty: Ty<'tcx>,
    to_ty: Ty<'tcx>,
    limit: usize,
) {
    if limit == 0 {
        // assert_assignable exists solely to catch bugs in cg_clif. it isn't necessary for
        // soundness. don't attempt to check deep types to avoid exponential behavior in certain
        // cases.
        return;
    }
    match (from_ty.kind(), to_ty.kind()) {
        (ty::Ref(_, a, _), ty::Ref(_, b, _))
        | (
            ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }),
            ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }),
        ) => {
            assert_assignable(fx, *a, *b, limit - 1);
        }
        (ty::Ref(_, a, _), ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }))
        | (ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }), ty::Ref(_, b, _)) => {
            assert_assignable(fx, *a, *b, limit - 1);
        }
        (ty::FnPtr(_), ty::FnPtr(_)) => {
            let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                ParamEnv::reveal_all(),
                from_ty.fn_sig(fx.tcx),
            );
            let to_sig = fx
                .tcx
                .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_ty.fn_sig(fx.tcx));
            assert_eq!(
                from_sig, to_sig,
                "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                from_sig, to_sig, fx,
            );
            // fn(&T) -> for<'l> fn(&'l T) is allowed
        }
        (&ty::Dynamic(from_traits, _, _from_kind), &ty::Dynamic(to_traits, _, _to_kind)) => {
            // FIXME(dyn-star): Do the right thing with DynKinds
            for (from, to) in from_traits.iter().zip(to_traits) {
                let from =
                    fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from);
                let to = fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to);
                assert_eq!(
                    from, to,
                    "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                    from_traits, to_traits, fx,
                );
            }
            // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
        }
        (&ty::Tuple(types_a), &ty::Tuple(types_b)) => {
            let mut types_a = types_a.iter();
            let mut types_b = types_b.iter();
            loop {
                match (types_a.next(), types_b.next()) {
                    (Some(a), Some(b)) => assert_assignable(fx, a, b, limit - 1),
                    (None, None) => return,
                    (Some(_), None) | (None, Some(_)) => panic!("{:#?}/{:#?}", from_ty, to_ty),
                }
            }
        }
        (&ty::Adt(adt_def_a, substs_a), &ty::Adt(adt_def_b, substs_b))
            if adt_def_a.did() == adt_def_b.did() =>
        {
            let mut types_a = substs_a.types();
            let mut types_b = substs_b.types();
            loop {
                match (types_a.next(), types_b.next()) {
                    (Some(a), Some(b)) => assert_assignable(fx, a, b, limit - 1),
                    (None, None) => return,
                    (Some(_), None) | (None, Some(_)) => panic!("{:#?}/{:#?}", from_ty, to_ty),
                }
            }
        }
        (ty::Array(a, _), ty::Array(b, _)) => assert_assignable(fx, *a, *b, limit - 1),
        (&ty::Closure(def_id_a, substs_a), &ty::Closure(def_id_b, substs_b))
            if def_id_a == def_id_b =>
        {
            let mut types_a = substs_a.types();
            let mut types_b = substs_b.types();
            loop {
                match (types_a.next(), types_b.next()) {
                    (Some(a), Some(b)) => assert_assignable(fx, a, b, limit - 1),
                    (None, None) => return,
                    (Some(_), None) | (None, Some(_)) => panic!("{:#?}/{:#?}", from_ty, to_ty),
                }
            }
        }
        (ty::Param(_), _) | (_, ty::Param(_)) if fx.tcx.sess.opts.unstable_opts.polymorphize => {
            // No way to check if it is correct or not with polymorphization enabled
        }
        _ => {
            assert_eq!(
                from_ty,
                to_ty,
                "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                from_ty.kind(),
                to_ty.kind(),
                fx,
            );
        }
    }
}