//! Definition of [`CValue`] and [`CPlace`]

use crate::prelude::*;

use cranelift_codegen::ir::immediates::Offset32;

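/// Compute the pointer to, and layout of, the given field of a value of type `layout` located
/// at `base`. For unsized tail fields, `extra` carries the pointer metadata needed to compute
/// the dynamic alignment of the field.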
fn codegen_field<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    base: Pointer,
    extra: Option<Value>,
    layout: TyAndLayout<'tcx>,
    field: mir::Field,
) -> (Pointer, TyAndLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_layout = layout.field(&*fx, field.index());

    let simple = |fx: &mut FunctionCx<'_, '_, '_>| {
        (base.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap()), field_layout)
    };

    if let Some(extra) = extra {
        if !field_layout.is_unsized() {
            return simple(fx);
        }
        match field_layout.ty.kind() {
            ty::Slice(..) | ty::Str | ty::Foreign(..) => simple(fx),
            ty::Adt(def, _) if def.repr().packed() => {
                assert_eq!(layout.align.abi.bytes(), 1);
                simple(fx)
            }
            _ => {
                // We have to align the offset for DSTs: round the statically known offset up
                // to the dynamic alignment, i.e. (unaligned_offset + align - 1) & -align.
                let unaligned_offset = field_offset.bytes();
                let (_, unsized_align) =
                    crate::unsize::size_and_align_of_dst(fx, field_layout, extra);

                let one = fx.bcx.ins().iconst(fx.pointer_type, 1);
                let align_sub_1 = fx.bcx.ins().isub(unsized_align, one);
                let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64);
                let zero = fx.bcx.ins().iconst(fx.pointer_type, 0);
                let and_rhs = fx.bcx.ins().isub(zero, unsized_align);
                let offset = fx.bcx.ins().band(and_lhs, and_rhs);

                (base.offset_value(fx, offset), field_layout)
            }
        }
    } else {
        simple(fx)
    }
}

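/// Offset of the second element of a scalar pair: the size of the first element rounded up to
/// the ABI alignment of the second element.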
fn scalar_pair_calculate_b_offset(tcx: TyCtxt<'_>, a_scalar: Scalar, b_scalar: Scalar) -> Offset32 {
    let b_offset = a_scalar.value.size(&tcx).align_to(b_scalar.value.align(&tcx).abi);
    Offset32::new(b_offset.bytes().try_into().unwrap())
}

/// A read-only value
#[derive(Debug, Copy, Clone)]
pub(crate) struct CValue<'tcx>(CValueInner, TyAndLayout<'tcx>);

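/// How the value is represented: behind a pointer (with optional metadata for unsized values),
/// as a single SSA value, or as a pair of SSA values (e.g. for slices and other fat pointers).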
#[derive(Debug, Copy, Clone)]
enum CValueInner {
    ByRef(Pointer, Option<Value>),
    ByVal(Value),
    ByValPair(Value, Value),
}

impl<'tcx> CValue<'tcx> {
    pub(crate) fn by_ref(ptr: Pointer, layout: TyAndLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(ptr, None), layout)
    }

    pub(crate) fn by_ref_unsized(
        ptr: Pointer,
        meta: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(ptr, Some(meta)), layout)
    }

    pub(crate) fn by_val(value: Value, layout: TyAndLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    pub(crate) fn by_val_pair(
        value: Value,
        extra: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    pub(crate) fn layout(&self) -> TyAndLayout<'tcx> {
        self.1
    }

    // FIXME remove
    pub(crate) fn force_stack(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> (Pointer, Option<Value>) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, meta) => (ptr, meta),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout);
                cplace.write_cvalue(fx, self);
                (cplace.to_ptr(), None)
            }
        }
    }

    pub(crate) fn try_to_ptr(self) -> Option<(Pointer, Option<Value>)> {
        match self.0 {
            CValueInner::ByRef(ptr, meta) => Some((ptr, meta)),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => None,
        }
    }

    /// Load a value whose `layout.abi` is `Abi::Scalar`.
    pub(crate) fn load_scalar(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, None) => {
                let clif_ty = match layout.abi {
                    Abi::Scalar(scalar) => scalar_to_clif_type(fx.tcx, scalar),
                    Abi::Vector { element, count } => scalar_to_clif_type(fx.tcx, element)
                        .by(u16::try_from(count).unwrap())
                        .unwrap(),
                    _ => unreachable!("{:?}", layout.ty),
                };
                let mut flags = MemFlags::new();
                flags.set_notrap();
                ptr.load(fx, clif_ty, flags)
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByRef(_, Some(_)) => bug!("load_scalar for unsized value not allowed"),
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }

    /// Load a value pair whose `layout.abi` is `Abi::ScalarPair`.
    pub(crate) fn load_scalar_pair(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, None) => {
                let (a_scalar, b_scalar) = match layout.abi {
                    Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("load_scalar_pair({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar);
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar);
                let mut flags = MemFlags::new();
                flags.set_notrap();
                let val1 = ptr.load(fx, clif_ty1, flags);
                let val2 = ptr.offset(fx, b_offset).load(fx, clif_ty2, flags);
                (val1, val2)
            }
            CValueInner::ByRef(_, Some(_)) => {
                bug!("load_scalar_pair for unsized value not allowed")
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }

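    /// Produce a new [`CValue`] for the given field of this value.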
    pub(crate) fn value_field(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        match self.0 {
            CValueInner::ByVal(val) => match layout.abi {
                Abi::Vector { element: _, count } => {
                    let count = u8::try_from(count).expect("SIMD type with more than 255 lanes???");
                    let field = u8::try_from(field.index()).unwrap();
                    assert!(field < count);
                    let lane = fx.bcx.ins().extractlane(val, field);
                    let field_layout = layout.field(&*fx, usize::from(field));
                    CValue::by_val(lane, field_layout)
                }
                _ => unreachable!("value_field for ByVal with abi {:?}", layout.abi),
            },
            CValueInner::ByValPair(val1, val2) => match layout.abi {
                Abi::ScalarPair(_, _) => {
                    let val = match field.as_u32() {
                        0 => val1,
                        1 => val2,
                        _ => bug!("field should be 0 or 1"),
                    };
                    let field_layout = layout.field(&*fx, usize::from(field));
                    CValue::by_val(val, field_layout)
                }
                _ => unreachable!("value_field for ByValPair with abi {:?}", layout.abi),
            },
            CValueInner::ByRef(ptr, None) => {
                let (field_ptr, field_layout) = codegen_field(fx, ptr, None, layout, field);
                CValue::by_ref(field_ptr, field_layout)
            }
            CValueInner::ByRef(_, Some(_)) => todo!(),
        }
    }

    /// Like [`CValue::value_field`] except handling ADTs containing a single array field in a way
    /// such that you can access individual lanes.
    pub(crate) fn value_lane(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        lane_idx: u64,
    ) -> CValue<'tcx> {
        let layout = self.1;
        assert!(layout.ty.is_simd());
        let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
        let lane_layout = fx.layout_of(lane_ty);
        assert!(lane_idx < lane_count);
        match self.0 {
            CValueInner::ByVal(val) => match layout.abi {
                Abi::Vector { element: _, count: _ } => {
                    assert!(lane_count <= u8::MAX.into(), "SIMD type with more than 255 lanes???");
                    let lane_idx = u8::try_from(lane_idx).unwrap();
                    let lane = fx.bcx.ins().extractlane(val, lane_idx);
                    CValue::by_val(lane, lane_layout)
                }
                _ => unreachable!("value_lane for ByVal with abi {:?}", layout.abi),
            },
            CValueInner::ByValPair(_, _) => unreachable!(),
            CValueInner::ByRef(ptr, None) => {
                let field_offset = lane_layout.size * lane_idx;
                let field_ptr = ptr.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap());
                CValue::by_ref(field_ptr, lane_layout)
            }
            CValueInner::ByRef(_, Some(_)) => unreachable!(),
        }
    }

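    /// Perform an unsizing coercion of this value (e.g. `&[T; N]` to `&[T]` or `&T` to
    /// `&dyn Trait`) and write the result to `dest`.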
    pub(crate) fn unsize_value(self, fx: &mut FunctionCx<'_, '_, 'tcx>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }

    /// If `ty` is signed, `const_val` must already be sign extended.
    pub(crate) fn const_val(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
        const_val: ty::ScalarInt,
    ) -> CValue<'tcx> {
        assert_eq!(const_val.size(), layout.size, "{:#?}: {:?}", const_val, layout);
        use cranelift_codegen::ir::immediates::{Ieee32, Ieee64};

        let clif_ty = fx.clif_type(layout.ty).unwrap();

        if let ty::Bool = layout.ty.kind() {
            assert!(
                const_val == ty::ScalarInt::FALSE || const_val == ty::ScalarInt::TRUE,
                "Invalid bool 0x{:032X}",
                const_val
            );
        }

        let val = match layout.ty.kind() {
            ty::Uint(UintTy::U128) | ty::Int(IntTy::I128) => {
                let const_val = const_val.to_bits(layout.size).unwrap();
                let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
                let msb = fx.bcx.ins().iconst(types::I64, (const_val >> 64) as u64 as i64);
                fx.bcx.ins().iconcat(lsb, msb)
            }
            ty::Bool | ty::Char | ty::Uint(_) | ty::Int(_) | ty::Ref(..) | ty::RawPtr(..) => {
                fx.bcx.ins().iconst(clif_ty, const_val.to_bits(layout.size).unwrap() as i64)
            }
            ty::Float(FloatTy::F32) => {
                fx.bcx.ins().f32const(Ieee32::with_bits(u32::try_from(const_val).unwrap()))
            }
            ty::Float(FloatTy::F64) => {
                fx.bcx.ins().f64const(Ieee64::with_bits(u64::try_from(const_val).unwrap()))
            }
            _ => panic!(
                "CValue::const_val for non bool/char/float/integer/pointer type {:?} is not allowed",
                layout.ty
            ),
        };

        CValue::by_val(val, layout)
    }

    pub(crate) fn cast_pointer_to(self, layout: TyAndLayout<'tcx>) -> Self {
        assert!(matches!(self.layout().ty.kind(), ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..)));
        assert!(matches!(layout.ty.kind(), ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..)));
        assert_eq!(self.layout().abi, layout.abi);
        CValue(self.0, layout)
    }
}

/// A place where you can write a value to or read a value from
#[derive(Debug, Copy, Clone)]
pub(crate) struct CPlace<'tcx> {
    inner: CPlaceInner,
    layout: TyAndLayout<'tcx>,
}

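/// How the place is represented: as a single SSA variable, a pair of SSA variables, a single
/// lane of a vector stored in an SSA variable, or a memory address (with optional metadata for
/// unsized places).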
#[derive(Debug, Copy, Clone)]
pub(crate) enum CPlaceInner {
    Var(Local, Variable),
    VarPair(Local, Variable, Variable),
    VarLane(Local, Variable, u8),
    Addr(Pointer, Option<Value>),
}

impl<'tcx> CPlace<'tcx> {
    pub(crate) fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    pub(crate) fn inner(&self) -> &CPlaceInner {
        &self.inner
    }

    pub(crate) fn new_stack_slot(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        assert!(!layout.is_unsized());
        if layout.size.bytes() == 0 {
            return CPlace {
                inner: CPlaceInner::Addr(Pointer::dangling(layout.align.pref), None),
                layout,
            };
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
            // specify stack slot alignment.
            size: (u32::try_from(layout.size.bytes()).unwrap() + 15) / 16 * 16,
        });
        CPlace { inner: CPlaceInner::Addr(Pointer::stack_slot(stack_slot), None), layout }
    }

    pub(crate) fn new_var(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        local: Local,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        let var = Variable::with_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;
        fx.bcx.declare_var(var, fx.clif_type(layout.ty).unwrap());
        CPlace { inner: CPlaceInner::Var(local, var), layout }
    }

    pub(crate) fn new_var_pair(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        local: Local,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        let var1 = Variable::with_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;
        let var2 = Variable::with_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;

        let (ty1, ty2) = fx.clif_pair_type(layout.ty).unwrap();
        fx.bcx.declare_var(var1, ty1);
        fx.bcx.declare_var(var2, ty2);
        CPlace { inner: CPlaceInner::VarPair(local, var1, var2), layout }
    }

    pub(crate) fn for_ptr(ptr: Pointer, layout: TyAndLayout<'tcx>) -> CPlace<'tcx> {
        CPlace { inner: CPlaceInner::Addr(ptr, None), layout }
    }

    pub(crate) fn for_ptr_with_extra(
        ptr: Pointer,
        extra: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        CPlace { inner: CPlaceInner::Addr(ptr, Some(extra)), layout }
    }

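    /// Get the current contents of this place as a read-only [`CValue`].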
    pub(crate) fn to_cvalue(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> CValue<'tcx> {
        let layout = self.layout();
        match self.inner {
            CPlaceInner::Var(_local, var) => {
                let val = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(val, cranelift_codegen::ir::ValueLabel::new(var.index()));
                CValue::by_val(val, layout)
            }
            CPlaceInner::VarPair(_local, var1, var2) => {
                let val1 = fx.bcx.use_var(var1);
                //fx.bcx.set_val_label(val1, cranelift_codegen::ir::ValueLabel::new(var1.index()));
                let val2 = fx.bcx.use_var(var2);
                //fx.bcx.set_val_label(val2, cranelift_codegen::ir::ValueLabel::new(var2.index()));
                CValue::by_val_pair(val1, val2, layout)
            }
            CPlaceInner::VarLane(_local, var, lane) => {
                let val = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(val, cranelift_codegen::ir::ValueLabel::new(var.index()));
                let val = fx.bcx.ins().extractlane(val, lane);
                CValue::by_val(val, layout)
            }
            CPlaceInner::Addr(ptr, extra) => {
                if let Some(extra) = extra {
                    CValue::by_ref_unsized(ptr, extra, layout)
                } else {
                    CValue::by_ref(ptr, layout)
                }
            }
        }
    }

    pub(crate) fn to_ptr(self) -> Pointer {
        match self.to_ptr_maybe_unsized() {
            (ptr, None) => ptr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    pub(crate) fn to_ptr_maybe_unsized(self) -> (Pointer, Option<Value>) {
        match self.inner {
            CPlaceInner::Addr(ptr, extra) => (ptr, extra),
            CPlaceInner::Var(_, _)
            | CPlaceInner::VarPair(_, _, _)
            | CPlaceInner::VarLane(_, _, _) => bug!("Expected CPlace::Addr, found {:?}", self),
        }
    }

    pub(crate) fn write_cvalue(self, fx: &mut FunctionCx<'_, '_, 'tcx>, from: CValue<'tcx>) {
        assert_assignable(fx, from.layout().ty, self.layout().ty);

        self.write_cvalue_maybe_transmute(fx, from, "write_cvalue");
    }

    pub(crate) fn write_cvalue_transmute(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        from: CValue<'tcx>,
    ) {
        self.write_cvalue_maybe_transmute(fx, from, "write_cvalue_transmute");
    }

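    /// Write `from` into this place. The source and destination layouts only need to agree in
    /// size, so this also serves as the implementation of transmuting writes; `method` is only
    /// used for the optional clif comment.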
    fn write_cvalue_maybe_transmute(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        from: CValue<'tcx>,
        method: &'static str,
    ) {
        fn transmute_value<'tcx>(
            fx: &mut FunctionCx<'_, '_, 'tcx>,
            var: Variable,
            data: Value,
            dst_ty: Type,
        ) {
            let src_ty = fx.bcx.func.dfg.value_type(data);
            assert_eq!(
                src_ty.bytes(),
                dst_ty.bytes(),
                "write_cvalue_transmute: {:?} -> {:?}",
                src_ty,
                dst_ty,
            );
            let data = match (src_ty, dst_ty) {
                (_, _) if src_ty == dst_ty => data,

                // This is a `write_cvalue_transmute`.
                (types::I32, types::F32)
                | (types::F32, types::I32)
                | (types::I64, types::F64)
                | (types::F64, types::I64) => fx.bcx.ins().bitcast(dst_ty, data),
                _ if src_ty.is_vector() && dst_ty.is_vector() => {
                    fx.bcx.ins().raw_bitcast(dst_ty, data)
                }
                _ if src_ty.is_vector() || dst_ty.is_vector() => {
                    // FIXME do something more efficient for transmutes between vectors and integers.
                    let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
                        kind: StackSlotKind::ExplicitSlot,
                        // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
                        // specify stack slot alignment.
                        size: (src_ty.bytes() + 15) / 16 * 16,
                    });
                    let ptr = Pointer::stack_slot(stack_slot);
                    ptr.store(fx, data, MemFlags::trusted());
                    ptr.load(fx, dst_ty, MemFlags::trusted())
                }

                // `CValue`s should never contain SSA-only types, so if you ended
                // up here having seen an error like `B1 -> I8`, then before
                // calling `write_cvalue` you need to add a `bint` instruction.
                _ => unreachable!("write_cvalue_transmute: {:?} -> {:?}", src_ty, dst_ty),
            };
            //fx.bcx.set_val_label(data, cranelift_codegen::ir::ValueLabel::new(var.index()));
            fx.bcx.def_var(var, data);
        }

        assert_eq!(self.layout().size, from.layout().size);

        if fx.clif_comments.enabled() {
            use cranelift_codegen::cursor::{Cursor, CursorPosition};
            let cur_block = match fx.bcx.cursor().position() {
                CursorPosition::After(block) => block,
                _ => unreachable!(),
            };
            fx.add_comment(
                fx.bcx.func.layout.last_inst(cur_block).unwrap(),
                format!(
                    "{}: {:?}: {:?} <- {:?}: {:?}",
                    method,
                    self.inner(),
                    self.layout().ty,
                    from.0,
                    from.layout().ty
                ),
            );
        }

        let dst_layout = self.layout();
        let to_ptr = match self.inner {
            CPlaceInner::Var(_local, var) => {
                if let ty::Array(element, len) = dst_layout.ty.kind() {
                    // Can only happen for vector types
                    let len =
                        u16::try_from(len.eval_usize(fx.tcx, ParamEnv::reveal_all())).unwrap();
                    let vector_ty = fx.clif_type(*element).unwrap().by(len).unwrap();

                    let data = match from.0 {
                        CValueInner::ByRef(ptr, None) => {
                            let mut flags = MemFlags::new();
                            flags.set_notrap();
                            ptr.load(fx, vector_ty, flags)
                        }
                        CValueInner::ByVal(_)
                        | CValueInner::ByValPair(_, _)
                        | CValueInner::ByRef(_, Some(_)) => bug!("array should be ByRef"),
                    };

                    fx.bcx.def_var(var, data);
                    return;
                }
                let data = CValue(from.0, dst_layout).load_scalar(fx);
                let dst_ty = fx.clif_type(self.layout().ty).unwrap();
                transmute_value(fx, var, data, dst_ty);
                return;
            }
            CPlaceInner::VarPair(_local, var1, var2) => {
                let (data1, data2) = CValue(from.0, dst_layout).load_scalar_pair(fx);
                let (dst_ty1, dst_ty2) = fx.clif_pair_type(self.layout().ty).unwrap();
                transmute_value(fx, var1, data1, dst_ty1);
                transmute_value(fx, var2, data2, dst_ty2);
                return;
            }
            CPlaceInner::VarLane(_local, var, lane) => {
                let data = from.load_scalar(fx);

                // First get the old vector
                let vector = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(vector, cranelift_codegen::ir::ValueLabel::new(var.index()));

                // Next insert the written lane into the vector
                let vector = fx.bcx.ins().insertlane(vector, data, lane);

                // Finally write the new vector
                //fx.bcx.set_val_label(vector, cranelift_codegen::ir::ValueLabel::new(var.index()));
                fx.bcx.def_var(var, vector);

                return;
            }
            CPlaceInner::Addr(ptr, None) => {
                if dst_layout.size == Size::ZERO || dst_layout.abi == Abi::Uninhabited {
                    return;
                }
                ptr
            }
            CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
        };

        let mut flags = MemFlags::new();
        flags.set_notrap();
        match from.layout().abi {
            // FIXME make Abi::Vector work too
            Abi::Scalar(_) => {
                let val = from.load_scalar(fx);
                to_ptr.store(fx, val, flags);
                return;
            }
            Abi::ScalarPair(a_scalar, b_scalar) => {
                let (value, extra) = from.load_scalar_pair(fx);
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                to_ptr.store(fx, value, flags);
                to_ptr.offset(fx, b_offset).store(fx, extra, flags);
                return;
            }
            _ => {}
        }

        match from.0 {
            CValueInner::ByVal(val) => {
                to_ptr.store(fx, val, flags);
            }
            CValueInner::ByValPair(_, _) => {
                bug!("Non ScalarPair abi {:?} for ByValPair CValue", dst_layout.abi);
            }
            CValueInner::ByRef(from_ptr, None) => {
                let from_addr = from_ptr.get_addr(fx);
                let to_addr = to_ptr.get_addr(fx);
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memory_copy(
                    fx.target_config,
                    to_addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                    true,
                    MemFlags::trusted(),
                );
            }
            CValueInner::ByRef(_, Some(_)) => todo!(),
        }
    }

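    /// Get a [`CPlace`] referring to the given field of this place.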
    pub(crate) fn place_field(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();

        match self.inner {
            CPlaceInner::Var(local, var) => match layout.ty.kind() {
                ty::Array(_, _) => {
                    // Can only happen for vector types
                    return CPlace {
                        inner: CPlaceInner::VarLane(local, var, field.as_u32().try_into().unwrap()),
                        layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                    };
                }
                ty::Adt(adt_def, substs) if layout.ty.is_simd() => {
                    let f0_ty = adt_def.non_enum_variant().fields[0].ty(fx.tcx, substs);

                    match f0_ty.kind() {
                        ty::Array(_, _) => {
                            assert_eq!(field.as_u32(), 0);
                            return CPlace {
                                inner: CPlaceInner::Var(local, var),
                                layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                            };
                        }
                        _ => {
                            return CPlace {
                                inner: CPlaceInner::VarLane(
                                    local,
                                    var,
                                    field.as_u32().try_into().unwrap(),
                                ),
                                layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                            };
                        }
                    }
                }
                _ => {}
            },
            CPlaceInner::VarPair(local, var1, var2) => {
                let layout = layout.field(&*fx, field.index());

                match field.as_u32() {
                    0 => return CPlace { inner: CPlaceInner::Var(local, var1), layout },
                    1 => return CPlace { inner: CPlaceInner::Var(local, var2), layout },
                    _ => unreachable!("field should be 0 or 1"),
                }
            }
            _ => {}
        }

        let (base, extra) = self.to_ptr_maybe_unsized();

        let (field_ptr, field_layout) = codegen_field(fx, base, extra, layout, field);
        if field_layout.is_unsized() {
            if let ty::Foreign(_) = field_layout.ty.kind() {
                assert!(extra.is_none());
                CPlace::for_ptr(field_ptr, field_layout)
            } else {
                CPlace::for_ptr_with_extra(field_ptr, extra.unwrap(), field_layout)
            }
        } else {
            CPlace::for_ptr(field_ptr, field_layout)
        }
    }

    /// Like [`CPlace::place_field`] except handling ADTs containing a single array field in a way
    /// such that you can access individual lanes.
    pub(crate) fn place_lane(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        lane_idx: u64,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        assert!(layout.ty.is_simd());
        let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
        let lane_layout = fx.layout_of(lane_ty);
        assert!(lane_idx < lane_count);

        match self.inner {
            CPlaceInner::Var(local, var) => {
                assert!(matches!(layout.abi, Abi::Vector { .. }));
                CPlace {
                    inner: CPlaceInner::VarLane(local, var, lane_idx.try_into().unwrap()),
                    layout: lane_layout,
                }
            }
            CPlaceInner::VarPair(_, _, _) => unreachable!(),
            CPlaceInner::VarLane(_, _, _) => unreachable!(),
            CPlaceInner::Addr(ptr, None) => {
                let field_offset = lane_layout.size * lane_idx;
                let field_ptr = ptr.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap());
                CPlace::for_ptr(field_ptr, lane_layout)
            }
            CPlaceInner::Addr(_, Some(_)) => unreachable!(),
        }
    }

    pub(crate) fn place_index(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, ptr) = match self.layout().ty.kind() {
            ty::Array(elem_ty, _) => (fx.layout_of(*elem_ty), self.to_ptr()),
            ty::Slice(elem_ty) => (fx.layout_of(*elem_ty), self.to_ptr_maybe_unsized().0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

        let offset = fx.bcx.ins().imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::for_ptr(ptr.offset_value(fx, offset), elem_layout)
    }

    pub(crate) fn place_deref(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if has_ptr_meta(fx.tcx, inner_layout.ty) {
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::for_ptr_with_extra(Pointer::new(addr), extra, inner_layout)
        } else {
            CPlace::for_ptr(Pointer::new(self.to_cvalue(fx).load_scalar(fx)), inner_layout)
        }
    }

    pub(crate) fn place_ref(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        if has_ptr_meta(fx.tcx, self.layout().ty) {
            let (ptr, extra) = self.to_ptr_maybe_unsized();
            CValue::by_val_pair(
                ptr.get_addr(fx),
                extra.expect("unsized type without metadata"),
                layout,
            )
        } else {
            CValue::by_val(self.to_ptr().get_addr(fx), layout)
        }
    }

    pub(crate) fn downcast_variant(
        self,
        fx: &FunctionCx<'_, '_, 'tcx>,
        variant: VariantIdx,
    ) -> Self {
        assert!(!self.layout().is_unsized());
        let layout = self.layout().for_variant(fx, variant);
        CPlace { inner: self.inner, layout }
    }
}

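/// Check that a value of type `from_ty` can be written to a place of type `to_ty`, ignoring
/// differences that do not matter for codegen, such as lifetimes and pointer mutability.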
#[track_caller]
pub(crate) fn assert_assignable<'tcx>(
    fx: &FunctionCx<'_, '_, 'tcx>,
    from_ty: Ty<'tcx>,
    to_ty: Ty<'tcx>,
) {
    match (from_ty.kind(), to_ty.kind()) {
        (ty::Ref(_, a, _), ty::Ref(_, b, _))
        | (
            ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }),
            ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }),
        ) => {
            assert_assignable(fx, *a, *b);
        }
        (ty::Ref(_, a, _), ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }))
        | (ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }), ty::Ref(_, b, _)) => {
            assert_assignable(fx, *a, *b);
        }
        (ty::FnPtr(_), ty::FnPtr(_)) => {
            let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
                ParamEnv::reveal_all(),
                from_ty.fn_sig(fx.tcx),
            );
            let to_sig = fx
                .tcx
                .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_ty.fn_sig(fx.tcx));
            assert_eq!(
                from_sig, to_sig,
                "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
                from_sig, to_sig, fx,
            );
            // fn(&T) -> for<'l> fn(&'l T) is allowed
        }
        (&ty::Dynamic(from_traits, _), &ty::Dynamic(to_traits, _)) => {
            for (from, to) in from_traits.iter().zip(to_traits) {
                let from =
                    fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from);
                let to = fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to);
                assert_eq!(
                    from, to,
                    "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
                    from_traits, to_traits, fx,
                );
            }
            // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
        }
        (&ty::Adt(adt_def_a, substs_a), &ty::Adt(adt_def_b, substs_b))
            if adt_def_a.did() == adt_def_b.did() =>
        {
            let mut types_a = substs_a.types();
            let mut types_b = substs_b.types();
            loop {
                match (types_a.next(), types_b.next()) {
                    (Some(a), Some(b)) => assert_assignable(fx, a, b),
                    (None, None) => return,
                    (Some(_), None) | (None, Some(_)) => panic!("{:#?}/{:#?}", from_ty, to_ty),
                }
            }
        }
        _ => {
            assert_eq!(
                from_ty, to_ty,
                "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
                from_ty, to_ty, fx,
            );
        }
    }
}