]>
Commit | Line | Data |
---|---|---|
29967ef6 XL |
1 | //! Definition of [`CValue`] and [`CPlace`] |
2 | ||
3 | use crate::prelude::*; | |
4 | ||
29967ef6 XL |
5 | use cranelift_codegen::ir::immediates::Offset32; |
6 | ||
/// Compute the pointer and layout of field `field` of a value of layout `layout`
/// located at `base`, with `extra` being the unsized metadata (if any) of the
/// outer value.
///
/// For sized fields this is a constant byte offset from `base`. For unsized
/// tail fields of non-packed ADTs the statically known offset must be rounded
/// up to the dynamic alignment of the tail, which is computed at runtime.
fn codegen_field<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    base: Pointer,
    extra: Option<Value>,
    layout: TyAndLayout<'tcx>,
    field: mir::Field,
) -> (Pointer, TyAndLayout<'tcx>) {
    let field_offset = layout.fields.offset(field.index());
    let field_layout = layout.field(&*fx, field.index());

    // Fast path: the field lives at a statically known byte offset.
    let simple = |fx: &mut FunctionCx<'_, '_, '_>| {
        (base.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap()), field_layout)
    };

    if let Some(extra) = extra {
        if field_layout.is_sized() {
            // A sized field of an unsized value still sits at a static offset.
            return simple(fx);
        }
        match field_layout.ty.kind() {
            // Slices, str and extern types have alignment <= the static one,
            // so the static offset is already correctly aligned.
            ty::Slice(..) | ty::Str | ty::Foreign(..) => simple(fx),
            ty::Adt(def, _) if def.repr().packed() => {
                // Packed ADTs have alignment 1, so no dynamic re-alignment is needed.
                assert_eq!(layout.align.abi.bytes(), 1);
                simple(fx)
            }
            _ => {
                // We have to align the offset for DST's
                let unaligned_offset = field_offset.bytes();
                let (_, unsized_align) =
                    crate::unsize::size_and_align_of_dst(fx, field_layout, extra);

                // offset = (unaligned_offset + align - 1) & -align,
                // computed with runtime `unsized_align` (a power of two).
                let one = fx.bcx.ins().iconst(fx.pointer_type, 1);
                let align_sub_1 = fx.bcx.ins().isub(unsized_align, one);
                let and_lhs = fx.bcx.ins().iadd_imm(align_sub_1, unaligned_offset as i64);
                let zero = fx.bcx.ins().iconst(fx.pointer_type, 0);
                let and_rhs = fx.bcx.ins().isub(zero, unsized_align);
                let offset = fx.bcx.ins().band(and_lhs, and_rhs);

                (base.offset_value(fx, offset), field_layout)
            }
        }
    } else {
        simple(fx)
    }
}
51 | ||
c295e0f8 | 52 | fn scalar_pair_calculate_b_offset(tcx: TyCtxt<'_>, a_scalar: Scalar, b_scalar: Scalar) -> Offset32 { |
04454e1e | 53 | let b_offset = a_scalar.size(&tcx).align_to(b_scalar.align(&tcx).abi); |
29967ef6 XL |
54 | Offset32::new(b_offset.bytes().try_into().unwrap()) |
55 | } | |
56 | ||
/// A read-only value
#[derive(Debug, Copy, Clone)]
pub(crate) struct CValue<'tcx>(CValueInner, TyAndLayout<'tcx>);

/// How the bits of a [`CValue`] are represented.
#[derive(Debug, Copy, Clone)]
enum CValueInner {
    /// Stored in memory behind a pointer; the `Option<Value>` is the unsized
    /// metadata (e.g. slice length / vtable pointer), if any.
    ByRef(Pointer, Option<Value>),
    /// Held directly in a single SSA value.
    ByVal(Value),
    /// Held in two SSA values (scalar-pair ABI).
    ByValPair(Value, Value),
}
67 | ||
impl<'tcx> CValue<'tcx> {
    /// Create a value stored in memory behind `ptr` (sized; no metadata).
    pub(crate) fn by_ref(ptr: Pointer, layout: TyAndLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(ptr, None), layout)
    }

    /// Create a value stored in memory behind `ptr` with unsized metadata `meta`.
    pub(crate) fn by_ref_unsized(
        ptr: Pointer,
        meta: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        CValue(CValueInner::ByRef(ptr, Some(meta)), layout)
    }

    /// Create a value held directly in a single SSA value.
    pub(crate) fn by_val(value: Value, layout: TyAndLayout<'tcx>) -> CValue<'tcx> {
        CValue(CValueInner::ByVal(value), layout)
    }

    /// Create a value held in two SSA values (scalar-pair ABI).
    pub(crate) fn by_val_pair(
        value: Value,
        extra: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        CValue(CValueInner::ByValPair(value, extra), layout)
    }

    /// The type and layout of this value.
    pub(crate) fn layout(&self) -> TyAndLayout<'tcx> {
        self.1
    }

    // FIXME remove
    /// Get a pointer to this value, spilling `ByVal`/`ByValPair` values into a
    /// fresh stack slot first. The second element is the unsized metadata, if any.
    pub(crate) fn force_stack(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> (Pointer, Option<Value>) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, meta) => (ptr, meta),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => {
                let cplace = CPlace::new_stack_slot(fx, layout);
                cplace.write_cvalue(fx, self);
                (cplace.to_ptr(), None)
            }
        }
    }

    // FIXME remove
    /// Forces the data value of a dyn* value to the stack and returns a pointer to it as well as the
    /// vtable pointer.
    pub(crate) fn dyn_star_force_data_on_stack(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
    ) -> (Value, Value) {
        assert!(self.1.ty.is_dyn_star());

        match self.0 {
            CValueInner::ByRef(ptr, None) => {
                // dyn* is a ScalarPair of (data, vtable); the data is already in
                // memory, so only the vtable needs to be loaded.
                let (a_scalar, b_scalar) = match self.1.abi {
                    Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("dyn_star_force_data_on_stack({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar);
                let mut flags = MemFlags::new();
                flags.set_notrap();
                let vtable = ptr.offset(fx, b_offset).load(fx, clif_ty2, flags);
                (ptr.get_addr(fx), vtable)
            }
            CValueInner::ByValPair(data, vtable) => {
                // Spill the pointer-sized data part into a stack slot.
                let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData {
                    kind: StackSlotKind::ExplicitSlot,
                    // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
                    // specify stack slot alignment.
                    size: (u32::try_from(fx.target_config.pointer_type().bytes()).unwrap() + 15)
                        / 16
                        * 16,
                });
                let data_ptr = Pointer::stack_slot(stack_slot);
                let mut flags = MemFlags::new();
                flags.set_notrap();
                data_ptr.store(fx, data, flags);

                (data_ptr.get_addr(fx), vtable)
            }
            CValueInner::ByRef(_, Some(_)) | CValueInner::ByVal(_) => {
                unreachable!("dyn_star_force_data_on_stack({:?})", self)
            }
        }
    }

    /// Return the backing pointer and metadata if this value is `ByRef`, `None` otherwise.
    pub(crate) fn try_to_ptr(self) -> Option<(Pointer, Option<Value>)> {
        match self.0 {
            CValueInner::ByRef(ptr, meta) => Some((ptr, meta)),
            CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => None,
        }
    }

    /// Load a value with layout.abi of scalar
    pub(crate) fn load_scalar(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> Value {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, None) => {
                let clif_ty = match layout.abi {
                    Abi::Scalar(scalar) => scalar_to_clif_type(fx.tcx, scalar),
                    Abi::Vector { element, count } => scalar_to_clif_type(fx.tcx, element)
                        .by(u32::try_from(count).unwrap())
                        .unwrap(),
                    _ => unreachable!("{:?}", layout.ty),
                };
                let mut flags = MemFlags::new();
                flags.set_notrap();
                ptr.load(fx, clif_ty, flags)
            }
            CValueInner::ByVal(value) => value,
            CValueInner::ByRef(_, Some(_)) => bug!("load_scalar for unsized value not allowed"),
            CValueInner::ByValPair(_, _) => bug!("Please use load_scalar_pair for ByValPair"),
        }
    }

    /// Load a value pair with layout.abi of scalar pair
    pub(crate) fn load_scalar_pair(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> (Value, Value) {
        let layout = self.1;
        match self.0 {
            CValueInner::ByRef(ptr, None) => {
                let (a_scalar, b_scalar) = match layout.abi {
                    Abi::ScalarPair(a, b) => (a, b),
                    _ => unreachable!("load_scalar_pair({:?})", self),
                };
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                let clif_ty1 = scalar_to_clif_type(fx.tcx, a_scalar);
                let clif_ty2 = scalar_to_clif_type(fx.tcx, b_scalar);
                let mut flags = MemFlags::new();
                flags.set_notrap();
                let val1 = ptr.load(fx, clif_ty1, flags);
                let val2 = ptr.offset(fx, b_offset).load(fx, clif_ty2, flags);
                (val1, val2)
            }
            CValueInner::ByRef(_, Some(_)) => {
                bug!("load_scalar_pair for unsized value not allowed")
            }
            CValueInner::ByVal(_) => bug!("Please use load_scalar for ByVal"),
            CValueInner::ByValPair(val1, val2) => (val1, val2),
        }
    }

    /// Project to field `field` of this value, returning a new `CValue`.
    pub(crate) fn value_field(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        field: mir::Field,
    ) -> CValue<'tcx> {
        let layout = self.1;
        match self.0 {
            CValueInner::ByVal(val) => match layout.abi {
                // A ByVal vector: extract the requested lane.
                Abi::Vector { element: _, count } => {
                    let count = u8::try_from(count).expect("SIMD type with more than 255 lanes???");
                    let field = u8::try_from(field.index()).unwrap();
                    assert!(field < count);
                    let lane = fx.bcx.ins().extractlane(val, field);
                    let field_layout = layout.field(&*fx, usize::from(field));
                    CValue::by_val(lane, field_layout)
                }
                _ => unreachable!("value_field for ByVal with abi {:?}", layout.abi),
            },
            CValueInner::ByValPair(val1, val2) => match layout.abi {
                // A scalar pair: field 0 is the first SSA value, field 1 the second.
                Abi::ScalarPair(_, _) => {
                    let val = match field.as_u32() {
                        0 => val1,
                        1 => val2,
                        _ => bug!("field should be 0 or 1"),
                    };
                    let field_layout = layout.field(&*fx, usize::from(field));
                    CValue::by_val(val, field_layout)
                }
                _ => unreachable!("value_field for ByValPair with abi {:?}", layout.abi),
            },
            CValueInner::ByRef(ptr, None) => {
                let (field_ptr, field_layout) = codegen_field(fx, ptr, None, layout, field);
                CValue::by_ref(field_ptr, field_layout)
            }
            CValueInner::ByRef(_, Some(_)) => todo!(),
        }
    }

    /// Like [`CValue::value_field`] except handling ADTs containing a single array field in a way
    /// such that you can access individual lanes.
    pub(crate) fn value_lane(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        lane_idx: u64,
    ) -> CValue<'tcx> {
        let layout = self.1;
        assert!(layout.ty.is_simd());
        let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
        let lane_layout = fx.layout_of(lane_ty);
        assert!(lane_idx < lane_count);
        match self.0 {
            CValueInner::ByVal(val) => match layout.abi {
                Abi::Vector { element: _, count: _ } => {
                    assert!(lane_count <= u8::MAX.into(), "SIMD type with more than 255 lanes???");
                    let lane_idx = u8::try_from(lane_idx).unwrap();
                    let lane = fx.bcx.ins().extractlane(val, lane_idx);
                    CValue::by_val(lane, lane_layout)
                }
                _ => unreachable!("value_lane for ByVal with abi {:?}", layout.abi),
            },
            CValueInner::ByValPair(_, _) => unreachable!(),
            CValueInner::ByRef(ptr, None) => {
                // In-memory vector: lanes are laid out contiguously.
                let field_offset = lane_layout.size * lane_idx;
                let field_ptr = ptr.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap());
                CValue::by_ref(field_ptr, lane_layout)
            }
            CValueInner::ByRef(_, Some(_)) => unreachable!(),
        }
    }

    /// Perform an unsizing coercion of this value into `dest`.
    pub(crate) fn unsize_value(self, fx: &mut FunctionCx<'_, '_, 'tcx>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_unsized_into(fx, self, dest);
    }

    /// Perform a dyn* coercion of this value into `dest`.
    pub(crate) fn coerce_dyn_star(self, fx: &mut FunctionCx<'_, '_, 'tcx>, dest: CPlace<'tcx>) {
        crate::unsize::coerce_dyn_star(fx, self, dest);
    }

    /// If `ty` is signed, `const_val` must already be sign extended.
    pub(crate) fn const_val(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
        const_val: ty::ScalarInt,
    ) -> CValue<'tcx> {
        assert_eq!(const_val.size(), layout.size, "{:#?}: {:?}", const_val, layout);
        use cranelift_codegen::ir::immediates::{Ieee32, Ieee64};

        let clif_ty = fx.clif_type(layout.ty).unwrap();

        if let ty::Bool = layout.ty.kind() {
            assert!(
                const_val == ty::ScalarInt::FALSE || const_val == ty::ScalarInt::TRUE,
                "Invalid bool 0x{:032X}",
                const_val
            );
        }

        let val = match layout.ty.kind() {
            ty::Uint(UintTy::U128) | ty::Int(IntTy::I128) => {
                // 128-bit constants are built from two 64-bit halves.
                let const_val = const_val.to_bits(layout.size).unwrap();
                let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
                let msb = fx.bcx.ins().iconst(types::I64, (const_val >> 64) as u64 as i64);
                fx.bcx.ins().iconcat(lsb, msb)
            }
            ty::Bool | ty::Char | ty::Uint(_) | ty::Int(_) | ty::Ref(..) | ty::RawPtr(..) => {
                fx.bcx.ins().iconst(clif_ty, const_val.to_bits(layout.size).unwrap() as i64)
            }
            ty::Float(FloatTy::F32) => {
                fx.bcx.ins().f32const(Ieee32::with_bits(u32::try_from(const_val).unwrap()))
            }
            ty::Float(FloatTy::F64) => {
                fx.bcx.ins().f64const(Ieee64::with_bits(u64::try_from(const_val).unwrap()))
            }
            _ => panic!(
                "CValue::const_val for non bool/char/float/integer/pointer type {:?} is not allowed",
                layout.ty
            ),
        };

        CValue::by_val(val, layout)
    }

    /// Reinterpret this pointer-typed value as having pointer type `layout`
    /// without changing the bits; both layouts must have the same ABI.
    pub(crate) fn cast_pointer_to(self, layout: TyAndLayout<'tcx>) -> Self {
        assert!(matches!(self.layout().ty.kind(), ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..)));
        assert!(matches!(layout.ty.kind(), ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..)));
        assert_eq!(self.layout().abi, layout.abi);
        CValue(self.0, layout)
    }
}
338 | ||
/// A place where you can write a value to or read a value from
#[derive(Debug, Copy, Clone)]
pub(crate) struct CPlace<'tcx> {
    inner: CPlaceInner,
    layout: TyAndLayout<'tcx>,
}

/// Where the storage of a [`CPlace`] lives.
#[derive(Debug, Copy, Clone)]
pub(crate) enum CPlaceInner {
    /// A single SSA variable for MIR local `Local`.
    Var(Local, Variable),
    /// Two SSA variables for MIR local `Local` (scalar-pair ABI).
    VarPair(Local, Variable, Variable),
    /// One lane (the `u8` index) of an SSA vector variable for MIR local `Local`.
    VarLane(Local, Variable, u8),
    /// In memory at the given pointer; the `Option<Value>` is unsized metadata, if any.
    Addr(Pointer, Option<Value>),
}
353 | ||
impl<'tcx> CPlace<'tcx> {
    /// The type and layout of this place.
    pub(crate) fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    /// The storage backing this place.
    pub(crate) fn inner(&self) -> &CPlaceInner {
        &self.inner
    }

    /// Allocate a new stack slot big enough for `layout` and return it as a place.
    ///
    /// Zero-sized layouts get a dangling (but suitably aligned) pointer instead
    /// of a real slot. Aborts compilation for layouts too large for Cranelift's
    /// 32-bit stack slot sizes.
    pub(crate) fn new_stack_slot(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        assert!(layout.is_sized());
        if layout.size.bytes() == 0 {
            return CPlace {
                inner: CPlaceInner::Addr(Pointer::dangling(layout.align.pref), None),
                layout,
            };
        }

        if layout.size.bytes() >= u64::from(u32::MAX - 16) {
            fx.tcx
                .sess
                .fatal(&format!("values of type {} are too big to store on the stack", layout.ty));
        }

        let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
            // specify stack slot alignment.
            size: (u32::try_from(layout.size.bytes()).unwrap() + 15) / 16 * 16,
        });
        CPlace { inner: CPlaceInner::Addr(Pointer::stack_slot(stack_slot), None), layout }
    }

    /// Create a place backed by a fresh SSA variable for MIR local `local`.
    pub(crate) fn new_var(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        local: Local,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        let var = Variable::from_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;
        fx.bcx.declare_var(var, fx.clif_type(layout.ty).unwrap());
        CPlace { inner: CPlaceInner::Var(local, var), layout }
    }

    /// Create a place backed by two fresh SSA variables (scalar-pair ABI) for
    /// MIR local `local`.
    pub(crate) fn new_var_pair(
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        local: Local,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        let var1 = Variable::from_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;
        let var2 = Variable::from_u32(fx.next_ssa_var);
        fx.next_ssa_var += 1;

        let (ty1, ty2) = fx.clif_pair_type(layout.ty).unwrap();
        fx.bcx.declare_var(var1, ty1);
        fx.bcx.declare_var(var2, ty2);
        CPlace { inner: CPlaceInner::VarPair(local, var1, var2), layout }
    }

    /// Create a place for a sized value at `ptr`.
    pub(crate) fn for_ptr(ptr: Pointer, layout: TyAndLayout<'tcx>) -> CPlace<'tcx> {
        CPlace { inner: CPlaceInner::Addr(ptr, None), layout }
    }

    /// Create a place for an unsized value at `ptr` with metadata `extra`.
    pub(crate) fn for_ptr_with_extra(
        ptr: Pointer,
        extra: Value,
        layout: TyAndLayout<'tcx>,
    ) -> CPlace<'tcx> {
        CPlace { inner: CPlaceInner::Addr(ptr, Some(extra)), layout }
    }

    /// Read this place as a [`CValue`] without copying the underlying storage.
    pub(crate) fn to_cvalue(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> CValue<'tcx> {
        let layout = self.layout();
        match self.inner {
            CPlaceInner::Var(_local, var) => {
                let val = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(val, cranelift_codegen::ir::ValueLabel::new(var.index()));
                CValue::by_val(val, layout)
            }
            CPlaceInner::VarPair(_local, var1, var2) => {
                let val1 = fx.bcx.use_var(var1);
                //fx.bcx.set_val_label(val1, cranelift_codegen::ir::ValueLabel::new(var1.index()));
                let val2 = fx.bcx.use_var(var2);
                //fx.bcx.set_val_label(val2, cranelift_codegen::ir::ValueLabel::new(var2.index()));
                CValue::by_val_pair(val1, val2, layout)
            }
            CPlaceInner::VarLane(_local, var, lane) => {
                let val = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(val, cranelift_codegen::ir::ValueLabel::new(var.index()));
                let val = fx.bcx.ins().extractlane(val, lane);
                CValue::by_val(val, layout)
            }
            CPlaceInner::Addr(ptr, extra) => {
                if let Some(extra) = extra {
                    CValue::by_ref_unsized(ptr, extra, layout)
                } else {
                    CValue::by_ref(ptr, layout)
                }
            }
        }
    }

    /// Pointer to this place; panics (ICE) if the place is unsized or not in memory.
    pub(crate) fn to_ptr(self) -> Pointer {
        match self.to_ptr_maybe_unsized() {
            (ptr, None) => ptr,
            (_, Some(_)) => bug!("Expected sized cplace, found {:?}", self),
        }
    }

    /// Pointer and optional unsized metadata of this place; panics (ICE) if the
    /// place is backed by SSA variables rather than memory.
    pub(crate) fn to_ptr_maybe_unsized(self) -> (Pointer, Option<Value>) {
        match self.inner {
            CPlaceInner::Addr(ptr, extra) => (ptr, extra),
            CPlaceInner::Var(_, _)
            | CPlaceInner::VarPair(_, _, _)
            | CPlaceInner::VarLane(_, _, _) => bug!("Expected CPlace::Addr, found {:?}", self),
        }
    }

    /// Write `from` into this place, asserting that the source type is
    /// assignable to the destination type.
    pub(crate) fn write_cvalue(self, fx: &mut FunctionCx<'_, '_, 'tcx>, from: CValue<'tcx>) {
        assert_assignable(fx, from.layout().ty, self.layout().ty, 16);

        self.write_cvalue_maybe_transmute(fx, from, "write_cvalue");
    }

    /// Write `from` into this place without type-assignability checks
    /// (a same-size transmute).
    pub(crate) fn write_cvalue_transmute(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        from: CValue<'tcx>,
    ) {
        self.write_cvalue_maybe_transmute(fx, from, "write_cvalue_transmute");
    }

    /// Shared implementation of [`CPlace::write_cvalue`] and
    /// [`CPlace::write_cvalue_transmute`]. `method` is only used for the
    /// clif comment emitted when comments are enabled.
    fn write_cvalue_maybe_transmute(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        from: CValue<'tcx>,
        method: &'static str,
    ) {
        // Define `var` to `data`, bit-casting between same-sized clif types
        // where necessary (int<->float directly, vectors via a stack slot).
        fn transmute_value<'tcx>(
            fx: &mut FunctionCx<'_, '_, 'tcx>,
            var: Variable,
            data: Value,
            dst_ty: Type,
        ) {
            let src_ty = fx.bcx.func.dfg.value_type(data);
            assert_eq!(
                src_ty.bytes(),
                dst_ty.bytes(),
                "write_cvalue_transmute: {:?} -> {:?}",
                src_ty,
                dst_ty,
            );
            let data = match (src_ty, dst_ty) {
                (_, _) if src_ty == dst_ty => data,

                // This is a `write_cvalue_transmute`.
                (types::I32, types::F32)
                | (types::F32, types::I32)
                | (types::I64, types::F64)
                | (types::F64, types::I64) => fx.bcx.ins().bitcast(dst_ty, data),
                _ if src_ty.is_vector() && dst_ty.is_vector() => fx.bcx.ins().bitcast(dst_ty, data),
                _ if src_ty.is_vector() || dst_ty.is_vector() => {
                    // FIXME do something more efficient for transmutes between vectors and integers.
                    let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData {
                        kind: StackSlotKind::ExplicitSlot,
                        // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
                        // specify stack slot alignment.
                        size: (src_ty.bytes() + 15) / 16 * 16,
                    });
                    let ptr = Pointer::stack_slot(stack_slot);
                    ptr.store(fx, data, MemFlags::trusted());
                    ptr.load(fx, dst_ty, MemFlags::trusted())
                }

                // `CValue`s should never contain SSA-only types, so if you ended
                // up here having seen an error like `B1 -> I8`, then before
                // calling `write_cvalue` you need to add a `bint` instruction.
                _ => unreachable!("write_cvalue_transmute: {:?} -> {:?}", src_ty, dst_ty),
            };
            //fx.bcx.set_val_label(data, cranelift_codegen::ir::ValueLabel::new(var.index()));
            fx.bcx.def_var(var, data);
        }

        assert_eq!(self.layout().size, from.layout().size);

        if fx.clif_comments.enabled() {
            use cranelift_codegen::cursor::{Cursor, CursorPosition};
            let cur_block = match fx.bcx.cursor().position() {
                CursorPosition::After(block) => block,
                _ => unreachable!(),
            };
            fx.add_comment(
                fx.bcx.func.layout.last_inst(cur_block).unwrap(),
                format!(
                    "{}: {:?}: {:?} <- {:?}: {:?}",
                    method,
                    self.inner(),
                    self.layout().ty,
                    from.0,
                    from.layout().ty
                ),
            );
        }

        let dst_layout = self.layout();
        // SSA-backed destinations are handled (and return) inside this match;
        // only memory-backed destinations fall through with their pointer.
        let to_ptr = match self.inner {
            CPlaceInner::Var(_local, var) => {
                if let ty::Array(element, len) = dst_layout.ty.kind() {
                    // Can only happen for vector types
                    let len =
                        u32::try_from(len.eval_usize(fx.tcx, ParamEnv::reveal_all())).unwrap();
                    let vector_ty = fx.clif_type(*element).unwrap().by(len).unwrap();

                    let data = match from.0 {
                        CValueInner::ByRef(ptr, None) => {
                            let mut flags = MemFlags::new();
                            flags.set_notrap();
                            ptr.load(fx, vector_ty, flags)
                        }
                        CValueInner::ByVal(_)
                        | CValueInner::ByValPair(_, _)
                        | CValueInner::ByRef(_, Some(_)) => bug!("array should be ByRef"),
                    };

                    fx.bcx.def_var(var, data);
                    return;
                }
                let data = CValue(from.0, dst_layout).load_scalar(fx);
                let dst_ty = fx.clif_type(self.layout().ty).unwrap();
                transmute_value(fx, var, data, dst_ty);
                return;
            }
            CPlaceInner::VarPair(_local, var1, var2) => {
                // Load the source as a scalar pair using the *destination*
                // layout, so a same-size transmute between pair layouts works.
                let (ptr, meta) = from.force_stack(fx);
                assert!(meta.is_none());
                let (data1, data2) =
                    CValue(CValueInner::ByRef(ptr, None), dst_layout).load_scalar_pair(fx);
                let (dst_ty1, dst_ty2) = fx.clif_pair_type(self.layout().ty).unwrap();
                transmute_value(fx, var1, data1, dst_ty1);
                transmute_value(fx, var2, data2, dst_ty2);
                return;
            }
            CPlaceInner::VarLane(_local, var, lane) => {
                let data = from.load_scalar(fx);

                // First get the old vector
                let vector = fx.bcx.use_var(var);
                //fx.bcx.set_val_label(vector, cranelift_codegen::ir::ValueLabel::new(var.index()));

                // Next insert the written lane into the vector
                let vector = fx.bcx.ins().insertlane(vector, data, lane);

                // Finally write the new vector
                //fx.bcx.set_val_label(vector, cranelift_codegen::ir::ValueLabel::new(var.index()));
                fx.bcx.def_var(var, vector);

                return;
            }
            CPlaceInner::Addr(ptr, None) => {
                if dst_layout.size == Size::ZERO || dst_layout.abi == Abi::Uninhabited {
                    return;
                }
                ptr
            }
            CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
        };

        let mut flags = MemFlags::new();
        flags.set_notrap();
        match from.layout().abi {
            // FIXME make Abi::Vector work too
            Abi::Scalar(_) => {
                let val = from.load_scalar(fx);
                to_ptr.store(fx, val, flags);
                return;
            }
            Abi::ScalarPair(a_scalar, b_scalar) => {
                let (value, extra) = from.load_scalar_pair(fx);
                let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                to_ptr.store(fx, value, flags);
                to_ptr.offset(fx, b_offset).store(fx, extra, flags);
                return;
            }
            _ => {}
        }

        // Non-scalar ABIs: store directly or emit a memcpy.
        match from.0 {
            CValueInner::ByVal(val) => {
                to_ptr.store(fx, val, flags);
            }
            CValueInner::ByValPair(_, _) => {
                bug!("Non ScalarPair abi {:?} for ByValPair CValue", dst_layout.abi);
            }
            CValueInner::ByRef(from_ptr, None) => {
                let from_addr = from_ptr.get_addr(fx);
                let to_addr = to_ptr.get_addr(fx);
                let src_layout = from.1;
                let size = dst_layout.size.bytes();
                let src_align = src_layout.align.abi.bytes() as u8;
                let dst_align = dst_layout.align.abi.bytes() as u8;
                fx.bcx.emit_small_memory_copy(
                    fx.target_config,
                    to_addr,
                    from_addr,
                    size,
                    dst_align,
                    src_align,
                    true,
                    flags,
                );
            }
            CValueInner::ByRef(_, Some(_)) => todo!(),
        }
    }

    /// Reinterpret this place as having type `ty`, keeping the same storage.
    pub(crate) fn place_opaque_cast(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        ty: Ty<'tcx>,
    ) -> CPlace<'tcx> {
        CPlace { inner: self.inner, layout: fx.layout_of(ty) }
    }

    /// Project to field `field` of this place.
    pub(crate) fn place_field(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        field: mir::Field,
    ) -> CPlace<'tcx> {
        let layout = self.layout();

        match self.inner {
            CPlaceInner::Var(local, var) => match layout.ty.kind() {
                ty::Array(_, _) => {
                    // Can only happen for vector types
                    return CPlace {
                        inner: CPlaceInner::VarLane(local, var, field.as_u32().try_into().unwrap()),
                        layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                    };
                }
                ty::Adt(adt_def, substs) if layout.ty.is_simd() => {
                    let f0_ty = adt_def.non_enum_variant().fields[0].ty(fx.tcx, substs);

                    match f0_ty.kind() {
                        ty::Array(_, _) => {
                            // SIMD newtype over an array: field 0 is the whole vector.
                            assert_eq!(field.as_u32(), 0);
                            return CPlace {
                                inner: CPlaceInner::Var(local, var),
                                layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                            };
                        }
                        _ => {
                            // SIMD struct with one field per lane.
                            return CPlace {
                                inner: CPlaceInner::VarLane(
                                    local,
                                    var,
                                    field.as_u32().try_into().unwrap(),
                                ),
                                layout: layout.field(fx, field.as_u32().try_into().unwrap()),
                            };
                        }
                    }
                }
                _ => {}
            },
            CPlaceInner::VarPair(local, var1, var2) => {
                let layout = layout.field(&*fx, field.index());

                match field.as_u32() {
                    0 => return CPlace { inner: CPlaceInner::Var(local, var1), layout },
                    1 => return CPlace { inner: CPlaceInner::Var(local, var2), layout },
                    _ => unreachable!("field should be 0 or 1"),
                }
            }
            _ => {}
        }

        // Memory-backed place: compute the field address.
        let (base, extra) = self.to_ptr_maybe_unsized();

        let (field_ptr, field_layout) = codegen_field(fx, base, extra, layout, field);
        if field_layout.is_unsized() {
            if let ty::Foreign(_) = field_layout.ty.kind() {
                // Extern types carry no metadata.
                assert!(extra.is_none());
                CPlace::for_ptr(field_ptr, field_layout)
            } else {
                CPlace::for_ptr_with_extra(field_ptr, extra.unwrap(), field_layout)
            }
        } else {
            CPlace::for_ptr(field_ptr, field_layout)
        }
    }

    /// Like [`CPlace::place_field`] except handling ADTs containing a single array field in a way
    /// such that you can access individual lanes.
    pub(crate) fn place_lane(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        lane_idx: u64,
    ) -> CPlace<'tcx> {
        let layout = self.layout();
        assert!(layout.ty.is_simd());
        let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx);
        let lane_layout = fx.layout_of(lane_ty);
        assert!(lane_idx < lane_count);

        match self.inner {
            CPlaceInner::Var(local, var) => {
                assert!(matches!(layout.abi, Abi::Vector { .. }));
                CPlace {
                    inner: CPlaceInner::VarLane(local, var, lane_idx.try_into().unwrap()),
                    layout: lane_layout,
                }
            }
            CPlaceInner::VarPair(_, _, _) => unreachable!(),
            CPlaceInner::VarLane(_, _, _) => unreachable!(),
            CPlaceInner::Addr(ptr, None) => {
                // In-memory vector: lanes are laid out contiguously.
                let field_offset = lane_layout.size * lane_idx;
                let field_ptr = ptr.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap());
                CPlace::for_ptr(field_ptr, lane_layout)
            }
            CPlaceInner::Addr(_, Some(_)) => unreachable!(),
        }
    }

    /// Index into an array or slice place with a runtime index.
    pub(crate) fn place_index(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        index: Value,
    ) -> CPlace<'tcx> {
        let (elem_layout, ptr) = match self.layout().ty.kind() {
            ty::Array(elem_ty, _) => (fx.layout_of(*elem_ty), self.to_ptr()),
            ty::Slice(elem_ty) => (fx.layout_of(*elem_ty), self.to_ptr_maybe_unsized().0),
            _ => bug!("place_index({:?})", self.layout().ty),
        };

        let offset = fx.bcx.ins().imul_imm(index, elem_layout.size.bytes() as i64);

        CPlace::for_ptr(ptr.offset_value(fx, offset), elem_layout)
    }

    /// Dereference this pointer-typed place, yielding the pointee as a place.
    pub(crate) fn place_deref(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> CPlace<'tcx> {
        let inner_layout = fx.layout_of(self.layout().ty.builtin_deref(true).unwrap().ty);
        if has_ptr_meta(fx.tcx, inner_layout.ty) {
            let (addr, extra) = self.to_cvalue(fx).load_scalar_pair(fx);
            CPlace::for_ptr_with_extra(Pointer::new(addr), extra, inner_layout)
        } else {
            CPlace::for_ptr(Pointer::new(self.to_cvalue(fx).load_scalar(fx)), inner_layout)
        }
    }

    /// Take the address of this place as a [`CValue`] of (pointer) type `layout`.
    pub(crate) fn place_ref(
        self,
        fx: &mut FunctionCx<'_, '_, 'tcx>,
        layout: TyAndLayout<'tcx>,
    ) -> CValue<'tcx> {
        if has_ptr_meta(fx.tcx, self.layout().ty) {
            let (ptr, extra) = self.to_ptr_maybe_unsized();
            CValue::by_val_pair(
                ptr.get_addr(fx),
                extra.expect("unsized type without metadata"),
                layout,
            )
        } else {
            CValue::by_val(self.to_ptr().get_addr(fx), layout)
        }
    }

    /// View this enum place as the given variant, keeping the same storage.
    pub(crate) fn downcast_variant(
        self,
        fx: &FunctionCx<'_, '_, 'tcx>,
        variant: VariantIdx,
    ) -> Self {
        assert!(self.layout().is_sized());
        let layout = self.layout().for_variant(fx, variant);
        CPlace { inner: self.inner, layout }
    }
}
5869c6ff XL |
834 | |
835 | #[track_caller] | |
836 | pub(crate) fn assert_assignable<'tcx>( | |
6a06907d | 837 | fx: &FunctionCx<'_, '_, 'tcx>, |
5869c6ff XL |
838 | from_ty: Ty<'tcx>, |
839 | to_ty: Ty<'tcx>, | |
064997fb | 840 | limit: usize, |
5869c6ff | 841 | ) { |
064997fb FG |
842 | if limit == 0 { |
843 | // assert_assignable exists solely to catch bugs in cg_clif. it isn't necessary for | |
844 | // soundness. don't attempt to check deep types to avoid exponential behavior in certain | |
845 | // cases. | |
846 | return; | |
847 | } | |
5869c6ff XL |
848 | match (from_ty.kind(), to_ty.kind()) { |
849 | (ty::Ref(_, a, _), ty::Ref(_, b, _)) | |
850 | | ( | |
851 | ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }), | |
852 | ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }), | |
853 | ) => { | |
064997fb | 854 | assert_assignable(fx, *a, *b, limit - 1); |
5869c6ff XL |
855 | } |
856 | (ty::Ref(_, a, _), ty::RawPtr(TypeAndMut { ty: b, mutbl: _ })) | |
857 | | (ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }), ty::Ref(_, b, _)) => { | |
064997fb | 858 | assert_assignable(fx, *a, *b, limit - 1); |
5869c6ff XL |
859 | } |
860 | (ty::FnPtr(_), ty::FnPtr(_)) => { | |
861 | let from_sig = fx.tcx.normalize_erasing_late_bound_regions( | |
862 | ParamEnv::reveal_all(), | |
863 | from_ty.fn_sig(fx.tcx), | |
864 | ); | |
865 | let to_sig = fx | |
866 | .tcx | |
867 | .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_ty.fn_sig(fx.tcx)); | |
868 | assert_eq!( | |
869 | from_sig, to_sig, | |
870 | "Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}", | |
871 | from_sig, to_sig, fx, | |
872 | ); | |
873 | // fn(&T) -> for<'l> fn(&'l T) is allowed | |
874 | } | |
f2b60f7d FG |
875 | (&ty::Dynamic(from_traits, _, _from_kind), &ty::Dynamic(to_traits, _, _to_kind)) => { |
876 | // FIXME(dyn-star): Do the right thing with DynKinds | |
5869c6ff | 877 | for (from, to) in from_traits.iter().zip(to_traits) { |
6a06907d XL |
878 | let from = |
879 | fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from); | |
880 | let to = fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to); | |
5869c6ff XL |
881 | assert_eq!( |
882 | from, to, | |
883 | "Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}", | |
884 | from_traits, to_traits, fx, | |
885 | ); | |
886 | } | |
887 | // dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed | |
888 | } | |
064997fb FG |
889 | (&ty::Tuple(types_a), &ty::Tuple(types_b)) => { |
890 | let mut types_a = types_a.iter(); | |
891 | let mut types_b = types_b.iter(); | |
892 | loop { | |
893 | match (types_a.next(), types_b.next()) { | |
894 | (Some(a), Some(b)) => assert_assignable(fx, a, b, limit - 1), | |
895 | (None, None) => return, | |
896 | (Some(_), None) | (None, Some(_)) => panic!("{:#?}/{:#?}", from_ty, to_ty), | |
897 | } | |
898 | } | |
899 | } | |
cdc7bbd5 | 900 | (&ty::Adt(adt_def_a, substs_a), &ty::Adt(adt_def_b, substs_b)) |
5e7ed085 | 901 | if adt_def_a.did() == adt_def_b.did() => |
cdc7bbd5 XL |
902 | { |
903 | let mut types_a = substs_a.types(); | |
904 | let mut types_b = substs_b.types(); | |
905 | loop { | |
906 | match (types_a.next(), types_b.next()) { | |
064997fb | 907 | (Some(a), Some(b)) => assert_assignable(fx, a, b, limit - 1), |
cdc7bbd5 XL |
908 | (None, None) => return, |
909 | (Some(_), None) | (None, Some(_)) => panic!("{:#?}/{:#?}", from_ty, to_ty), | |
910 | } | |
911 | } | |
912 | } | |
064997fb FG |
913 | (ty::Array(a, _), ty::Array(b, _)) => assert_assignable(fx, *a, *b, limit - 1), |
914 | (&ty::Closure(def_id_a, substs_a), &ty::Closure(def_id_b, substs_b)) | |
915 | if def_id_a == def_id_b => | |
916 | { | |
917 | let mut types_a = substs_a.types(); | |
918 | let mut types_b = substs_b.types(); | |
919 | loop { | |
920 | match (types_a.next(), types_b.next()) { | |
921 | (Some(a), Some(b)) => assert_assignable(fx, a, b, limit - 1), | |
922 | (None, None) => return, | |
923 | (Some(_), None) | (None, Some(_)) => panic!("{:#?}/{:#?}", from_ty, to_ty), | |
924 | } | |
925 | } | |
926 | } | |
927 | (ty::Param(_), _) | (_, ty::Param(_)) if fx.tcx.sess.opts.unstable_opts.polymorphize => { | |
928 | // No way to check if it is correct or not with polymorphization enabled | |
929 | } | |
5869c6ff XL |
930 | _ => { |
931 | assert_eq!( | |
064997fb FG |
932 | from_ty, |
933 | to_ty, | |
5869c6ff | 934 | "Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}", |
064997fb FG |
935 | from_ty.kind(), |
936 | to_ty.kind(), | |
937 | fx, | |
5869c6ff XL |
938 | ); |
939 | } | |
940 | } | |
941 | } |