]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_const_eval/src/const_eval/valtrees.rs
New upstream version 1.63.0+dfsg1
[rustc.git] / compiler / rustc_const_eval / src / const_eval / valtrees.rs
1 use super::eval_queries::{mk_eval_cx, op_to_const};
2 use super::machine::CompileTimeEvalContext;
3 use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
4 use crate::interpret::{
5 intern_const_alloc_recursive, ConstValue, ImmTy, Immediate, InternKind, MemPlaceMeta,
6 MemoryKind, PlaceTy, Scalar, ScalarMaybeUninit,
7 };
8 use crate::interpret::{MPlaceTy, Value};
9 use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
10 use rustc_span::source_map::DUMMY_SP;
11 use rustc_target::abi::{Align, VariantIdx};
12
13 #[instrument(skip(ecx), level = "debug")]
14 fn branches<'tcx>(
15 ecx: &CompileTimeEvalContext<'tcx, 'tcx>,
16 place: &MPlaceTy<'tcx>,
17 n: usize,
18 variant: Option<VariantIdx>,
19 num_nodes: &mut usize,
20 ) -> ValTreeCreationResult<'tcx> {
21 let place = match variant {
22 Some(variant) => ecx.mplace_downcast(&place, variant).unwrap(),
23 None => *place,
24 };
25 let variant = variant.map(|variant| Some(ty::ValTree::Leaf(ScalarInt::from(variant.as_u32()))));
26 debug!(?place, ?variant);
27
28 let mut fields = Vec::with_capacity(n);
29 for i in 0..n {
30 let field = ecx.mplace_field(&place, i).unwrap();
31 let valtree = const_to_valtree_inner(ecx, &field, num_nodes)?;
32 fields.push(Some(valtree));
33 }
34
35 // For enums, we prepend their variant index before the variant's fields so we can figure out
36 // the variant again when just seeing a valtree.
37 let branches = variant
38 .into_iter()
39 .chain(fields.into_iter())
40 .collect::<Option<Vec<_>>>()
41 .expect("should have already checked for errors in ValTree creation");
42
43 // Have to account for ZSTs here
44 if branches.len() == 0 {
45 *num_nodes += 1;
46 }
47
48 Ok(ty::ValTree::Branch(ecx.tcx.arena.alloc_from_iter(branches)))
49 }
50
51 #[instrument(skip(ecx), level = "debug")]
52 fn slice_branches<'tcx>(
53 ecx: &CompileTimeEvalContext<'tcx, 'tcx>,
54 place: &MPlaceTy<'tcx>,
55 num_nodes: &mut usize,
56 ) -> ValTreeCreationResult<'tcx> {
57 let n = place
58 .len(&ecx.tcx.tcx)
59 .unwrap_or_else(|_| panic!("expected to use len of place {:?}", place));
60
61 let mut elems = Vec::with_capacity(n as usize);
62 for i in 0..n {
63 let place_elem = ecx.mplace_index(place, i).unwrap();
64 let valtree = const_to_valtree_inner(ecx, &place_elem, num_nodes)?;
65 elems.push(valtree);
66 }
67
68 Ok(ty::ValTree::Branch(ecx.tcx.arena.alloc_from_iter(elems)))
69 }
70
/// Recursively converts the constant value behind `place` into a `ValTree`.
///
/// `num_nodes` is the running node count shared across the whole recursion;
/// once it reaches `VALTREE_MAX_NODES` the conversion fails with
/// `NodesOverflow`. Types that are not allowed in type-level constants
/// (raw/fn pointers, unions, trait objects, closures, ...) yield
/// `NonSupportedType`; failed memory reads yield `Other`.
#[instrument(skip(ecx), level = "debug")]
pub(crate) fn const_to_valtree_inner<'tcx>(
    ecx: &CompileTimeEvalContext<'tcx, 'tcx>,
    place: &MPlaceTy<'tcx>,
    num_nodes: &mut usize,
) -> ValTreeCreationResult<'tcx> {
    let ty = place.layout.ty;
    debug!("ty kind: {:?}", ty.kind());

    // Bail out before creating another node once the global budget is spent.
    if *num_nodes >= VALTREE_MAX_NODES {
        return Err(ValTreeCreationError::NodesOverflow);
    }

    match ty.kind() {
        ty::FnDef(..) => {
            // `FnDef` is a ZST; the function identity lives in the type itself.
            *num_nodes += 1;
            Ok(ty::ValTree::zst())
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
            // Primitive scalars become leaf nodes.
            let Ok(val) = ecx.read_immediate(&place.into()) else {
                return Err(ValTreeCreationError::Other);
            };
            let val = val.to_scalar().unwrap();
            *num_nodes += 1;

            Ok(ty::ValTree::Leaf(val.assert_int()))
        }

        // Raw pointers are not allowed in type level constants, as we cannot properly test them for
        // equality at compile-time (see `ptr_guaranteed_eq`/`_ne`).
        // Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to
        // agree with runtime equality tests.
        ty::FnPtr(_) | ty::RawPtr(_) => Err(ValTreeCreationError::NonSupportedType),

        ty::Ref(_, _, _) => {
            // References are transparent here: convert the pointee instead.
            let Ok(derefd_place) = ecx.deref_operand(&place.into()) else {
                return Err(ValTreeCreationError::Other);
            };
            debug!(?derefd_place);

            const_to_valtree_inner(ecx, &derefd_place, num_nodes)
        }

        ty::Str | ty::Slice(_) | ty::Array(_, _) => {
            slice_branches(ecx, place, num_nodes)
        }
        // Trait objects are not allowed in type level constants, as we have no concept for
        // resolving their backing type, even if we can do that at const eval time. We may
        // hypothetically be able to allow `dyn StructuralEq` trait objects in the future,
        // but it is unclear if this is useful.
        ty::Dynamic(..) => Err(ValTreeCreationError::NonSupportedType),

        ty::Tuple(elem_tys) => {
            branches(ecx, place, elem_tys.len(), None, num_nodes)
        }

        ty::Adt(def, _) => {
            if def.is_union() {
                // Unions have no well-defined "active field" at the type level.
                return Err(ValTreeCreationError::NonSupportedType);
            } else if def.variants().is_empty() {
                bug!("uninhabited types should have errored and never gotten converted to valtree")
            }

            // Read the discriminant so enums can record which variant is stored;
            // `branches` prepends it for enums (`is_enum().then_some(variant)`).
            let Ok((_, variant)) = ecx.read_discriminant(&place.into()) else {
                return Err(ValTreeCreationError::Other);
            };
            branches(ecx, place, def.variant(variant).fields.len(), def.is_enum().then_some(variant), num_nodes)
        }

        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Projection(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        // FIXME(oli-obk): we could look behind opaque types
        | ty::Opaque(..)
        | ty::Infer(_)
        // FIXME(oli-obk): we can probably encode closures just like structs
        | ty::Closure(..)
        | ty::Generator(..)
        | ty::GeneratorWitness(..) => Err(ValTreeCreationError::NonSupportedType),
    }
}
158
159 #[instrument(skip(ecx), level = "debug")]
160 fn create_mplace_from_layout<'tcx>(
161 ecx: &mut CompileTimeEvalContext<'tcx, 'tcx>,
162 ty: Ty<'tcx>,
163 ) -> MPlaceTy<'tcx> {
164 let tcx = ecx.tcx;
165 let param_env = ecx.param_env;
166 let layout = tcx.layout_of(param_env.and(ty)).unwrap();
167 debug!(?layout);
168
169 ecx.allocate(layout, MemoryKind::Stack).unwrap()
170 }
171
172 // Walks custom DSTs and gets the type of the unsized field and the number of elements
173 // in the unsized field.
174 fn get_info_on_unsized_field<'tcx>(
175 ty: Ty<'tcx>,
176 valtree: ty::ValTree<'tcx>,
177 tcx: TyCtxt<'tcx>,
178 ) -> (Ty<'tcx>, usize) {
179 let mut last_valtree = valtree;
180 let tail = tcx.struct_tail_with_normalize(
181 ty,
182 |ty| ty,
183 || {
184 let branches = last_valtree.unwrap_branch();
185 last_valtree = branches[branches.len() - 1];
186 debug!(?branches, ?last_valtree);
187 },
188 );
189 let unsized_inner_ty = match tail.kind() {
190 ty::Slice(t) => *t,
191 ty::Str => tail,
192 _ => bug!("expected Slice or Str"),
193 };
194
195 // Have to adjust type for ty::Str
196 let unsized_inner_ty = match unsized_inner_ty.kind() {
197 ty::Str => tcx.mk_ty(ty::Uint(ty::UintTy::U8)),
198 _ => unsized_inner_ty,
199 };
200
201 // Get the number of elements in the unsized field
202 let num_elems = last_valtree.unwrap_branch().len();
203
204 (unsized_inner_ty, num_elems)
205 }
206
207 #[instrument(skip(ecx), level = "debug")]
208 fn create_pointee_place<'tcx>(
209 ecx: &mut CompileTimeEvalContext<'tcx, 'tcx>,
210 ty: Ty<'tcx>,
211 valtree: ty::ValTree<'tcx>,
212 ) -> MPlaceTy<'tcx> {
213 let tcx = ecx.tcx.tcx;
214
215 if !ty.is_sized(ecx.tcx, ty::ParamEnv::empty()) {
216 // We need to create `Allocation`s for custom DSTs
217
218 let (unsized_inner_ty, num_elems) = get_info_on_unsized_field(ty, valtree, tcx);
219 let unsized_inner_ty = match unsized_inner_ty.kind() {
220 ty::Str => tcx.mk_ty(ty::Uint(ty::UintTy::U8)),
221 _ => unsized_inner_ty,
222 };
223 let unsized_inner_ty_size =
224 tcx.layout_of(ty::ParamEnv::empty().and(unsized_inner_ty)).unwrap().layout.size();
225 debug!(?unsized_inner_ty, ?unsized_inner_ty_size, ?num_elems);
226
227 // for custom DSTs only the last field/element is unsized, but we need to also allocate
228 // space for the other fields/elements
229 let layout = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap();
230 let size_of_sized_part = layout.layout.size();
231
232 // Get the size of the memory behind the DST
233 let dst_size = unsized_inner_ty_size.checked_mul(num_elems as u64, &tcx).unwrap();
234
235 let size = size_of_sized_part.checked_add(dst_size, &tcx).unwrap();
236 let align = Align::from_bytes(size.bytes().next_power_of_two()).unwrap();
237 let ptr = ecx.allocate_ptr(size, align, MemoryKind::Stack).unwrap();
238 debug!(?ptr);
239
240 let place = MPlaceTy::from_aligned_ptr_with_meta(
241 ptr.into(),
242 layout,
243 MemPlaceMeta::Meta(Scalar::from_machine_usize(num_elems as u64, &tcx)),
244 );
245 debug!(?place);
246
247 place
248 } else {
249 create_mplace_from_layout(ecx, ty)
250 }
251 }
252
/// Converts a `ValTree` to a `ConstValue`, which is needed after mir
/// construction has finished.
///
/// Trivial valtrees (bool, int, uint, float, char) become `Scalar` constants
/// directly; everything else is materialized into an interned allocation via
/// `valtree_into_mplace` and read back with `op_to_const`.
// FIXME Merge `valtree_to_const_value` and `valtree_into_mplace` into one function
#[instrument(skip(tcx), level = "debug")]
pub fn valtree_to_const_value<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
    valtree: ty::ValTree<'tcx>,
) -> ConstValue<'tcx> {
    // Basic idea: We directly construct `Scalar` values from trivial `ValTree`s
    // (those for constants with type bool, int, uint, float or char).
    // For all other types we create an `MPlace` and fill that by walking
    // the `ValTree` and using `place_projection` and `place_field` to
    // create inner `MPlace`s which are filled recursively.
    // FIXME Does this need an example?

    let (param_env, ty) = param_env_ty.into_parts();
    let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);

    match ty.kind() {
        ty::FnDef(..) => {
            // `FnDef` is a ZST: its valtree must be an empty branch.
            assert!(valtree.unwrap_branch().is_empty());
            ConstValue::Scalar(Scalar::ZST)
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => match valtree {
            ty::ValTree::Leaf(scalar_int) => ConstValue::Scalar(Scalar::Int(scalar_int)),
            ty::ValTree::Branch(_) => bug!(
                "ValTrees for Bool, Int, Uint, Float or Char should have the form ValTree::Leaf"
            ),
        },
        ty::Ref(_, _, _) | ty::Tuple(_) | ty::Array(_, _) | ty::Adt(..) => {
            let mut place = match ty.kind() {
                ty::Ref(_, inner_ty, _) => {
                    // Need to create a place for the pointee to fill for Refs
                    create_pointee_place(&mut ecx, *inner_ty, valtree)
                }
                _ => create_mplace_from_layout(&mut ecx, ty),
            };
            debug!(?place);

            // Fill the allocation and promote it to an interned, read-only constant.
            valtree_into_mplace(&mut ecx, &mut place, valtree);
            dump_place(&ecx, place.into());
            intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();

            let const_val = match ty.kind() {
                ty::Ref(_, _, _) => {
                    // For references the constant is the (possibly fat)
                    // pointer itself, not the pointee allocation.
                    let ref_place = place.to_ref(&tcx);
                    let imm =
                        ImmTy::from_immediate(ref_place, tcx.layout_of(param_env_ty).unwrap());

                    op_to_const(&ecx, &imm.into())
                }
                _ => op_to_const(&ecx, &place.into()),
            };
            debug!(?const_val);

            const_val
        }
        // `const_to_valtree_inner` never produces valtrees for these types,
        // so reaching this arm is a compiler bug.
        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Projection(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Opaque(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::Generator(..)
        | ty::GeneratorWitness(..)
        | ty::FnPtr(_)
        | ty::RawPtr(_)
        | ty::Str
        | ty::Slice(_)
        | ty::Dynamic(..) => bug!("no ValTree should have been created for type {:?}", ty.kind()),
    }
}
332
/// Writes the value(s) described by `valtree` into `place`, recursing into
/// fields/elements and allocating + interning pointee places for references.
#[instrument(skip(ecx), level = "debug")]
fn valtree_into_mplace<'tcx>(
    ecx: &mut CompileTimeEvalContext<'tcx, 'tcx>,
    place: &mut MPlaceTy<'tcx>,
    valtree: ty::ValTree<'tcx>,
) {
    // This will match on valtree and write the value(s) corresponding to the ValTree
    // inside the place recursively.

    let tcx = ecx.tcx.tcx;
    let ty = place.layout.ty;

    match ty.kind() {
        ty::FnDef(_, _) => {
            // ZST: writing the ZST scalar marks the place as initialized.
            ecx.write_immediate(
                Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::ZST)),
                &(*place).into(),
            )
            .unwrap();
        }
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
            // Leaf nodes are written directly as scalar immediates.
            let scalar_int = valtree.unwrap_leaf();
            debug!("writing trivial valtree {:?} to place {:?}", scalar_int, place);
            ecx.write_immediate(
                Immediate::Scalar(ScalarMaybeUninit::Scalar(scalar_int.into())),
                &(*place).into(),
            )
            .unwrap();
        }
        ty::Ref(_, inner_ty, _) => {
            // Allocate and fill a fresh place for the pointee, intern it, then
            // store a (possibly fat) pointer to it into `place`.
            let mut pointee_place = create_pointee_place(ecx, *inner_ty, valtree);
            debug!(?pointee_place);

            valtree_into_mplace(ecx, &mut pointee_place, valtree);
            dump_place(ecx, pointee_place.into());
            intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();

            let imm = match inner_ty.kind() {
                ty::Slice(_) | ty::Str => {
                    // Unsized pointees need a fat pointer: (data ptr, length).
                    let len = valtree.unwrap_branch().len();
                    let len_scalar =
                        ScalarMaybeUninit::Scalar(Scalar::from_machine_usize(len as u64, &tcx));

                    Immediate::ScalarPair(
                        ScalarMaybeUninit::from_maybe_pointer((*pointee_place).ptr, &tcx),
                        len_scalar,
                    )
                }
                _ => pointee_place.to_ref(&tcx),
            };
            debug!(?imm);

            ecx.write_immediate(imm, &(*place).into()).unwrap();
        }
        ty::Adt(_, _) | ty::Tuple(_) | ty::Array(_, _) | ty::Str | ty::Slice(_) => {
            let branches = valtree.unwrap_branch();

            // Need to downcast place for enums
            let (place_adjusted, branches, variant_idx) = match ty.kind() {
                ty::Adt(def, _) if def.is_enum() => {
                    // First element of valtree corresponds to variant
                    let scalar_int = branches[0].unwrap_leaf();
                    let variant_idx = VariantIdx::from_u32(scalar_int.try_to_u32().unwrap());
                    let variant = def.variant(variant_idx);
                    debug!(?variant);

                    (
                        place.project_downcast(ecx, variant_idx).unwrap(),
                        &branches[1..],
                        Some(variant_idx),
                    )
                }
                _ => (*place, branches, None),
            };
            debug!(?place_adjusted, ?branches);

            // Create the places (by indexing into `place`) for the fields and fill
            // them recursively
            for (i, inner_valtree) in branches.iter().enumerate() {
                debug!(?i, ?inner_valtree);

                let mut place_inner = match ty.kind() {
                    ty::Str | ty::Slice(_) => ecx.mplace_index(&place, i as u64).unwrap(),
                    _ if !ty.is_sized(ecx.tcx, ty::ParamEnv::empty())
                        && i == branches.len() - 1 =>
                    {
                        // Note: For custom DSTs we need to manually process the last unsized field.
                        // We created a `Pointer` for the `Allocation` of the complete sized version of
                        // the Adt in `create_pointee_place` and now we fill that `Allocation` with the
                        // values in the ValTree. For the unsized field we have to additionally add the meta
                        // data.

                        let (unsized_inner_ty, num_elems) =
                            get_info_on_unsized_field(ty, valtree, tcx);
                        debug!(?unsized_inner_ty);

                        // Determine the declared type of the last field so we
                        // can compute its layout.
                        let inner_ty = match ty.kind() {
                            ty::Adt(def, substs) => {
                                def.variant(VariantIdx::from_u32(0)).fields[i].ty(tcx, substs)
                            }
                            ty::Tuple(inner_tys) => inner_tys[i],
                            _ => bug!("unexpected unsized type {:?}", ty),
                        };

                        let inner_layout =
                            tcx.layout_of(ty::ParamEnv::empty().and(inner_ty)).unwrap();
                        debug!(?inner_layout);

                        // Manually project to the unsized field: offset into
                        // the allocation and attach the element count as
                        // place metadata.
                        let offset = place_adjusted.layout.fields.offset(i);
                        place
                            .offset(
                                offset,
                                MemPlaceMeta::Meta(Scalar::from_machine_usize(
                                    num_elems as u64,
                                    &tcx,
                                )),
                                inner_layout,
                                &tcx,
                            )
                            .unwrap()
                    }
                    _ => ecx.mplace_field(&place_adjusted, i).unwrap(),
                };

                debug!(?place_inner);
                valtree_into_mplace(ecx, &mut place_inner, *inner_valtree);
                dump_place(&ecx, place_inner.into());
            }

            debug!("dump of place_adjusted:");
            dump_place(ecx, place_adjusted.into());

            if let Some(variant_idx) = variant_idx {
                // don't forget filling the place with the discriminant of the enum
                ecx.write_discriminant(variant_idx, &(*place).into()).unwrap();
            }

            debug!("dump of place after writing discriminant:");
            dump_place(ecx, (*place).into());
        }
        _ => bug!("shouldn't have created a ValTree for {:?}", ty),
    }
}
476
477 fn dump_place<'tcx>(ecx: &CompileTimeEvalContext<'tcx, 'tcx>, place: PlaceTy<'tcx>) {
478 trace!("{:?}", ecx.dump_place(*place));
479 }