1 use super::eval_queries
::{mk_eval_cx, op_to_const}
;
2 use super::machine
::CompileTimeEvalContext
;
3 use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES}
;
4 use crate::interpret
::{
5 intern_const_alloc_recursive
, ConstValue
, ImmTy
, Immediate
, InternKind
, MemPlaceMeta
,
6 MemoryKind
, PlaceTy
, Scalar
, ScalarMaybeUninit
,
8 use crate::interpret
::{MPlaceTy, Value}
;
9 use rustc_middle
::ty
::{self, ScalarInt, Ty, TyCtxt}
;
10 use rustc_span
::source_map
::DUMMY_SP
;
11 use rustc_target
::abi
::{Align, VariantIdx}
;
// Recursively builds a `ValTree::Branch` for an aggregate place (struct, tuple,
// or a specific enum variant): each field is converted via
// `const_to_valtree_inner`, and for enums the variant index is prepended as a
// leaf (`ScalarInt::from(variant.as_u32())`) so the variant can be recovered
// when the valtree is read back.
// NOTE(review): this excerpt is lossy — the `fn branches<'tcx>(` header, the
// field-count parameter feeding `Vec::with_capacity(n)`, the `None` arm of
// `match variant`, the field-loop header, and several closing braces are not
// visible here. Verify against the full source before editing this block.
13 #[instrument(skip(ecx), level = "debug")]
15 ecx
: &CompileTimeEvalContext
<'tcx
, 'tcx
>,
16 place
: &MPlaceTy
<'tcx
>,
18 variant
: Option
<VariantIdx
>,
19 num_nodes
: &mut usize,
20 ) -> ValTreeCreationResult
<'tcx
> {
// Downcast to the concrete variant first so the field projections below see
// the variant's layout rather than the whole enum's.
21 let place
= match variant
{
22 Some(variant
) => ecx
.mplace_downcast(&place
, variant
).unwrap(),
25 let variant
= variant
.map(|variant
| Some(ty
::ValTree
::Leaf(ScalarInt
::from(variant
.as_u32()))));
26 debug
!(?place
, ?variant
);
28 let mut fields
= Vec
::with_capacity(n
);
// Each field recursion propagates ValTreeCreationError via `?`.
30 let field
= ecx
.mplace_field(&place
, i
).unwrap();
31 let valtree
= const_to_valtree_inner(ecx
, &field
, num_nodes
)?
;
32 fields
.push(Some(valtree
));
35 // For enums, we prepend their variant index before the variant's fields so we can figure out
36 // the variant again when just seeing a valtree.
37 let branches
= variant
39 .chain(fields
.into_iter())
40 .collect
::<Option
<Vec
<_
>>>()
41 .expect("should have already checked for errors in ValTree creation");
43 // Have to account for ZSTs here
44 if branches
.len() == 0 {
48 Ok(ty
::ValTree
::Branch(ecx
.tcx
.arena
.alloc_from_iter(branches
)))
// Converts the elements of an array/slice/str place into a `ValTree::Branch`
// by indexing each element (`mplace_index`) and recursing through
// `const_to_valtree_inner`; element errors propagate via `?`.
// NOTE(review): lossy excerpt — the length read whose failure triggers the
// `panic!` below, the element-loop header, and the `elems.push(..)` line are
// not visible here.
51 #[instrument(skip(ecx), level = "debug")]
52 fn slice_branches
<'tcx
>(
53 ecx
: &CompileTimeEvalContext
<'tcx
, 'tcx
>,
54 place
: &MPlaceTy
<'tcx
>,
55 num_nodes
: &mut usize,
56 ) -> ValTreeCreationResult
<'tcx
> {
59 .unwrap_or_else(|_
| panic
!("expected to use len of place {:?}", place
));
61 let mut elems
= Vec
::with_capacity(n
as usize);
63 let place_elem
= ecx
.mplace_index(place
, i
).unwrap();
64 let valtree
= const_to_valtree_inner(ecx
, &place_elem
, num_nodes
)?
;
68 Ok(ty
::ValTree
::Branch(ecx
.tcx
.arena
.alloc_from_iter(elems
)))
// Core recursion: turns a memory place produced by const-eval into a
// type-level `ValTree`, or reports why the type cannot be represented.
// - Bails with `NodesOverflow` once `*num_nodes` reaches `VALTREE_MAX_NODES`.
// - Primitives (bool/int/uint/float/char) become `ValTree::Leaf`.
// - References are dereferenced and recursed into.
// - str/slice/array go through `slice_branches`; tuples/ADTs through `branches`.
// - Fn pointers, raw pointers and trait objects are rejected as
//   `NonSupportedType` (see the inline comments for rationale).
// NOTE(review): lossy excerpt — the `match ty.kind()` header, the arm guarding
// the ZST case, parts of the ADT arm and several closing braces are missing
// from this view; do not edit logic based on this fragment alone.
71 #[instrument(skip(ecx), level = "debug")]
72 pub(crate) fn const_to_valtree_inner
<'tcx
>(
73 ecx
: &CompileTimeEvalContext
<'tcx
, 'tcx
>,
74 place
: &MPlaceTy
<'tcx
>,
75 num_nodes
: &mut usize,
76 ) -> ValTreeCreationResult
<'tcx
> {
77 let ty
= place
.layout
.ty
;
78 debug
!("ty kind: {:?}", ty
.kind());
// Depth/size guard: refuse to build arbitrarily large valtrees.
80 if *num_nodes
>= VALTREE_MAX_NODES
{
81 return Err(ValTreeCreationError
::NodesOverflow
);
87 Ok(ty
::ValTree
::zst())
89 ty
::Bool
| ty
::Int(_
) | ty
::Uint(_
) | ty
::Float(_
) | ty
::Char
=> {
// Read failures (e.g. uninitialized memory) surface as `Other`, not a panic.
90 let Ok(val
) = ecx
.read_immediate(&place
.into()) else {
91 return Err(ValTreeCreationError
::Other
);
93 let val
= val
.to_scalar().unwrap();
96 Ok(ty
::ValTree
::Leaf(val
.assert_int()))
99 // Raw pointers are not allowed in type level constants, as we cannot properly test them for
100 // equality at compile-time (see `ptr_guaranteed_eq`/`_ne`).
101 // Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to
102 // agree with runtime equality tests.
103 ty
::FnPtr(_
) | ty
::RawPtr(_
) => Err(ValTreeCreationError
::NonSupportedType
),
105 ty
::Ref(_
, _
, _
) => {
106 let Ok(derefd_place
)= ecx
.deref_operand(&place
.into()) else {
107 return Err(ValTreeCreationError
::Other
);
109 debug
!(?derefd_place
);
111 const_to_valtree_inner(ecx
, &derefd_place
, num_nodes
)
114 ty
::Str
| ty
::Slice(_
) | ty
::Array(_
, _
) => {
115 slice_branches(ecx
, place
, num_nodes
)
117 // Trait objects are not allowed in type level constants, as we have no concept for
118 // resolving their backing type, even if we can do that at const eval time. We may
119 // hypothetically be able to allow `dyn StructuralEq` trait objects in the future,
120 // but it is unclear if this is useful.
121 ty
::Dynamic(..) => Err(ValTreeCreationError
::NonSupportedType
),
123 ty
::Tuple(elem_tys
) => {
124 branches(ecx
, place
, elem_tys
.len(), None
, num_nodes
)
129 return Err(ValTreeCreationError
::NonSupportedType
);
130 } else if def
.variants().is_empty() {
131 bug
!("uninhabited types should have errored and never gotten converted to valtree")
// Discriminant read can fail on invalid data; report `Other` rather than ICE.
134 let Ok((_
, variant
)) = ecx
.read_discriminant(&place
.into()) else {
135 return Err(ValTreeCreationError
::Other
);
// For enums, pass the variant so `branches` prepends its index; structs pass None
// via `is_enum().then_some(..)`.
137 branches(ecx
, place
, def
.variant(variant
).fields
.len(), def
.is_enum().then_some(variant
), num_nodes
)
143 | ty
::Infer(ty
::FreshIntTy(_
))
144 | ty
::Infer(ty
::FreshFloatTy(_
))
148 | ty
::Placeholder(..)
149 // FIXME(oli-obk): we could look behind opaque types
152 // FIXME(oli-obk): we can probably encode closures just like structs
155 | ty
::GeneratorWitness(..) => Err(ValTreeCreationError
::NonSupportedType
),
// Allocates a fresh stack-kind `MPlaceTy` for a type, using the layout computed
// under the eval context's `param_env`. Layout or allocation failure is treated
// as a bug (`unwrap`) — callers are expected to pass layoutable types.
// NOTE(review): lossy excerpt — the type parameter of the function (the `ty`
// used in `param_env.and(ty)`) and the `tcx` binding are not visible here.
159 #[instrument(skip(ecx), level = "debug")]
160 fn create_mplace_from_layout
<'tcx
>(
161 ecx
: &mut CompileTimeEvalContext
<'tcx
, 'tcx
>,
163 ) -> MPlaceTy
<'tcx
> {
165 let param_env
= ecx
.param_env
;
166 let layout
= tcx
.layout_of(param_env
.and(ty
)).unwrap();
169 ecx
.allocate(layout
, MemoryKind
::Stack
).unwrap()
// Returns `(element type, element count)` for the trailing unsized field of a
// custom DST. The valtree is walked in lock-step with the type's struct tail
// (`struct_tail_with_normalize`) so that `last_valtree` ends up at the branch
// holding the unsized tail's elements. `ty::Str` is normalized to `u8`
// elements, since its valtree stores raw bytes.
// NOTE(review): lossy excerpt — the `ty`/`tcx` parameters, the normalization
// closure body, and the `Slice`/`Str` arms of the first match are not visible
// here; only the fallback `bug!` arm shows.
172 // Walks custom DSTs and gets the type of the unsized field and the number of elements
173 // in the unsized field.
174 fn get_info_on_unsized_field
<'tcx
>(
176 valtree
: ty
::ValTree
<'tcx
>,
178 ) -> (Ty
<'tcx
>, usize) {
179 let mut last_valtree
= valtree
;
180 let tail
= tcx
.struct_tail_with_normalize(
// Inside the normalization callback: descend into the last branch each step,
// mirroring the type-level walk toward the unsized tail.
184 let branches
= last_valtree
.unwrap_branch();
185 last_valtree
= branches
[branches
.len() - 1];
186 debug
!(?branches
, ?last_valtree
);
189 let unsized_inner_ty
= match tail
.kind() {
192 _
=> bug
!("expected Slice or Str"),
195 // Have to adjust type for ty::Str
196 let unsized_inner_ty
= match unsized_inner_ty
.kind() {
197 ty
::Str
=> tcx
.mk_ty(ty
::Uint(ty
::UintTy
::U8
)),
198 _
=> unsized_inner_ty
,
201 // Get the number of elements in the unsized field
202 let num_elems
= last_valtree
.unwrap_branch().len();
204 (unsized_inner_ty
, num_elems
)
// Creates the place that a reference's pointee will be written into.
// Sized pointees take the simple path via `create_mplace_from_layout`.
// Unsized (custom DST) pointees need a manual allocation sized as
// `size_of_sized_part + unsized_inner_ty_size * num_elems`, with the place's
// meta carrying the element count.
// NOTE(review): lossy excerpt — the `ty` parameter, parts of the
// `from_aligned_ptr_with_meta` argument list, and the `else` branch header for
// the sized case are not visible here. Also note the alignment is derived from
// the *total size* rounded to a power of two, which looks over-aligned —
// confirm intent against the full source before changing it.
207 #[instrument(skip(ecx), level = "debug")]
208 fn create_pointee_place
<'tcx
>(
209 ecx
: &mut CompileTimeEvalContext
<'tcx
, 'tcx
>,
211 valtree
: ty
::ValTree
<'tcx
>,
212 ) -> MPlaceTy
<'tcx
> {
213 let tcx
= ecx
.tcx
.tcx
;
215 if !ty
.is_sized(ecx
.tcx
, ty
::ParamEnv
::empty()) {
216 // We need to create `Allocation`s for custom DSTs
218 let (unsized_inner_ty
, num_elems
) = get_info_on_unsized_field(ty
, valtree
, tcx
);
// Same str -> u8 normalization as in `get_info_on_unsized_field`.
219 let unsized_inner_ty
= match unsized_inner_ty
.kind() {
220 ty
::Str
=> tcx
.mk_ty(ty
::Uint(ty
::UintTy
::U8
)),
221 _
=> unsized_inner_ty
,
223 let unsized_inner_ty_size
=
224 tcx
.layout_of(ty
::ParamEnv
::empty().and(unsized_inner_ty
)).unwrap().layout
.size();
225 debug
!(?unsized_inner_ty
, ?unsized_inner_ty_size
, ?num_elems
);
227 // for custom DSTs only the last field/element is unsized, but we need to also allocate
228 // space for the other fields/elements
229 let layout
= tcx
.layout_of(ty
::ParamEnv
::empty().and(ty
)).unwrap();
230 let size_of_sized_part
= layout
.layout
.size();
232 // Get the size of the memory behind the DST
// `checked_mul`/`checked_add` are the interpreter's overflow-checked Size ops;
// overflow here would be a compiler bug, hence `unwrap`.
233 let dst_size
= unsized_inner_ty_size
.checked_mul(num_elems
as u64, &tcx
).unwrap();
235 let size
= size_of_sized_part
.checked_add(dst_size
, &tcx
).unwrap();
236 let align
= Align
::from_bytes(size
.bytes().next_power_of_two()).unwrap();
237 let ptr
= ecx
.allocate_ptr(size
, align
, MemoryKind
::Stack
).unwrap();
// The meta stores the element count so the resulting place is a fat pointer.
240 let place
= MPlaceTy
::from_aligned_ptr_with_meta(
243 MemPlaceMeta
::Meta(Scalar
::from_machine_usize(num_elems
as u64, &tcx
)),
249 create_mplace_from_layout(ecx
, ty
)
253 /// Converts a `ValTree` to a `ConstValue`, which is needed after mir
254 /// construction has finished.
255 // FIXME Merge `valtree_to_const_value` and `valtree_into_mplace` into one function
256 #[instrument(skip(tcx), level = "debug")]
257 pub fn valtree_to_const_value
<'tcx
>(
259 param_env_ty
: ty
::ParamEnvAnd
<'tcx
, Ty
<'tcx
>>,
260 valtree
: ty
::ValTree
<'tcx
>,
261 ) -> ConstValue
<'tcx
> {
262 // Basic idea: We directly construct `Scalar` values from trivial `ValTree`s
263 // (those for constants with type bool, int, uint, float or char).
264 // For all other types we create an `MPlace` and fill that by walking
265 // the `ValTree` and using `place_projection` and `place_field` to
266 // create inner `MPlace`s which are filled recursively.
267 // FIXME Does this need an example?
// NOTE(review): lossy excerpt — the `tcx` parameter, the `match ty.kind()`
// header, the arm guarding the ZST case, the `let imm =` binding fed into
// `op_to_const`, and several closing braces are not visible here.
269 let (param_env
, ty
) = param_env_ty
.into_parts();
270 let mut ecx
= mk_eval_cx(tcx
, DUMMY_SP
, param_env
, false);
// ZST case: the valtree must be an empty branch; no memory is materialized.
274 assert
!(valtree
.unwrap_branch().is_empty());
275 ConstValue
::Scalar(Scalar
::ZST
)
277 ty
::Bool
| ty
::Int(_
) | ty
::Uint(_
) | ty
::Float(_
) | ty
::Char
=> match valtree
{
278 ty
::ValTree
::Leaf(scalar_int
) => ConstValue
::Scalar(Scalar
::Int(scalar_int
)),
279 ty
::ValTree
::Branch(_
) => bug
!(
280 "ValTrees for Bool, Int, Uint, Float or Char should have the form ValTree::Leaf"
283 ty
::Ref(_
, _
, _
) | ty
::Tuple(_
) | ty
::Array(_
, _
) | ty
::Adt(..) => {
// Aggregates/refs: materialize an MPlace, fill it recursively, then intern the
// allocation and read the result back as a ConstValue.
284 let mut place
= match ty
.kind() {
285 ty
::Ref(_
, inner_ty
, _
) => {
286 // Need to create a place for the pointee to fill for Refs
287 create_pointee_place(&mut ecx
, *inner_ty
, valtree
)
289 _
=> create_mplace_from_layout(&mut ecx
, ty
),
293 valtree_into_mplace(&mut ecx
, &mut place
, valtree
);
294 dump_place(&ecx
, place
.into());
295 intern_const_alloc_recursive(&mut ecx
, InternKind
::Constant
, &place
).unwrap();
297 let const_val
= match ty
.kind() {
298 ty
::Ref(_
, _
, _
) => {
// For refs, the const value is a pointer to the interned pointee.
299 let ref_place
= place
.to_ref(&tcx
);
301 ImmTy
::from_immediate(ref_place
, tcx
.layout_of(param_env_ty
).unwrap());
303 op_to_const(&ecx
, &imm
.into())
305 _
=> op_to_const(&ecx
, &place
.into()),
// Types below can never have had a valtree built for them (see
// `const_to_valtree_inner`), so reaching here is a compiler bug.
314 | ty
::Infer(ty
::FreshIntTy(_
))
315 | ty
::Infer(ty
::FreshFloatTy(_
))
319 | ty
::Placeholder(..)
324 | ty
::GeneratorWitness(..)
329 | ty
::Dynamic(..) => bug
!("no ValTree should have been created for type {:?}", ty
.kind()),
// Writes the contents of `valtree` into `place`, recursing structurally:
// - primitives are written via `write_immediate`,
// - references first fill and intern a pointee place, then write a (possibly
//   fat) pointer to it,
// - aggregates are filled field-by-field (enums get a variant downcast first
//   and a final `write_discriminant`),
// - the trailing unsized field of a custom DST is placed by hand from the
//   layout offset plus an element-count meta.
// NOTE(review): lossy excerpt — the `match ty.kind()` header, the ZST arm's
// surrounding `write_immediate` call, the second half of the `ScalarPair`
// construction, the manual `MPlaceTy` construction for the unsized field and
// many closing braces are not visible here. Treat this block as read-only
// until the full source is available.
333 #[instrument(skip(ecx), level = "debug")]
334 fn valtree_into_mplace
<'tcx
>(
335 ecx
: &mut CompileTimeEvalContext
<'tcx
, 'tcx
>,
336 place
: &mut MPlaceTy
<'tcx
>,
337 valtree
: ty
::ValTree
<'tcx
>,
339 // This will match on valtree and write the value(s) corresponding to the ValTree
340 // inside the place recursively.
342 let tcx
= ecx
.tcx
.tcx
;
343 let ty
= place
.layout
.ty
;
348 Immediate
::Scalar(ScalarMaybeUninit
::Scalar(Scalar
::ZST
)),
353 ty
::Bool
| ty
::Int(_
) | ty
::Uint(_
) | ty
::Float(_
) | ty
::Char
=> {
354 let scalar_int
= valtree
.unwrap_leaf();
355 debug
!("writing trivial valtree {:?} to place {:?}", scalar_int
, place
);
357 Immediate
::Scalar(ScalarMaybeUninit
::Scalar(scalar_int
.into())),
362 ty
::Ref(_
, inner_ty
, _
) => {
363 let mut pointee_place
= create_pointee_place(ecx
, *inner_ty
, valtree
);
364 debug
!(?pointee_place
);
// Fill the pointee first, then intern it so the reference below is valid.
366 valtree_into_mplace(ecx
, &mut pointee_place
, valtree
);
367 dump_place(ecx
, pointee_place
.into());
368 intern_const_alloc_recursive(ecx
, InternKind
::Constant
, &pointee_place
).unwrap();
370 let imm
= match inner_ty
.kind() {
371 ty
::Slice(_
) | ty
::Str
=> {
// Fat pointer: pair the data pointer with the branch length as metadata.
372 let len
= valtree
.unwrap_branch().len();
374 ScalarMaybeUninit
::Scalar(Scalar
::from_machine_usize(len
as u64, &tcx
));
376 Immediate
::ScalarPair(
377 ScalarMaybeUninit
::from_maybe_pointer((*pointee_place
).ptr
, &tcx
),
381 _
=> pointee_place
.to_ref(&tcx
),
385 ecx
.write_immediate(imm
, &(*place
).into()).unwrap();
387 ty
::Adt(_
, _
) | ty
::Tuple(_
) | ty
::Array(_
, _
) | ty
::Str
| ty
::Slice(_
) => {
388 let branches
= valtree
.unwrap_branch();
390 // Need to downcast place for enums
391 let (place_adjusted
, branches
, variant_idx
) = match ty
.kind() {
392 ty
::Adt(def
, _
) if def
.is_enum() => {
393 // First element of valtree corresponds to variant
394 let scalar_int
= branches
[0].unwrap_leaf();
395 let variant_idx
= VariantIdx
::from_u32(scalar_int
.try_to_u32().unwrap());
396 let variant
= def
.variant(variant_idx
);
400 place
.project_downcast(ecx
, variant_idx
).unwrap(),
405 _
=> (*place
, branches
, None
),
407 debug
!(?place_adjusted
, ?branches
);
409 // Create the places (by indexing into `place`) for the fields and fill
411 for (i
, inner_valtree
) in branches
.iter().enumerate() {
412 debug
!(?i
, ?inner_valtree
);
414 let mut place_inner
= match ty
.kind() {
415 ty
::Str
| ty
::Slice(_
) => ecx
.mplace_index(&place
, i
as u64).unwrap(),
416 _
if !ty
.is_sized(ecx
.tcx
, ty
::ParamEnv
::empty())
417 && i
== branches
.len() - 1 =>
419 // Note: For custom DSTs we need to manually process the last unsized field.
420 // We created a `Pointer` for the `Allocation` of the complete sized version of
421 // the Adt in `create_pointee_place` and now we fill that `Allocation` with the
422 // values in the ValTree. For the unsized field we have to additionally add the meta
425 let (unsized_inner_ty
, num_elems
) =
426 get_info_on_unsized_field(ty
, valtree
, tcx
);
427 debug
!(?unsized_inner_ty
);
429 let inner_ty
= match ty
.kind() {
430 ty
::Adt(def
, substs
) => {
// assumes the unsized tail only occurs on the first (struct) variant — TODO confirm
431 def
.variant(VariantIdx
::from_u32(0)).fields
[i
].ty(tcx
, substs
)
433 ty
::Tuple(inner_tys
) => inner_tys
[i
],
434 _
=> bug
!("unexpected unsized type {:?}", ty
),
438 tcx
.layout_of(ty
::ParamEnv
::empty().and(inner_ty
)).unwrap();
439 debug
!(?inner_layout
);
441 let offset
= place_adjusted
.layout
.fields
.offset(i
);
445 MemPlaceMeta
::Meta(Scalar
::from_machine_usize(
454 _
=> ecx
.mplace_field(&place_adjusted
, i
).unwrap(),
457 debug
!(?place_inner
);
458 valtree_into_mplace(ecx
, &mut place_inner
, *inner_valtree
);
459 dump_place(&ecx
, place_inner
.into());
462 debug
!("dump of place_adjusted:");
463 dump_place(ecx
, place_adjusted
.into());
465 if let Some(variant_idx
) = variant_idx
{
466 // don't forget filling the place with the discriminant of the enum
467 ecx
.write_discriminant(variant_idx
, &(*place
).into()).unwrap();
470 debug
!("dump of place after writing discriminant:");
471 dump_place(ecx
, (*place
).into());
473 _
=> bug
!("shouldn't have created a ValTree for {:?}", ty
),
// Debug helper: trace-logs the interpreter's view of `place`. Effectively a
// no-op unless trace-level logging is enabled.
// NOTE(review): the closing brace of this function is not visible in this
// excerpt.
477 fn dump_place
<'tcx
>(ecx
: &CompileTimeEvalContext
<'tcx
, 'tcx
>, place
: PlaceTy
<'tcx
>) {
478 trace
!("{:?}", ecx
.dump_place(*place
));