]> git.proxmox.com Git - rustc.git/blame - compiler/rustc_const_eval/src/const_eval/valtrees.rs
bump version to 1.79.0+dfsg1-1~bpo12+pve2
[rustc.git] / compiler / rustc_const_eval / src / const_eval / valtrees.rs
CommitLineData
e8be2606 1use rustc_data_structures::stack::ensure_sufficient_stack;
c620b35d
FG
2use rustc_middle::mir;
3use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId};
4use rustc_middle::ty::layout::{LayoutCx, LayoutOf, TyAndLayout};
5use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
6use rustc_span::DUMMY_SP;
7use rustc_target::abi::{Abi, VariantIdx};
8
9use super::eval_queries::{mk_eval_cx_to_read_const_val, op_to_const};
04454e1e 10use super::machine::CompileTimeEvalContext;
923072b8 11use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
c620b35d
FG
12use crate::const_eval::CanAccessMutGlobal;
13use crate::errors::MaxNumNodesInConstErr;
add651ee 14use crate::interpret::MPlaceTy;
04454e1e 15use crate::interpret::{
781aab86
FG
16 intern_const_alloc_recursive, ImmTy, Immediate, InternKind, MemPlaceMeta, MemoryKind, PlaceTy,
17 Projectable, Scalar,
04454e1e 18};
04454e1e 19
04454e1e
FG
20#[instrument(skip(ecx), level = "debug")]
21fn branches<'tcx>(
22 ecx: &CompileTimeEvalContext<'tcx, 'tcx>,
23 place: &MPlaceTy<'tcx>,
24 n: usize,
25 variant: Option<VariantIdx>,
923072b8
FG
26 num_nodes: &mut usize,
27) -> ValTreeCreationResult<'tcx> {
04454e1e 28 let place = match variant {
add651ee
FG
29 Some(variant) => ecx.project_downcast(place, variant).unwrap(),
30 None => place.clone(),
04454e1e
FG
31 };
32 let variant = variant.map(|variant| Some(ty::ValTree::Leaf(ScalarInt::from(variant.as_u32()))));
33 debug!(?place, ?variant);
34
923072b8
FG
35 let mut fields = Vec::with_capacity(n);
36 for i in 0..n {
add651ee 37 let field = ecx.project_field(&place, i).unwrap();
923072b8
FG
38 let valtree = const_to_valtree_inner(ecx, &field, num_nodes)?;
39 fields.push(Some(valtree));
40 }
41
42 // For enums, we prepend their variant index before the variant's fields so we can figure out
04454e1e 43 // the variant again when just seeing a valtree.
923072b8
FG
44 let branches = variant
45 .into_iter()
46 .chain(fields.into_iter())
47 .collect::<Option<Vec<_>>>()
48 .expect("should have already checked for errors in ValTree creation");
49
50 // Have to account for ZSTs here
51 if branches.len() == 0 {
52 *num_nodes += 1;
53 }
54
55 Ok(ty::ValTree::Branch(ecx.tcx.arena.alloc_from_iter(branches)))
04454e1e
FG
56}
57
58#[instrument(skip(ecx), level = "debug")]
59fn slice_branches<'tcx>(
60 ecx: &CompileTimeEvalContext<'tcx, 'tcx>,
61 place: &MPlaceTy<'tcx>,
923072b8
FG
62 num_nodes: &mut usize,
63) -> ValTreeCreationResult<'tcx> {
add651ee 64 let n = place.len(ecx).unwrap_or_else(|_| panic!("expected to use len of place {place:?}"));
923072b8
FG
65
66 let mut elems = Vec::with_capacity(n as usize);
67 for i in 0..n {
add651ee 68 let place_elem = ecx.project_index(place, i).unwrap();
923072b8
FG
69 let valtree = const_to_valtree_inner(ecx, &place_elem, num_nodes)?;
70 elems.push(valtree);
71 }
04454e1e 72
923072b8 73 Ok(ty::ValTree::Branch(ecx.tcx.arena.alloc_from_iter(elems)))
04454e1e
FG
74}
75
/// Recursively converts the memory behind `place` into a type-level `ValTree`,
/// dispatching on the type of `place`.
///
/// `num_nodes` is a running counter shared across the whole conversion; once it
/// reaches `VALTREE_MAX_NODES` the conversion aborts with `NodesOverflow`.
/// Types that have no valtree representation yield `NonSupportedType`.
#[instrument(skip(ecx), level = "debug")]
fn const_to_valtree_inner<'tcx>(
    ecx: &CompileTimeEvalContext<'tcx, 'tcx>,
    place: &MPlaceTy<'tcx>,
    num_nodes: &mut usize,
) -> ValTreeCreationResult<'tcx> {
    let ty = place.layout.ty;
    debug!("ty kind: {:?}", ty.kind());

    // Enforce the global node budget before producing any more nodes.
    if *num_nodes >= VALTREE_MAX_NODES {
        return Err(ValTreeCreationError::NodesOverflow);
    }

    match ty.kind() {
        // A `FnDef` is zero-sized; the function identity lives entirely in the type.
        ty::FnDef(..) => {
            *num_nodes += 1;
            Ok(ty::ValTree::zst())
        }
        // Primitive scalars become a single leaf node.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
            let val = ecx.read_immediate(place)?;
            let val = val.to_scalar();
            *num_nodes += 1;

            Ok(ty::ValTree::Leaf(val.assert_int()))
        }

        ty::Pat(base, ..) => {
            let mut place = place.clone();
            // The valtree of the base type is the same as the valtree of the pattern type.
            // Since the returned valtree does not contain the type or layout, we can just
            // switch to the base type.
            place.layout = ecx.layout_of(*base).unwrap();
            // Recursing with a fresh layout may deepen the stack; guard against overflow.
            ensure_sufficient_stack(|| const_to_valtree_inner(ecx, &place, num_nodes))
        },


        ty::RawPtr(_, _) => {
            // Not all raw pointers are allowed, as we cannot properly test them for
            // equality at compile-time (see `ptr_guaranteed_cmp`).
            // However we allow those that are just integers in disguise.
            // First, get the pointer. Remember it might be wide!
            let val = ecx.read_immediate(place)?;
            // We could allow wide raw pointers where both sides are integers in the future,
            // but for now we reject them.
            if matches!(val.layout.abi, Abi::ScalarPair(..)) {
                return Err(ValTreeCreationError::NonSupportedType);
            }
            let val = val.to_scalar();
            // We are in the CTFE machine, so ptr-to-int casts will fail.
            // This can only be `Ok` if `val` already is an integer.
            let Ok(val) = val.try_to_int() else {
                return Err(ValTreeCreationError::NonSupportedType);
            };
            // It's just a ScalarInt!
            Ok(ty::ValTree::Leaf(val))
        }

        // Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to
        // agree with runtime equality tests.
        ty::FnPtr(_) => Err(ValTreeCreationError::NonSupportedType),

        // References: valtrees store the pointee's value directly, so just deref and recurse.
        ty::Ref(_, _, _) => {
            let derefd_place = ecx.deref_pointer(place)?;
            const_to_valtree_inner(ecx, &derefd_place, num_nodes)
        }

        ty::Str | ty::Slice(_) | ty::Array(_, _) => {
            slice_branches(ecx, place, num_nodes)
        }
        // Trait objects are not allowed in type level constants, as we have no concept for
        // resolving their backing type, even if we can do that at const eval time. We may
        // hypothetically be able to allow `dyn StructuralPartialEq` trait objects in the future,
        // but it is unclear if this is useful.
        ty::Dynamic(..) => Err(ValTreeCreationError::NonSupportedType),

        ty::Tuple(elem_tys) => {
            branches(ecx, place, elem_tys.len(), None, num_nodes)
        }

        ty::Adt(def, _) => {
            if def.is_union() {
                // Unions have no well-defined field interpretation at the type level.
                return Err(ValTreeCreationError::NonSupportedType);
            } else if def.variants().is_empty() {
                bug!("uninhabited types should have errored and never gotten converted to valtree")
            }

            // For structs this is the single variant; for enums the discriminant is read
            // from memory and the variant index is encoded into the valtree by `branches`.
            let variant = ecx.read_discriminant(place)?;
            branches(ecx, place, def.variant(variant).fields.len(), def.is_enum().then_some(variant), num_nodes)
        }

        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        // FIXME(oli-obk): we could look behind opaque types
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Infer(_)
        // FIXME(oli-obk): we can probably encode closures just like structs
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..) => Err(ValTreeCreationError::NonSupportedType),
    }
}
184
add651ee
FG
/// Valtrees don't store the `MemPlaceMeta` that all dynamically sized values have in the interpreter.
/// This function reconstructs it.
fn reconstruct_place_meta<'tcx>(
    layout: TyAndLayout<'tcx>,
    valtree: ty::ValTree<'tcx>,
    tcx: TyCtxt<'tcx>,
) -> MemPlaceMeta {
    if layout.is_sized() {
        // Sized values carry no place metadata.
        return MemPlaceMeta::None;
    }

    let mut last_valtree = valtree;
    // Traverse the type, and update `last_valtree` as we go.
    let tail = tcx.struct_tail_with_normalize(
        layout.ty,
        |ty| ty,
        || {
            // The callback fires once per struct layer peeled off on the way to the
            // unsized tail; the tail value is the *last* branch of the current valtree.
            let branches = last_valtree.unwrap_branch();
            last_valtree = *branches.last().unwrap();
            debug!(?branches, ?last_valtree);
        },
    );
    // Sanity-check that we got a tail we support.
    match tail.kind() {
        ty::Slice(..) | ty::Str => {}
        _ => bug!("unsized tail of a valtree must be Slice or Str"),
    };

    // Get the number of elements in the unsized field.
    let num_elems = last_valtree.unwrap_branch().len();
    MemPlaceMeta::Meta(Scalar::from_target_usize(num_elems as u64, &tcx))
}
217
f2b60f7d 218#[instrument(skip(ecx), level = "debug", ret)]
781aab86 219fn create_valtree_place<'tcx>(
04454e1e 220 ecx: &mut CompileTimeEvalContext<'tcx, 'tcx>,
781aab86 221 layout: TyAndLayout<'tcx>,
04454e1e
FG
222 valtree: ty::ValTree<'tcx>,
223) -> MPlaceTy<'tcx> {
add651ee
FG
224 let meta = reconstruct_place_meta(layout, valtree, ecx.tcx.tcx);
225 ecx.allocate_dyn(layout, MemoryKind::Stack, meta).unwrap()
04454e1e
FG
226}
227
c620b35d
FG
/// Evaluates a constant and turns it into a type-level constant value.
///
/// Returns `Ok(Some(_))` on success, `Ok(None)` when the constant's type has no
/// valtree representation, and `Err(_)` when evaluation fails or the valtree
/// node budget is exceeded (which is reported as a user-facing error).
pub(crate) fn eval_to_valtree<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    cid: GlobalId<'tcx>,
) -> EvalToValTreeResult<'tcx> {
    // First evaluate to a raw allocation; the valtree is built by walking that memory.
    let const_alloc = tcx.eval_to_allocation_raw(param_env.and(cid))?;

    // FIXME Need to provide a span to `eval_to_valtree`
    let ecx = mk_eval_cx_to_read_const_val(
        tcx,
        DUMMY_SP,
        param_env,
        // It is absolutely crucial for soundness that
        // we do not read from mutable memory.
        CanAccessMutGlobal::No,
    );
    let place = ecx.raw_const_to_mplace(const_alloc).unwrap();
    debug!(?place);

    // Shared node budget for the whole recursive conversion.
    let mut num_nodes = 0;
    let valtree_result = const_to_valtree_inner(&ecx, &place, &mut num_nodes);

    match valtree_result {
        Ok(valtree) => Ok(Some(valtree)),
        Err(err) => {
            let did = cid.instance.def_id();
            let global_const_id = cid.display(tcx);
            let span = tcx.hir().span_if_local(did);
            match err {
                ValTreeCreationError::NodesOverflow => {
                    // Exceeding the node limit is a hard error reported to the user.
                    let handled =
                        tcx.dcx().emit_err(MaxNumNodesInConstErr { span, global_const_id });
                    Err(handled.into())
                }
                // Unsupported type just means "no valtree available", not a failure.
                ValTreeCreationError::NonSupportedType => Ok(None),
            }
        }
    }
}
268
04454e1e
FG
/// Converts a `ValTree` to a `ConstValue`, which is needed after mir
/// construction has finished.
// FIXME Merge `valtree_to_const_value` and `valtree_into_mplace` into one function
#[instrument(skip(tcx), level = "debug", ret)]
pub fn valtree_to_const_value<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
    valtree: ty::ValTree<'tcx>,
) -> mir::ConstValue<'tcx> {
    // Basic idea: We directly construct `Scalar` values from trivial `ValTree`s
    // (those for constants with type bool, int, uint, float or char).
    // For all other types we create an `MPlace` and fill that by walking
    // the `ValTree` and using `place_projection` and `place_field` to
    // create inner `MPlace`s which are filled recursively.
    // FIXME Does this need an example?

    let (param_env, ty) = param_env_ty.into_parts();

    match *ty.kind() {
        // `FnDef` is zero-sized; no data to materialize.
        ty::FnDef(..) => {
            assert!(valtree.unwrap_branch().is_empty());
            mir::ConstValue::ZeroSized
        }
        // Trivial scalars: the valtree leaf *is* the value.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(_, _) => {
            match valtree {
                ty::ValTree::Leaf(scalar_int) => mir::ConstValue::Scalar(Scalar::Int(scalar_int)),
                ty::ValTree::Branch(_) => bug!(
                    "ValTrees for Bool, Int, Uint, Float, Char or RawPtr should have the form ValTree::Leaf"
                ),
            }
        }
        // Pattern types share the valtree of their base type; just retry with the base.
        ty::Pat(ty, _) => valtree_to_const_value(tcx, param_env.and(ty), valtree),
        ty::Ref(_, inner_ty, _) => {
            // References need backing memory: materialize the pointee and take a ref to it.
            let mut ecx =
                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);
            let imm = valtree_to_ref(&mut ecx, valtree, inner_ty);
            let imm = ImmTy::from_immediate(imm, tcx.layout_of(param_env_ty).unwrap());
            op_to_const(&ecx, &imm.into(), /* for diagnostics */ false)
        }
        ty::Tuple(_) | ty::Array(_, _) | ty::Adt(..) => {
            let layout = tcx.layout_of(param_env_ty).unwrap();
            if layout.is_zst() {
                // Fast path to avoid some allocations.
                return mir::ConstValue::ZeroSized;
            }
            if layout.abi.is_scalar()
                && (matches!(ty.kind(), ty::Tuple(_))
                    || matches!(ty.kind(), ty::Adt(def, _) if def.is_struct()))
            {
                // A Scalar tuple/struct; we can avoid creating an allocation.
                let branches = valtree.unwrap_branch();
                // Find the non-ZST field. (There can be aligned ZST!)
                for (i, &inner_valtree) in branches.iter().enumerate() {
                    let field = layout.field(&LayoutCx { tcx, param_env }, i);
                    if !field.is_zst() {
                        // Recurse into the single non-ZST field — its value is the
                        // whole scalar aggregate's value.
                        return valtree_to_const_value(tcx, param_env.and(field.ty), inner_valtree);
                    }
                }
                bug!("could not find non-ZST field during in {layout:#?}");
            }

            let mut ecx =
                mk_eval_cx_to_read_const_val(tcx, DUMMY_SP, param_env, CanAccessMutGlobal::No);

            // Need to create a place for this valtree.
            let place = create_valtree_place(&mut ecx, layout, valtree);

            valtree_into_mplace(&mut ecx, &place, valtree);
            dump_place(&ecx, &place);
            // Freeze the allocation so the resulting constant points at interned memory.
            intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();

            op_to_const(&ecx, &place.into(), /* for diagnostics */ false)
        }
        // These types never get a valtree in the first place (see `const_to_valtree_inner`).
        ty::Never
        | ty::Error(_)
        | ty::Foreign(..)
        | ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Alias(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..)
        | ty::CoroutineWitness(..)
        | ty::FnPtr(_)
        | ty::Str
        | ty::Slice(_)
        | ty::Dynamic(..) => bug!("no ValTree should have been created for type {:?}", ty.kind()),
    }
}
362
781aab86
FG
363/// Put a valtree into memory and return a reference to that.
364fn valtree_to_ref<'tcx>(
365 ecx: &mut CompileTimeEvalContext<'tcx, 'tcx>,
366 valtree: ty::ValTree<'tcx>,
367 pointee_ty: Ty<'tcx>,
368) -> Immediate {
369 let pointee_place = create_valtree_place(ecx, ecx.layout_of(pointee_ty).unwrap(), valtree);
370 debug!(?pointee_place);
371
372 valtree_into_mplace(ecx, &pointee_place, valtree);
373 dump_place(ecx, &pointee_place);
374 intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();
375
376 pointee_place.to_ref(&ecx.tcx)
377}
378
/// Recursively writes the value(s) described by `valtree` into interpreter
/// memory at `place`. Inverse of `const_to_valtree_inner`: the valtree and the
/// type of `place` must match (this is asserted via `unwrap`s and `bug!`).
#[instrument(skip(ecx), level = "debug")]
fn valtree_into_mplace<'tcx>(
    ecx: &mut CompileTimeEvalContext<'tcx, 'tcx>,
    place: &MPlaceTy<'tcx>,
    valtree: ty::ValTree<'tcx>,
) {
    // This will match on valtree and write the value(s) corresponding to the ValTree
    // inside the place recursively.

    let ty = place.layout.ty;

    match ty.kind() {
        ty::FnDef(_, _) => {
            // Zero-sized type, nothing to do.
        }
        // Primitive scalars: a single leaf is written as an immediate.
        ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(..) => {
            let scalar_int = valtree.unwrap_leaf();
            debug!("writing trivial valtree {:?} to place {:?}", scalar_int, place);
            ecx.write_immediate(Immediate::Scalar(scalar_int.into()), place).unwrap();
        }
        ty::Ref(_, inner_ty, _) => {
            // Materialize the pointee in fresh memory and write the reference here.
            let imm = valtree_to_ref(ecx, valtree, *inner_ty);
            debug!(?imm);
            ecx.write_immediate(imm, place).unwrap();
        }
        ty::Adt(_, _) | ty::Tuple(_) | ty::Array(_, _) | ty::Str | ty::Slice(_) => {
            let branches = valtree.unwrap_branch();

            // Need to downcast place for enums
            let (place_adjusted, branches, variant_idx) = match ty.kind() {
                ty::Adt(def, _) if def.is_enum() => {
                    // First element of valtree corresponds to variant
                    let scalar_int = branches[0].unwrap_leaf();
                    let variant_idx = VariantIdx::from_u32(scalar_int.try_to_u32().unwrap());
                    let variant = def.variant(variant_idx);
                    debug!(?variant);

                    (
                        ecx.project_downcast(place, variant_idx).unwrap(),
                        // Skip the variant-index leaf; the rest are the fields.
                        &branches[1..],
                        Some(variant_idx),
                    )
                }
                _ => (place.clone(), branches, None),
            };
            debug!(?place_adjusted, ?branches);

            // Create the places (by indexing into `place`) for the fields and fill
            // them recursively
            for (i, inner_valtree) in branches.iter().enumerate() {
                debug!(?i, ?inner_valtree);

                let place_inner = match ty.kind() {
                    // Slice-like types are indexed by element; everything else by field.
                    ty::Str | ty::Slice(_) | ty::Array(..) => {
                        ecx.project_index(place, i as u64).unwrap()
                    }
                    _ => ecx.project_field(&place_adjusted, i).unwrap(),
                };

                debug!(?place_inner);
                valtree_into_mplace(ecx, &place_inner, *inner_valtree);
                dump_place(ecx, &place_inner);
            }

            debug!("dump of place_adjusted:");
            dump_place(ecx, &place_adjusted);

            if let Some(variant_idx) = variant_idx {
                // don't forget filling the place with the discriminant of the enum
                ecx.write_discriminant(variant_idx, place).unwrap();
            }

            debug!("dump of place after writing discriminant:");
            dump_place(ecx, place);
        }
        _ => bug!("shouldn't have created a ValTree for {:?}", ty),
    }
}
457
add651ee 458fn dump_place<'tcx>(ecx: &CompileTimeEvalContext<'tcx, 'tcx>, place: &MPlaceTy<'tcx>) {
781aab86 459 trace!("{:?}", ecx.dump_place(&PlaceTy::from(place.clone())));
04454e1e 460}