// compiler/rustc_const_eval/src/interpret/intern.rs (upstream rustc 1.71.1)
//! This module specifies the type based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability for the allocations in a `static` initializer: we want to make
//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
//! same time, we need to make memory that could be mutated by the program mutable to avoid
//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
//! always immutable. At least for `const` however we use this opportunity to reject any `const`
//! that contains allocations whose mutability we cannot identify.)
use super::validity::RefTracking;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};

use rustc_ast::Mutability;

use super::{
    AllocId, Allocation, ConstAllocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy,
    ValueVisitor,
};
use crate::const_eval;
fc512014 32pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
9ffffee4
FG
33 'mir,
34 'tcx,
35 MemoryKind = T,
36 Provenance = AllocId,
37 ExtraFnVal = !,
38 FrameExtra = (),
39 AllocExtra = (),
40 MemoryMap = FxIndexMap<AllocId, (MemoryKind<T>, Allocation)>,
41 >;
60c5eb7d 42
fc512014 43struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>> {
e1599b0c 44 /// The ectx from which we intern.
60c5eb7d 45 ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
e1599b0c 46 /// Previously encountered safe references.
f9f354fc 47 ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
e1599b0c
XL
48 /// A list of all encountered allocations. After type-based interning, we traverse this list to
49 /// also intern allocations that are only referenced by a raw pointer or inside a union.
2b03887a 50 leftover_allocations: &'rt mut FxIndexSet<AllocId>,
29967ef6
XL
51 /// The root kind of the value that we're looking at. This field is never mutated for a
52 /// particular allocation. It is primarily used to make as many allocations as possible
53 /// read-only so LLVM can place them in const memory.
dc9dc135 54 mode: InternMode,
f9f354fc
XL
55 /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
56 /// the intern mode of references we encounter.
57 inside_unsafe_cell: bool,
dc9dc135
XL
58}
59
60#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
61enum InternMode {
9c376795 62 /// A static and its current mutability. Below shared references inside a `static mut`,
f9f354fc
XL
63 /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
64 /// is *mutable*.
65 Static(hir::Mutability),
29967ef6
XL
66 /// A `const`.
67 Const,
dc9dc135
XL
68}
69
/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics
struct IsStaticOrFn;
73
e1599b0c
XL
74/// Intern an allocation without looking at its children.
75/// `mode` is the mode of the environment where we found this pointer.
5e7ed085 76/// `mutability` is the mutability of the place to be interned; even if that says
e1599b0c
XL
77/// `immutable` things might become mutable if `ty` is not frozen.
78/// `ty` can be `None` if there is no potential interior mutability
79/// to account for (e.g. for vtables).
fc512014 80fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>(
60c5eb7d 81 ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
2b03887a 82 leftover_allocations: &'rt mut FxIndexSet<AllocId>,
e1599b0c 83 alloc_id: AllocId,
f9f354fc 84 mode: InternMode,
e1599b0c 85 ty: Option<Ty<'tcx>>,
f9f354fc
XL
86) -> Option<IsStaticOrFn> {
87 trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
e1599b0c
XL
88 // remove allocation
89 let tcx = ecx.tcx;
5e7ed085
FG
90 let Some((kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
91 // Pointer not found in local memory map. It is either a pointer to the global
92 // map, or dangling.
93 // If the pointer is dangling (neither in local nor global memory), we leave it
94 // to validation to error -- it has the much better error messages, pointing out where
95 // in the value the dangling reference lies.
96 // The `delay_span_bug` ensures that we don't forget such a check in validation.
064997fb 97 if tcx.try_get_global_alloc(alloc_id).is_none() {
5e7ed085 98 tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
dfeec247 99 }
5e7ed085
FG
100 // treat dangling pointers like other statics
101 // just to stop trying to recurse into them
102 return Some(IsStaticOrFn);
e1599b0c
XL
103 };
104 // This match is just a canary for future changes to `MemoryKind`, which most likely need
105 // changes in this function.
106 match kind {
fc512014
XL
107 MemoryKind::Stack
108 | MemoryKind::Machine(const_eval::MemoryKind::Heap)
fc512014 109 | MemoryKind::CallerLocation => {}
e1599b0c
XL
110 }
111 // Set allocation mutability as appropriate. This is used by LLVM to put things into
ba9703b0 112 // read-only memory, and also by Miri when evaluating other globals that
e1599b0c 113 // access this one.
f9f354fc
XL
114 if let InternMode::Static(mutability) = mode {
115 // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
116 // no interior mutability.
2b03887a 117 let frozen = ty.map_or(true, |ty| ty.is_freeze(*ecx.tcx, ecx.param_env));
29967ef6
XL
118 // For statics, allocation mutability is the combination of place mutability and
119 // type mutability.
e1599b0c 120 // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
f9f354fc
XL
121 let immutable = mutability == Mutability::Not && frozen;
122 if immutable {
dfeec247 123 alloc.mutability = Mutability::Not;
e1599b0c
XL
124 } else {
125 // Just making sure we are not "upgrading" an immutable allocation to mutable.
dfeec247 126 assert_eq!(alloc.mutability, Mutability::Mut);
e1599b0c
XL
127 }
128 } else {
f9f354fc
XL
129 // No matter what, *constants are never mutable*. Mutating them is UB.
130 // See const_eval::machine::MemoryExtra::can_access_statics for why
131 // immutability is so important.
132
29967ef6 133 // Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
dfeec247 134 alloc.mutability = Mutability::Not;
e1599b0c
XL
135 };
136 // link the alloc id to the actual allocation
487cf647 137 leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, alloc_id)| alloc_id));
9ffffee4 138 let alloc = tcx.mk_const_alloc(alloc);
f9f354fc
XL
139 tcx.set_alloc_id_memory(alloc_id, alloc);
140 None
e1599b0c
XL
141}
142
fc512014
XL
143impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
144 InternVisitor<'rt, 'mir, 'tcx, M>
145{
dc9dc135
XL
146 fn intern_shallow(
147 &mut self,
e1599b0c 148 alloc_id: AllocId,
f9f354fc 149 mode: InternMode,
e1599b0c 150 ty: Option<Ty<'tcx>>,
f9f354fc
XL
151 ) -> Option<IsStaticOrFn> {
152 intern_shallow(self.ecx, self.leftover_allocations, alloc_id, mode, ty)
dc9dc135
XL
153 }
154}
155
fc512014
XL
156impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
157 ValueVisitor<'mir, 'tcx, M> for InternVisitor<'rt, 'mir, 'tcx, M>
dc9dc135
XL
158{
159 type V = MPlaceTy<'tcx>;
160
161 #[inline(always)]
60c5eb7d 162 fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
dc9dc135
XL
163 &self.ecx
164 }
165
166 fn visit_aggregate(
167 &mut self,
6a06907d 168 mplace: &MPlaceTy<'tcx>,
dfeec247 169 fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
dc9dc135 170 ) -> InterpResult<'tcx> {
064997fb
FG
171 // We want to walk the aggregate to look for references to intern. While doing that we
172 // also need to take special care of interior mutability.
173 //
174 // As an optimization, however, if the allocation does not contain any references: we don't
175 // need to do the walk. It can be costly for big arrays for example (e.g. issue #93215).
176 let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
177 // ZSTs cannot contain pointers, we can avoid the interning walk.
178 if mplace.layout.is_zst() {
179 return Ok(false);
180 }
181
182 // Now, check whether this allocation could contain references.
183 //
184 // Note, this check may sometimes not be cheap, so we only do it when the walk we'd like
185 // to avoid could be expensive: on the potentially larger types, arrays and slices,
186 // rather than on all aggregates unconditionally.
187 if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
188 let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
189 // We do the walk if we can't determine the size of the mplace: we may be
190 // dealing with extern types here in the future.
191 return Ok(true);
192 };
193
f2b60f7d 194 // If there is no provenance in this allocation, it does not contain references
064997fb
FG
195 // that point to another allocation, and we can avoid the interning walk.
196 if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
f2b60f7d 197 if !alloc.has_provenance() {
064997fb
FG
198 return Ok(false);
199 }
200 } else {
201 // We're encountering a ZST here, and can avoid the walk as well.
202 return Ok(false);
203 }
204 }
205
206 // In the general case, we do the walk.
207 Ok(true)
208 };
209
210 // If this allocation contains no references to intern, we avoid the potentially costly
211 // walk.
212 //
213 // We can do this before the checks for interior mutability below, because only references
214 // are relevant in that situation, and we're checking if there are any here.
215 if !is_walk_needed(mplace)? {
29967ef6
XL
216 return Ok(());
217 }
218
dc9dc135 219 if let Some(def) = mplace.layout.ty.ty_adt_def() {
064997fb 220 if def.is_unsafe_cell() {
e1599b0c
XL
221 // We are crossing over an `UnsafeCell`, we can mutate again. This means that
222 // References we encounter inside here are interned as pointing to mutable
223 // allocations.
f9f354fc
XL
224 // Remember the `old` value to handle nested `UnsafeCell`.
225 let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
dc9dc135 226 let walked = self.walk_aggregate(mplace, fields);
f9f354fc 227 self.inside_unsafe_cell = old;
dc9dc135
XL
228 return walked;
229 }
230 }
29967ef6 231
dc9dc135
XL
232 self.walk_aggregate(mplace, fields)
233 }
234
6a06907d 235 fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
f2b60f7d
FG
236 // Handle Reference types, as these are the only types with provenance supported by const eval.
237 // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
f9f354fc 238 let tcx = self.ecx.tcx;
dc9dc135 239 let ty = mplace.layout.ty;
1b1a35ee 240 if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
064997fb 241 let value = self.ecx.read_immediate(&mplace.into())?;
6a06907d 242 let mplace = self.ecx.ref_to_mplace(&value)?;
f9f354fc 243 assert_eq!(mplace.layout.ty, referenced_ty);
dfeec247 244 // Handle trait object vtables.
9ffffee4 245 if let ty::Dynamic(_, _, ty::Dyn) =
1b1a35ee 246 tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
60c5eb7d 247 {
064997fb 248 let ptr = mplace.meta.unwrap_meta().to_pointer(&tcx)?;
136023e0 249 if let Some(alloc_id) = ptr.provenance {
f9f354fc 250 // Explicitly choose const mode here, since vtables are immutable, even
dfeec247 251 // if the reference of the fat pointer is mutable.
136023e0 252 self.intern_shallow(alloc_id, InternMode::Const, None);
dfeec247 253 } else {
29967ef6 254 // Validation will error (with a better message) on an invalid vtable pointer.
f9f354fc
XL
255 // Let validation show the error message, but make sure it *does* error.
256 tcx.sess
257 .delay_span_bug(tcx.span, "vtables pointers cannot be integer pointers");
dc9dc135
XL
258 }
259 }
dc9dc135
XL
260 // Check if we have encountered this pointer+layout combination before.
261 // Only recurse for allocation-backed pointers.
136023e0 262 if let Some(alloc_id) = mplace.ptr.provenance {
29967ef6
XL
263 // Compute the mode with which we intern this. Our goal here is to make as many
264 // statics as we can immutable so they can be placed in read-only memory by LLVM.
f9f354fc
XL
265 let ref_mode = match self.mode {
266 InternMode::Static(mutbl) => {
267 // In statics, merge outer mutability with reference mutability and
268 // take into account whether we are in an `UnsafeCell`.
dc9dc135 269
f9f354fc
XL
270 // The only way a mutable reference actually works as a mutable reference is
271 // by being in a `static mut` directly or behind another mutable reference.
272 // If there's an immutable reference or we are inside a `static`, then our
273 // mutable reference is equivalent to an immutable one. As an example:
274 // `&&mut Foo` is semantically equivalent to `&&Foo`
275 match ref_mutability {
276 _ if self.inside_unsafe_cell => {
277 // Inside an `UnsafeCell` is like inside a `static mut`, the "outer"
278 // mutability does not matter.
279 InternMode::Static(ref_mutability)
280 }
281 Mutability::Not => {
282 // A shared reference, things become immutable.
29967ef6
XL
283 // We do *not* consider `freeze` here: `intern_shallow` considers
284 // `freeze` for the actual mutability of this allocation; the intern
285 // mode for references contained in this allocation is tracked more
286 // precisely when traversing the referenced data (by tracking
287 // `UnsafeCell`). This makes sure that `&(&i32, &Cell<i32>)` still
288 // has the left inner reference interned into a read-only
289 // allocation.
f9f354fc
XL
290 InternMode::Static(Mutability::Not)
291 }
292 Mutability::Mut => {
293 // Mutable reference.
294 InternMode::Static(mutbl)
295 }
ba9703b0
XL
296 }
297 }
29967ef6 298 InternMode::Const => {
9c376795 299 // Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
29967ef6
XL
300 // checking for mutable references that we encounter -- they must all be
301 // ZST.
302 InternMode::Const
f9f354fc 303 }
dc9dc135 304 };
136023e0 305 match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
dc9dc135
XL
306 // No need to recurse, these are interned already and statics may have
307 // cycles, so we don't want to recurse there
dfeec247 308 Some(IsStaticOrFn) => {}
dc9dc135
XL
309 // intern everything referenced by this value. The mutability is taken from the
310 // reference. It is checked above that mutable references only happen in
311 // `static mut`
f9f354fc 312 None => self.ref_tracking.track((mplace, ref_mode), || ()),
dc9dc135
XL
313 }
314 }
74b04a01
XL
315 Ok(())
316 } else {
317 // Not a reference -- proceed recursively.
318 self.walk_value(mplace)
dc9dc135 319 }
dc9dc135
XL
320 }
321}
322
f9f354fc 323#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
dfeec247
XL
324pub enum InternKind {
325 /// The `mutability` of the static, ignoring the type which may have interior mutability.
326 Static(hir::Mutability),
327 Constant,
328 Promoted,
dfeec247
XL
329}
330
f9f354fc
XL
331/// Intern `ret` and everything it references.
332///
9c376795 333/// This *cannot raise an interpreter error*. Doing so is left to validation, which
f9f354fc 334/// tracks where in the value we are and thus can show much better error messages.
f2b60f7d 335#[instrument(level = "debug", skip(ecx))]
a2a8927a
XL
336pub fn intern_const_alloc_recursive<
337 'mir,
338 'tcx: 'mir,
339 M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
340>(
60c5eb7d 341 ecx: &mut InterpCx<'mir, 'tcx, M>,
dfeec247 342 intern_kind: InternKind,
6a06907d 343 ret: &MPlaceTy<'tcx>,
5e7ed085 344) -> Result<(), ErrorGuaranteed> {
dc9dc135 345 let tcx = ecx.tcx;
f9f354fc
XL
346 let base_intern_mode = match intern_kind {
347 InternKind::Static(mutbl) => InternMode::Static(mutbl),
3dfed10e 348 // `Constant` includes array lengths.
29967ef6 349 InternKind::Constant | InternKind::Promoted => InternMode::Const,
dc9dc135
XL
350 };
351
e1599b0c 352 // Type based interning.
f9f354fc 353 // `ref_tracking` tracks typed references we have already interned and still need to crawl for
e1599b0c
XL
354 // more typed information inside them.
355 // `leftover_allocations` collects *all* allocations we see, because some might not
356 // be available in a typed way. They get interned at the end.
f9f354fc 357 let mut ref_tracking = RefTracking::empty();
2b03887a 358 let leftover_allocations = &mut FxIndexSet::default();
dc9dc135
XL
359
360 // start with the outermost allocation
e1599b0c 361 intern_shallow(
dc9dc135 362 ecx,
e1599b0c 363 leftover_allocations,
dfeec247
XL
364 // The outermost allocation must exist, because we allocated it with
365 // `Memory::allocate`.
136023e0 366 ret.ptr.provenance.unwrap(),
f9f354fc 367 base_intern_mode,
dfeec247 368 Some(ret.layout.ty),
f9f354fc 369 );
dc9dc135 370
6a06907d 371 ref_tracking.track((*ret, base_intern_mode), || ());
f9f354fc
XL
372
373 while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
374 let res = InternVisitor {
dc9dc135
XL
375 ref_tracking: &mut ref_tracking,
376 ecx,
377 mode,
e1599b0c 378 leftover_allocations,
f9f354fc 379 inside_unsafe_cell: false,
dfeec247 380 }
6a06907d 381 .visit_value(&mplace);
9c376795 382 // We deliberately *ignore* interpreter errors here. When there is a problem, the remaining
f9f354fc
XL
383 // references are "leftover"-interned, and later validation will show a proper error
384 // and point at the right part of the value causing the problem.
385 match res {
386 Ok(()) => {}
387 Err(error) => {
388 ecx.tcx.sess.delay_span_bug(
389 ecx.tcx.span,
49aad941 390 format!(
f035d41b
XL
391 "error during interning should later cause validation failure: {}",
392 error
393 ),
f9f354fc 394 );
dc9dc135
XL
395 }
396 }
397 }
398
399 // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
400 // pointers, ... So we can't intern them according to their type rules
401
e1599b0c 402 let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
5e7ed085
FG
403 debug!(?todo);
404 debug!("dead_alloc_map: {:#?}", ecx.memory.dead_alloc_map);
dc9dc135 405 while let Some(alloc_id) = todo.pop() {
e74abb32 406 if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
e1599b0c
XL
407 // We can't call the `intern_shallow` method here, as its logic is tailored to safe
408 // references and a `leftover_allocations` set (where we only have a todo-list here).
409 // So we hand-roll the interning logic here again.
dfeec247 410 match intern_kind {
f2b60f7d 411 // Statics may point to mutable allocations.
dfeec247
XL
412 // Even for immutable statics it would be ok to have mutable allocations behind
413 // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
414 InternKind::Static(_) => {}
415 // Raw pointers in promoteds may only point to immutable things so we mark
416 // everything as immutable.
f9f354fc 417 // It is UB to mutate through a raw pointer obtained via an immutable reference:
dfeec247
XL
418 // Since all references and pointers inside a promoted must by their very definition
419 // be created from an immutable reference (and promotion also excludes interior
420 // mutability), mutating through them would be UB.
421 // There's no way we can check whether the user is using raw pointers correctly,
422 // so all we can do is mark this as immutable here.
423 InternKind::Promoted => {
f9f354fc
XL
424 // See const_eval::machine::MemoryExtra::can_access_statics for why
425 // immutability is so important.
dfeec247
XL
426 alloc.mutability = Mutability::Not;
427 }
f035d41b 428 InternKind::Constant => {
f9f354fc
XL
429 // If it's a constant, we should not have any "leftovers" as everything
430 // is tracked by const-checking.
431 // FIXME: downgrade this to a warning? It rejects some legitimate consts,
432 // such as `const CONST_RAW: *const Vec<i32> = &Vec::new() as *const _;`.
433 ecx.tcx
434 .sess
435 .span_err(ecx.tcx.span, "untyped pointers are not allowed in constant");
436 // For better errors later, mark the allocation as immutable.
437 alloc.mutability = Mutability::Not;
dfeec247 438 }
e1599b0c 439 }
9ffffee4 440 let alloc = tcx.mk_const_alloc(alloc);
f9f354fc 441 tcx.set_alloc_id_memory(alloc_id, alloc);
487cf647 442 for &(_, alloc_id) in alloc.inner().provenance().ptrs().iter() {
136023e0
XL
443 if leftover_allocations.insert(alloc_id) {
444 todo.push(alloc_id);
dc9dc135
XL
445 }
446 }
e74abb32 447 } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
f9f354fc
XL
448 // Codegen does not like dangling pointers, and generally `tcx` assumes that
449 // all allocations referenced anywhere actually exist. So, make sure we error here.
5e7ed085
FG
450 let reported = ecx
451 .tcx
452 .sess
453 .span_err(ecx.tcx.span, "encountered dangling pointer in final constant");
454 return Err(reported);
064997fb 455 } else if ecx.tcx.try_get_global_alloc(alloc_id).is_none() {
f9f354fc 456 // We have hit an `AllocId` that is neither in local or global memory and isn't
9c376795 457 // marked as dangling by local memory. That should be impossible.
dfeec247 458 span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id);
dc9dc135
XL
459 }
460 }
29967ef6 461 Ok(())
dc9dc135 462}
3dfed10e 463
fc512014
XL
464impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
465 InterpCx<'mir, 'tcx, M>
466{
3dfed10e
XL
467 /// A helper function that allocates memory for the layout given and gives you access to mutate
468 /// it. Once your own mutation code is done, the backing `Allocation` is removed from the
469 /// current `Memory` and returned.
c295e0f8 470 pub fn intern_with_temp_alloc(
3dfed10e
XL
471 &mut self,
472 layout: TyAndLayout<'tcx>,
473 f: impl FnOnce(
474 &mut InterpCx<'mir, 'tcx, M>,
064997fb 475 &PlaceTy<'tcx, M::Provenance>,
3dfed10e 476 ) -> InterpResult<'tcx, ()>,
5e7ed085 477 ) -> InterpResult<'tcx, ConstAllocation<'tcx>> {
136023e0
XL
478 let dest = self.allocate(layout, MemoryKind::Stack)?;
479 f(self, &dest.into())?;
480 let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
3dfed10e 481 alloc.mutability = Mutability::Not;
9ffffee4 482 Ok(self.tcx.mk_const_alloc(alloc))
3dfed10e
XL
483 }
484}