//! This module specifies the type-based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability for the allocations in a `static` initializer: we want to make
//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
//! same time, we need to make memory that could be mutated by the program mutable, to avoid
//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
//! (In principle, we could skip this type-based part for `const`s and promoteds, as they need to
//! always be immutable. At least for `const`, however, we use this opportunity to reject any
//! `const` that contains allocations whose mutability we cannot identify.)
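//!
//! For illustration, a sketch of the outcomes for some `static` initializers (assuming the
//! type-based traversal described above; the exact allocations can depend on promotion details):
//!
//! ```ignore (illustrative)
//! static A: &i32 = &0;                           // frozen data behind `&`: interned read-only
//! static B: &AtomicUsize = &AtomicUsize::new(0); // contains an `UnsafeCell`: stays mutable
//! static mut C: &mut i32 = &mut 0;               // mutable ref at `static mut` root: stays mutable
//! ```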

use super::validity::RefTracking;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};

use rustc_ast::Mutability;

use super::{
    AllocId, Allocation, ConstAllocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy,
    ValueVisitor,
};
use crate::const_eval;

pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
    'mir,
    'tcx,
    MemoryKind = T,
    PointerTag = AllocId,
    ExtraFnVal = !,
    FrameExtra = (),
    AllocExtra = (),
    MemoryMap = FxHashMap<AllocId, (MemoryKind<T>, Allocation)>,
>;

struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>> {
    /// The `ecx` from which we intern.
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    /// Previously encountered safe references.
    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
    /// A list of all encountered allocations. After type-based interning, we traverse this list
    /// to also intern allocations that are only referenced by a raw pointer or inside a union.
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    /// The root kind of the value that we're looking at. This field is never mutated for a
    /// particular allocation. It is primarily used to make as many allocations as possible
    /// read-only so LLVM can place them in const memory.
    mode: InternMode,
    /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
    /// the intern mode of references we encounter.
    inside_unsafe_cell: bool,
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
enum InternMode {
    /// A static and its current mutability. Below shared references inside a `static mut`,
    /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
    /// is *mutable*.
    Static(hir::Mutability),
    /// A `const`.
    Const,
}

/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics.
struct IsStaticOrFn;

/// Intern an allocation without looking at its children.
/// `mode` is the mode of the environment where we found this pointer.
/// For statics, `mode` also carries the mutability of the place to be interned; even if that
/// says `immutable`, things might become mutable if `ty` is not frozen.
/// `ty` can be `None` if there is no potential interior mutability
/// to account for (e.g. for vtables).
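///
/// A minimal sketch of a call (mirroring the vtable case in `visit_value` below;
/// `vtable_alloc_id` is a hypothetical name):
///
/// ```ignore (illustrative)
/// // Vtables are always immutable and have no interior mutability, so `ty` is `None`.
/// intern_shallow(ecx, leftover_allocations, vtable_alloc_id, InternMode::Const, None);
/// ```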
fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>(
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    alloc_id: AllocId,
    mode: InternMode,
    ty: Option<Ty<'tcx>>,
) -> Option<IsStaticOrFn> {
    trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
    // Remove the allocation from the local memory map; interning moves it to the global `tcx` map.
    let tcx = ecx.tcx;
    let Some((kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
        // Pointer not found in local memory map. It is either a pointer to the global
        // map, or dangling.
        // If the pointer is dangling (neither in local nor global memory), we leave it
        // to validation to error -- it has the much better error messages, pointing out where
        // in the value the dangling reference lies.
        // The `delay_span_bug` ensures that we don't forget such a check in validation.
        if tcx.get_global_alloc(alloc_id).is_none() {
            tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
        }
        // Treat dangling pointers like other statics, just to stop trying to recurse into them.
        return Some(IsStaticOrFn);
    };
    // This match is just a canary for future changes to `MemoryKind`, which most likely need
    // changes in this function.
    match kind {
        MemoryKind::Stack
        | MemoryKind::Machine(const_eval::MemoryKind::Heap)
        | MemoryKind::CallerLocation => {}
    }
    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    if let InternMode::Static(mutability) = mode {
        // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
        // no interior mutability.
        let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx, ecx.param_env));
        // For statics, allocation mutability is the combination of place mutability and
        // type mutability.
        // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
        let immutable = mutability == Mutability::Not && frozen;
        if immutable {
            alloc.mutability = Mutability::Not;
        } else {
            // Just making sure we are not "upgrading" an immutable allocation to mutable.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    } else {
        // No matter what, *constants are never mutable*. Mutating them is UB.
        // See const_eval::machine::MemoryExtra::can_access_statics for why
        // immutability is so important.

        // Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
        alloc.mutability = Mutability::Not;
    };
    // Link the alloc id to the actual allocation.
    leftover_allocations.extend(alloc.relocations().iter().map(|&(_, alloc_id)| alloc_id));
    let alloc = tcx.intern_const_alloc(alloc);
    tcx.set_alloc_id_memory(alloc_id, alloc);
    None
}

impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
    InternVisitor<'rt, 'mir, 'tcx, M>
{
    fn intern_shallow(
        &mut self,
        alloc_id: AllocId,
        mode: InternMode,
        ty: Option<Ty<'tcx>>,
    ) -> Option<IsStaticOrFn> {
        intern_shallow(self.ecx, self.leftover_allocations, alloc_id, mode, ty)
    }
}

impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
    ValueVisitor<'mir, 'tcx, M> for InternVisitor<'rt, 'mir, 'tcx, M>
{
    type V = MPlaceTy<'tcx>;

    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
        &self.ecx
    }

    fn visit_aggregate(
        &mut self,
        mplace: &MPlaceTy<'tcx>,
        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
    ) -> InterpResult<'tcx> {
        // ZSTs cannot contain pointers, so we can skip them.
        if mplace.layout.is_zst() {
            return Ok(());
        }

        if let Some(def) = mplace.layout.ty.ty_adt_def() {
            if Some(def.did()) == self.ecx.tcx.lang_items().unsafe_cell_type() {
                // We are crossing over an `UnsafeCell`, we can mutate again. This means that
                // references we encounter inside here are interned as pointing to mutable
                // allocations.
                // Remember the `old` value to handle nested `UnsafeCell`.
                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
                let walked = self.walk_aggregate(mplace, fields);
                self.inside_unsafe_cell = old;
                return walked;
            }
        }

        self.walk_aggregate(mplace, fields)
    }

    fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
        // Handle reference types, as these are the only relocations supported by const eval.
        // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
        let tcx = self.ecx.tcx;
        let ty = mplace.layout.ty;
        if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
            let value = self.ecx.read_immediate(&(*mplace).into())?;
            let mplace = self.ecx.ref_to_mplace(&value)?;
            assert_eq!(mplace.layout.ty, referenced_ty);
            // Handle trait object vtables.
            if let ty::Dynamic(..) =
                tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
            {
                let ptr = self.ecx.scalar_to_ptr(mplace.meta.unwrap_meta())?;
                if let Some(alloc_id) = ptr.provenance {
                    // Explicitly choose const mode here, since vtables are immutable, even
                    // if the reference of the fat pointer is mutable.
                    self.intern_shallow(alloc_id, InternMode::Const, None);
                } else {
                    // Validation will error (with a better message) on an invalid vtable pointer.
                    // Let validation show the error message, but make sure it *does* error.
                    tcx.sess
                        .delay_span_bug(tcx.span, "vtable pointers cannot be integer pointers");
                }
            }
            // Check if we have encountered this pointer+layout combination before.
            // Only recurse for allocation-backed pointers.
            if let Some(alloc_id) = mplace.ptr.provenance {
                // Compute the mode with which we intern this. Our goal here is to make as many
                // statics as we can immutable so they can be placed in read-only memory by LLVM.
                let ref_mode = match self.mode {
                    InternMode::Static(mutbl) => {
                        // In statics, merge outer mutability with reference mutability and
                        // take into account whether we are in an `UnsafeCell`.

                        // The only way a mutable reference actually works as a mutable reference is
                        // by being in a `static mut` directly or behind another mutable reference.
                        // If there's an immutable reference or we are inside a `static`, then our
                        // mutable reference is equivalent to an immutable one. As an example:
                        // `&&mut Foo` is semantically equivalent to `&&Foo`.
                        match ref_mutability {
                            _ if self.inside_unsafe_cell => {
                                // Inside an `UnsafeCell` is like inside a `static mut`, the "outer"
                                // mutability does not matter.
                                InternMode::Static(ref_mutability)
                            }
                            Mutability::Not => {
                                // A shared reference, things become immutable.
                                // We do *not* consider `freeze` here: `intern_shallow` considers
                                // `freeze` for the actual mutability of this allocation; the intern
                                // mode for references contained in this allocation is tracked more
                                // precisely when traversing the referenced data (by tracking
                                // `UnsafeCell`). This makes sure that `&(&i32, &Cell<i32>)` still
                                // has the left inner reference interned into a read-only
                                // allocation.
                                InternMode::Static(Mutability::Not)
                            }
                            Mutability::Mut => {
                                // Mutable reference.
                                InternMode::Static(mutbl)
                            }
                        }
                    }
                    InternMode::Const => {
                        // Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
                        // checking for mutable references that we encounter -- they must all be
                        // ZST.
                        InternMode::Const
                    }
                };
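                // Worked example (a recap of the `ref_mode` computation above, not new logic):
                // interning a hypothetical `static mut S: &mut &i32` visits the outer `&mut`
                // with `Static(Mut)` and keeps `Static(Mut)` for it, then switches to
                // `Static(Not)` below the inner `&`, so the innermost allocation can be
                // interned read-only.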
                match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
                    // No need to recurse, these are interned already and statics may have
                    // cycles, so we don't want to recurse there.
                    Some(IsStaticOrFn) => {}
                    // Intern everything referenced by this value. The mutability is taken from the
                    // reference. It is checked above that mutable references only happen in
                    // `static mut`.
                    None => self.ref_tracking.track((mplace, ref_mode), || ()),
                }
            }
            Ok(())
        } else {
            // Not a reference -- proceed recursively.
            self.walk_value(mplace)
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    Constant,
    Promoted,
}
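
// For illustration (a sketch of how the kinds map to source constructs): `static S: i32 = 0;`
// interns with `InternKind::Static(Mutability::Not)`, `const C: i32 = 0;` with
// `InternKind::Constant`, and the promoted `&0` in `let x: &'static i32 = &0;` with
// `InternKind::Promoted`.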

/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
/// Any errors here would anyway be turned into `const_err` lints, whereas validation failures
/// are hard errors.
#[tracing::instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
    'mir,
    'tcx: 'mir,
    M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
>(
    ecx: &mut InterpCx<'mir, 'tcx, M>,
    intern_kind: InternKind,
    ret: &MPlaceTy<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    let tcx = ecx.tcx;
    let base_intern_mode = match intern_kind {
        InternKind::Static(mutbl) => InternMode::Static(mutbl),
        // `Constant` includes array lengths.
        InternKind::Constant | InternKind::Promoted => InternMode::Const,
    };

    // Type-based interning.
    // `ref_tracking` tracks typed references we have already interned and still need to crawl for
    // more typed information inside them.
    // `leftover_allocations` collects *all* allocations we see, because some might not
    // be available in a typed way. They get interned at the end.
    let mut ref_tracking = RefTracking::empty();
    let leftover_allocations = &mut FxHashSet::default();

    // Start with the outermost allocation.
    intern_shallow(
        ecx,
        leftover_allocations,
        // The outermost allocation must exist, because we allocated it with
        // `Memory::allocate`.
        ret.ptr.provenance.unwrap(),
        base_intern_mode,
        Some(ret.layout.ty),
    );

    ref_tracking.track((*ret, base_intern_mode), || ());

    while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
        let res = InternVisitor {
            ref_tracking: &mut ref_tracking,
            ecx,
            mode,
            leftover_allocations,
            inside_unsafe_cell: false,
        }
        .visit_value(&mplace);
        // We deliberately *ignore* interpreter errors here. When there is a problem, the remaining
        // references are "leftover"-interned, and later validation will show a proper error
        // and point at the right part of the value causing the problem.
        match res {
            Ok(()) => {}
            Err(error) => {
                ecx.tcx.sess.delay_span_bug(
                    ecx.tcx.span,
                    &format!(
                        "error during interning should later cause validation failure: {}",
                        error
                    ),
                );
            }
        }
    }

    // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
    // pointers, ... so we cannot intern them according to their type rules.

    let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
    debug!(?todo);
    debug!("dead_alloc_map: {:#?}", ecx.memory.dead_alloc_map);
    while let Some(alloc_id) = todo.pop() {
        if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
            // We can't call the `intern_shallow` method here, as its logic is tailored to safe
            // references and a `leftover_allocations` set (where we only have a todo-list here).
            // So we hand-roll the interning logic here again.
            match intern_kind {
                // Statics may contain mutable allocations even behind relocations.
                // Even for immutable statics it would be ok to have mutable allocations behind
                // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
                InternKind::Static(_) => {}
                // Raw pointers in promoteds may only point to immutable things, so we mark
                // everything as immutable.
                // It is UB to mutate through a raw pointer obtained via an immutable reference:
                // since all references and pointers inside a promoted must by their very definition
                // be created from an immutable reference (and promotion also excludes interior
                // mutability), mutating through them would be UB.
                // There's no way we can check whether the user is using raw pointers correctly,
                // so all we can do is mark this as immutable here.
                InternKind::Promoted => {
                    // See const_eval::machine::MemoryExtra::can_access_statics for why
                    // immutability is so important.
                    alloc.mutability = Mutability::Not;
                }
                InternKind::Constant => {
                    // If it's a constant, we should not have any "leftovers" as everything
                    // is tracked by const-checking.
                    // FIXME: downgrade this to a warning? It rejects some legitimate consts,
                    // such as `const CONST_RAW: *const Vec<i32> = &Vec::new() as *const _;`.
                    ecx.tcx
                        .sess
                        .span_err(ecx.tcx.span, "untyped pointers are not allowed in constant");
                    // For better errors later, mark the allocation as immutable.
                    alloc.mutability = Mutability::Not;
                }
            }
            let alloc = tcx.intern_const_alloc(alloc);
            tcx.set_alloc_id_memory(alloc_id, alloc);
            for &(_, alloc_id) in alloc.inner().relocations().iter() {
                if leftover_allocations.insert(alloc_id) {
                    todo.push(alloc_id);
                }
            }
        } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
            // Codegen does not like dangling pointers, and generally `tcx` assumes that
            // all allocations referenced anywhere actually exist. So, make sure we error here.
            let reported = ecx
                .tcx
                .sess
                .span_err(ecx.tcx.span, "encountered dangling pointer in final constant");
            return Err(reported);
        } else if ecx.tcx.get_global_alloc(alloc_id).is_none() {
            // We have hit an `AllocId` that is neither in local nor global memory and isn't
            // marked as dangling by local memory. That should be impossible.
            span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id);
        }
    }
    Ok(())
}
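
// A minimal sketch of a call site (hypothetical surrounding code; the real callers live in
// the `const_eval` machinery):
//
//     let ret = ecx.allocate(layout, MemoryKind::Stack)?;
//     // ... evaluate the constant's body into `ret` ...
//     intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &ret)?;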

impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
    InterpCx<'mir, 'tcx, M>
{
    /// A helper function that allocates memory for the given layout and gives you access to
    /// mutate it. Once your mutation code is done, the backing `Allocation` is removed from the
    /// current `Memory`, marked immutable, interned, and returned.
    pub fn intern_with_temp_alloc(
        &mut self,
        layout: TyAndLayout<'tcx>,
        f: impl FnOnce(
            &mut InterpCx<'mir, 'tcx, M>,
            &PlaceTy<'tcx, M::PointerTag>,
        ) -> InterpResult<'tcx, ()>,
    ) -> InterpResult<'tcx, ConstAllocation<'tcx>> {
        let dest = self.allocate(layout, MemoryKind::Stack)?;
        f(self, &dest.into())?;
        let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
        alloc.mutability = Mutability::Not;
        Ok(self.tcx.intern_const_alloc(alloc))
    }
}
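
// A minimal usage sketch (hypothetical `layout` and value, for illustration only):
//
//     let alloc = ecx.intern_with_temp_alloc(layout, |ecx, dest| {
//         // Write a zero of the appropriate type into the temporary place.
//         ecx.write_scalar(Scalar::from_u32(0), dest)
//     })?;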