]> git.proxmox.com Git - rustc.git/blame - src/librustc_mir/interpret/intern.rs
New upstream version 1.45.0+dfsg1
[rustc.git] / src / librustc_mir / interpret / intern.rs
CommitLineData
dc9dc135
XL
1//! This module specifies the type based interner for constants.
2//!
3//! After a const evaluation has computed a value, before we destroy the const evaluator's session
4//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
5
dc9dc135 6use super::validity::RefTracking;
60c5eb7d 7use rustc_data_structures::fx::{FxHashMap, FxHashSet};
dfeec247 8use rustc_hir as hir;
f9f354fc
XL
9use rustc_middle::mir::interpret::InterpResult;
10use rustc_middle::ty::{self, query::TyCtxtAt, Ty};
dc9dc135 11
74b04a01 12use rustc_ast::ast::Mutability;
dc9dc135 13
dfeec247 14use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, Scalar, ValueVisitor};
dc9dc135 15
dfeec247
XL
/// The `Machine` configuration used for compile-time evaluation: no machine-specific
/// memory kinds or function values (both `!`, i.e. uninhabited), no pointer tags, and
/// no extra per-frame or per-allocation state. The memory map is a plain `FxHashMap`.
pub trait CompileTimeMachine<'mir, 'tcx> = Machine<
    'mir,
    'tcx,
    MemoryKind = !,
    PointerTag = (),
    ExtraFnVal = !,
    FrameExtra = (),
    AllocExtra = (),
    MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>,
>;
60c5eb7d
XL
26
/// Value visitor that walks a const-eval result and interns every allocation it can
/// reach in a typed way. See `intern_const_alloc_recursive` for the driver.
struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> {
    /// The interpreter context (`InterpCx`) from which we intern.
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    /// Previously encountered safe references, used both as a dedup set and as the
    /// work list of places still to be visited.
    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
    /// A list of all encountered allocations. After type-based interning, we traverse this list to
    /// also intern allocations that are only referenced by a raw pointer or inside a union.
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    /// The root kind of the value that we're looking at. This field is never mutated and only used
    /// for sanity assertions that will ICE when `const_qualif` screws up.
    mode: InternMode,
    /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
    /// the intern mode of references we encounter.
    inside_unsafe_cell: bool,

    /// If `true`, suppresses the "mutable memory (`UnsafeCell`) is not allowed in constant"
    /// error that would otherwise be emitted for promoteds.
    /// It's a copy of `mir::Body`'s `ignore_interior_mut_in_const_validation` field.
    ignore_interior_mut_in_const: bool,
}
47
/// How the value currently being interned may be mutated, which determines the
/// mutability stamped onto each interned allocation (see `intern_shallow`).
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
enum InternMode {
    /// A static and its current mutability. Below shared references inside a `static mut`,
    /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
    /// is *mutable*.
    Static(hir::Mutability),
    /// The "base value" of a const, which can have `UnsafeCell` (as in `const FOO: Cell<i32>`),
    /// but that interior mutability is simply ignored.
    ConstBase,
    /// The "inner values" of a const with references, where `UnsafeCell` is an error.
    ConstInner,
}
60
/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics.
/// Returned by `intern_shallow` when the allocation was already global (or dangling).
struct IsStaticOrFn;
64
f9f354fc
XL
65fn mutable_memory_in_const(tcx: TyCtxtAt<'_>, kind: &str) {
66 // FIXME: show this in validation instead so we can point at where in the value the error is?
67 tcx.sess.span_err(tcx.span, &format!("mutable memory ({}) is not allowed in constant", kind));
68}
69
e1599b0c
XL
/// Intern an allocation without looking at its children.
/// `mode` is the mode of the environment where we found this pointer; for
/// `InternMode::Static` it carries the mutability of the place to be interned — even
/// if that says *immutable*, things might become mutable if `ty` is not frozen.
/// `ty` can be `None` if there is no potential interior mutability
/// to account for (e.g. for vtables).
///
/// Returns `Some(IsStaticOrFn)` when `alloc_id` is not in the local memory map
/// (it is already-interned global memory, or dangling) so the caller must not recurse
/// into it; returns `None` after successfully interning a local allocation.
fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>(
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    alloc_id: AllocId,
    mode: InternMode,
    ty: Option<Ty<'tcx>>,
) -> Option<IsStaticOrFn> {
    trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
    // Remove the allocation from the local (interpreter) memory map; interning moves
    // it into the global `tcx` map below.
    let tcx = ecx.tcx;
    let (kind, mut alloc) = match ecx.memory.alloc_map.remove(&alloc_id) {
        Some(entry) => entry,
        None => {
            // Pointer not found in local memory map. It is either a pointer to the global
            // map, or dangling.
            // If the pointer is dangling (neither in local nor global memory), we leave it
            // to validation to error -- it has the much better error messages, pointing out where
            // in the value the dangling reference lies.
            // The `delay_span_bug` ensures that we don't forget such a check in validation.
            if tcx.get_global_alloc(alloc_id).is_none() {
                tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
            }
            // Treat dangling pointers like other statics,
            // just to stop trying to recurse into them.
            return Some(IsStaticOrFn);
        }
    };
    // This match is just a canary for future changes to `MemoryKind`, which most likely need
    // changes in this function.
    match kind {
        MemoryKind::Stack | MemoryKind::Vtable | MemoryKind::CallerLocation => {}
    }
    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    if let InternMode::Static(mutability) = mode {
        // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
        // no interior mutability.
        let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx.tcx, ecx.param_env, ecx.tcx.span));
        // For statics, allocation mutability is the combination of the place mutability and
        // the type mutability.
        // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
        let immutable = mutability == Mutability::Not && frozen;
        if immutable {
            alloc.mutability = Mutability::Not;
        } else {
            // Just making sure we are not "upgrading" an immutable allocation to mutable.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    } else {
        // No matter what, *constants are never mutable*. Mutating them is UB.
        // See const_eval::machine::MemoryExtra::can_access_statics for why
        // immutability is so important.

        // There are no sensible checks we can do here; grep for `mutable_memory_in_const` to
        // find the checks we are doing elsewhere to avoid even getting here for memory
        // that "wants" to be mutable.
        alloc.mutability = Mutability::Not;
    };
    // Link the alloc id to the actual allocation in the global map.
    let alloc = tcx.intern_const_alloc(alloc);
    // Remember every allocation this one points to, so untyped leftovers get interned later.
    leftover_allocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc));
    tcx.set_alloc_id_memory(alloc_id, alloc);
    None
}
141
impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> InternVisitor<'rt, 'mir, 'tcx, M> {
    /// Convenience wrapper around the free function `intern_shallow`, supplying this
    /// visitor's interpreter context and leftover-allocation set.
    fn intern_shallow(
        &mut self,
        alloc_id: AllocId,
        mode: InternMode,
        ty: Option<Ty<'tcx>>,
    ) -> Option<IsStaticOrFn> {
        intern_shallow(self.ecx, self.leftover_allocations, alloc_id, mode, ty)
    }
}
152
impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
    for InternVisitor<'rt, 'mir, 'tcx, M>
{
    type V = MPlaceTy<'tcx>;

    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
        &self.ecx
    }

    /// Walks an aggregate, special-casing `UnsafeCell`: inside one, references are
    /// interned as pointing to mutable allocations (tracked via `inside_unsafe_cell`).
    fn visit_aggregate(
        &mut self,
        mplace: MPlaceTy<'tcx>,
        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
    ) -> InterpResult<'tcx> {
        if let Some(def) = mplace.layout.ty.ty_adt_def() {
            if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
                if self.mode == InternMode::ConstInner && !self.ignore_interior_mut_in_const {
                    // We do not actually make this memory mutable. But in case the user
                    // *expected* it to be mutable, make sure we error. This is just a
                    // sanity check to prevent users from accidentally exploiting the UB
                    // they caused. It also helps us to find cases where const-checking
                    // failed to prevent an `UnsafeCell` (but as `ignore_interior_mut_in_const`
                    // shows that part is not airtight).
                    mutable_memory_in_const(self.ecx.tcx, "`UnsafeCell`");
                }
                // We are crossing over an `UnsafeCell`, we can mutate again. This means that
                // references we encounter inside here are interned as pointing to mutable
                // allocations.
                // Remember the `old` value to handle nested `UnsafeCell`.
                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
                let walked = self.walk_aggregate(mplace, fields);
                self.inside_unsafe_cell = old;
                return walked;
            }
        }
        self.walk_aggregate(mplace, fields)
    }

    /// Interns the allocation behind a reference (including its vtable for trait
    /// objects) and queues the pointee for further typed traversal; non-reference
    /// values are simply walked recursively.
    fn visit_value(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> {
        // Handle reference types, as these are the only relocations supported by const eval.
        // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
        let tcx = self.ecx.tcx;
        let ty = mplace.layout.ty;
        if let ty::Ref(_, referenced_ty, ref_mutability) = ty.kind {
            let value = self.ecx.read_immediate(mplace.into())?;
            let mplace = self.ecx.ref_to_mplace(value)?;
            assert_eq!(mplace.layout.ty, referenced_ty);
            // Handle trait object vtables.
            if let ty::Dynamic(..) =
                tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind
            {
                // Validation will error (with a better message) on an invalid vtable pointer
                // so we can safely not do anything if this is not a real pointer.
                if let Scalar::Ptr(vtable) = mplace.meta.unwrap_meta() {
                    // Explicitly choose const mode here, since vtables are immutable, even
                    // if the reference of the fat pointer is mutable.
                    self.intern_shallow(vtable.alloc_id, InternMode::ConstInner, None);
                } else {
                    // Let validation show the error message, but make sure it *does* error.
                    tcx.sess
                        .delay_span_bug(tcx.span, "vtables pointers cannot be integer pointers");
                }
            }
            // Check if we have encountered this pointer+layout combination before.
            // Only recurse for allocation-backed pointers.
            if let Scalar::Ptr(ptr) = mplace.ptr {
                // Compute the mode with which we intern this.
                let ref_mode = match self.mode {
                    InternMode::Static(mutbl) => {
                        // In statics, merge outer mutability with reference mutability and
                        // take into account whether we are in an `UnsafeCell`.

                        // The only way a mutable reference actually works as a mutable reference is
                        // by being in a `static mut` directly or behind another mutable reference.
                        // If there's an immutable reference or we are inside a `static`, then our
                        // mutable reference is equivalent to an immutable one. As an example:
                        // `&&mut Foo` is semantically equivalent to `&&Foo`.
                        match ref_mutability {
                            _ if self.inside_unsafe_cell => {
                                // Inside an `UnsafeCell` is like inside a `static mut`, the "outer"
                                // mutability does not matter.
                                InternMode::Static(ref_mutability)
                            }
                            Mutability::Not => {
                                // A shared reference, things become immutable.
                                // We do *not* consider `freeze` here -- that is done more precisely
                                // when traversing the referenced data (by tracking `UnsafeCell`).
                                InternMode::Static(Mutability::Not)
                            }
                            Mutability::Mut => {
                                // Mutable reference.
                                InternMode::Static(mutbl)
                            }
                        }
                    }
                    InternMode::ConstBase | InternMode::ConstInner => {
                        // Ignore `UnsafeCell`, everything is immutable. Do some sanity checking
                        // for mutable references that we encounter -- they must all be ZST.
                        // This helps to prevent users from accidentally exploiting UB that they
                        // caused (by somehow getting a mutable reference in a `const`).
                        if ref_mutability == Mutability::Mut {
                            match referenced_ty.kind {
                                // `&mut [T; 0]` and `&mut [T]` of length 0 are the only
                                // zero-sized mutable references we accept.
                                ty::Array(_, n)
                                    if n.eval_usize(tcx.tcx, self.ecx.param_env) == 0 => {}
                                ty::Slice(_)
                                    if mplace.meta.unwrap_meta().to_machine_usize(self.ecx)?
                                        == 0 => {}
                                _ => mutable_memory_in_const(tcx, "`&mut`"),
                            }
                        } else {
                            // A shared reference. We cannot check `freeze` here due to references
                            // like `&dyn Trait` that are actually immutable. We do check for
                            // concrete `UnsafeCell` when traversing the pointee though (if it is
                            // a new allocation, not yet interned).
                        }
                        // Go on with the "inner" rules.
                        InternMode::ConstInner
                    }
                };
                match self.intern_shallow(ptr.alloc_id, ref_mode, Some(referenced_ty)) {
                    // No need to recurse, these are interned already and statics may have
                    // cycles, so we don't want to recurse there.
                    Some(IsStaticOrFn) => {}
                    // Intern everything referenced by this value. The mutability is taken from the
                    // reference. It is checked above that mutable references only happen in
                    // `static mut`.
                    None => self.ref_tracking.track((mplace, ref_mode), || ()),
                }
            }
            Ok(())
        } else {
            // Not a reference -- proceed recursively.
            self.walk_value(mplace)
        }
    }
}
290
/// The kind of evaluation whose result is being interned; determines the base
/// `InternMode` and how "leftover" (untyped) allocations are treated at the end of
/// `intern_const_alloc_recursive`.
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    /// A `const` item's final value.
    Constant,
    /// A promoted constant (leftover allocations are forced immutable, see below).
    Promoted,
    /// A value produced by const propagation; treated like `Constant` for leftovers.
    ConstProp,
}
299
f9f354fc
XL
/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
/// Any errors here would anyway be turned into `const_err` lints, whereas validation failures
/// are hard errors.
///
/// Works in two phases: first a typed traversal via `InternVisitor`, then a fixpoint
/// loop over `leftover_allocations` that were only reachable untyped (raw pointers,
/// unions, padding).
pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
    ecx: &mut InterpCx<'mir, 'tcx, M>,
    intern_kind: InternKind,
    ret: MPlaceTy<'tcx>,
    ignore_interior_mut_in_const: bool,
) where
    'tcx: 'mir,
{
    let tcx = ecx.tcx;
    let base_intern_mode = match intern_kind {
        InternKind::Static(mutbl) => InternMode::Static(mutbl),
        // FIXME: what about array lengths, array initializers?
        InternKind::Constant | InternKind::ConstProp | InternKind::Promoted => {
            InternMode::ConstBase
        }
    };

    // Type based interning.
    // `ref_tracking` tracks typed references we have already interned and still need to crawl for
    // more typed information inside them.
    // `leftover_allocations` collects *all* allocations we see, because some might not
    // be available in a typed way. They get interned at the end.
    let mut ref_tracking = RefTracking::empty();
    let leftover_allocations = &mut FxHashSet::default();

    // Start with the outermost allocation.
    intern_shallow(
        ecx,
        leftover_allocations,
        // The outermost allocation must exist, because we allocated it with
        // `Memory::allocate`.
        ret.ptr.assert_ptr().alloc_id,
        base_intern_mode,
        Some(ret.layout.ty),
    );

    ref_tracking.track((ret, base_intern_mode), || ());

    // Drain the typed work list; visiting a value may push more references onto it.
    while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
        let res = InternVisitor {
            ref_tracking: &mut ref_tracking,
            ecx,
            mode,
            leftover_allocations,
            ignore_interior_mut_in_const,
            inside_unsafe_cell: false,
        }
        .visit_value(mplace);
        // We deliberately *ignore* interpreter errors here. When there is a problem, the remaining
        // references are "leftover"-interned, and later validation will show a proper error
        // and point at the right part of the value causing the problem.
        match res {
            Ok(()) => {}
            Err(error) => {
                ecx.tcx.sess.delay_span_bug(
                    ecx.tcx.span,
                    "error during interning should later cause validation failure",
                );
                // Some errors shouldn't come up because creating them causes
                // an allocation, which we should avoid. When that happens,
                // dedicated error variants should be introduced instead.
                assert!(
                    !error.kind.allocates(),
                    "interning encountered allocating error: {}",
                    error
                );
            }
        }
    }

    // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
    // pointers, ... So we can't intern them according to their type rules.

    let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
    while let Some(alloc_id) = todo.pop() {
        if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
            // We can't call the `intern_shallow` method here, as its logic is tailored to safe
            // references and a `leftover_allocations` set (where we only have a todo-list here).
            // So we hand-roll the interning logic here again.
            match intern_kind {
                // Statics may contain mutable allocations even behind relocations.
                // Even for immutable statics it would be ok to have mutable allocations behind
                // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
                InternKind::Static(_) => {}
                // Raw pointers in promoteds may only point to immutable things so we mark
                // everything as immutable.
                // It is UB to mutate through a raw pointer obtained via an immutable reference:
                // Since all references and pointers inside a promoted must by their very definition
                // be created from an immutable reference (and promotion also excludes interior
                // mutability), mutating through them would be UB.
                // There's no way we can check whether the user is using raw pointers correctly,
                // so all we can do is mark this as immutable here.
                InternKind::Promoted => {
                    // See const_eval::machine::MemoryExtra::can_access_statics for why
                    // immutability is so important.
                    alloc.mutability = Mutability::Not;
                }
                InternKind::Constant | InternKind::ConstProp => {
                    // If it's a constant, we should not have any "leftovers" as everything
                    // is tracked by const-checking.
                    // FIXME: downgrade this to a warning? It rejects some legitimate consts,
                    // such as `const CONST_RAW: *const Vec<i32> = &Vec::new() as *const _;`.
                    ecx.tcx
                        .sess
                        .span_err(ecx.tcx.span, "untyped pointers are not allowed in constant");
                    // For better errors later, mark the allocation as immutable.
                    alloc.mutability = Mutability::Not;
                }
            }
            let alloc = tcx.intern_const_alloc(alloc);
            tcx.set_alloc_id_memory(alloc_id, alloc);
            // Newly discovered relocations are queued too; `insert` returning `true`
            // dedups so each allocation is processed at most once.
            for &(_, ((), reloc)) in alloc.relocations().iter() {
                if leftover_allocations.insert(reloc) {
                    todo.push(reloc);
                }
            }
        } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
            // Codegen does not like dangling pointers, and generally `tcx` assumes that
            // all allocations referenced anywhere actually exist. So, make sure we error here.
            ecx.tcx.sess.span_err(ecx.tcx.span, "encountered dangling pointer in final constant");
        } else if ecx.tcx.get_global_alloc(alloc_id).is_none() {
            // We have hit an `AllocId` that is neither in local or global memory and isn't
            // marked as dangling by local memory. That should be impossible.
            span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id);
        }
    }
}