]>
Commit | Line | Data |
---|---|---|
dc9dc135 XL |
1 | //! This module specifies the type based interner for constants. |
2 | //! | |
3 | //! After a const evaluation has computed a value, before we destroy the const evaluator's session | |
4 | //! memory, we need to extract all memory allocations to the global memory pool so they stay around. | |
5 | ||
dc9dc135 | 6 | use super::validity::RefTracking; |
60c5eb7d | 7 | use rustc_data_structures::fx::{FxHashMap, FxHashSet}; |
ba9703b0 | 8 | use rustc_errors::ErrorReported; |
dfeec247 | 9 | use rustc_hir as hir; |
ba9703b0 XL |
10 | use rustc_middle::mir::interpret::{ErrorHandled, InterpResult}; |
11 | use rustc_middle::ty::{self, Ty}; | |
dc9dc135 | 12 | |
74b04a01 | 13 | use rustc_ast::ast::Mutability; |
dc9dc135 | 14 | |
dfeec247 | 15 | use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, Scalar, ValueVisitor}; |
dc9dc135 | 16 | |
dfeec247 XL |
17 | pub trait CompileTimeMachine<'mir, 'tcx> = Machine< |
18 | 'mir, | |
19 | 'tcx, | |
ba9703b0 | 20 | MemoryKind = !, |
dfeec247 XL |
21 | PointerTag = (), |
22 | ExtraFnVal = !, | |
23 | FrameExtra = (), | |
24 | AllocExtra = (), | |
25 | MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>, | |
26 | >; | |
60c5eb7d XL |
27 | |
28 | struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> { | |
e1599b0c | 29 | /// The ectx from which we intern. |
60c5eb7d | 30 | ecx: &'rt mut InterpCx<'mir, 'tcx, M>, |
e1599b0c XL |
31 | /// Previously encountered safe references. |
32 | ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>, | |
33 | /// A list of all encountered allocations. After type-based interning, we traverse this list to | |
34 | /// also intern allocations that are only referenced by a raw pointer or inside a union. | |
35 | leftover_allocations: &'rt mut FxHashSet<AllocId>, | |
dc9dc135 XL |
36 | /// The root node of the value that we're looking at. This field is never mutated and only used |
37 | /// for sanity assertions that will ICE when `const_qualif` screws up. | |
38 | mode: InternMode, | |
39 | /// This field stores the mutability of the value *currently* being checked. | |
e1599b0c XL |
40 | /// When encountering a mutable reference, we determine the pointee mutability |
41 | /// taking into account the mutability of the context: `& &mut i32` is entirely immutable, | |
42 | /// despite the nested mutable reference! | |
43 | /// The field gets updated when an `UnsafeCell` is encountered. | |
dc9dc135 | 44 | mutability: Mutability, |
dfeec247 XL |
45 | |
46 | /// This flag is to avoid triggering UnsafeCells are not allowed behind references in constants | |
47 | /// for promoteds. | |
48 | /// It's a copy of `mir::Body`'s ignore_interior_mut_in_const_validation field | |
49 | ignore_interior_mut_in_const_validation: bool, | |
dc9dc135 XL |
50 | } |
51 | ||
/// How strictly interning has to treat mutability, derived from the kind of
/// item (static vs. const) we started from and how deep we have recursed.
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
enum InternMode {
    /// Mutable references must in fact be immutable due to their surrounding immutability in a
    /// `static`. In a `static mut` we start out as mutable and thus can also contain further `&mut`
    /// that will actually be treated as mutable.
    Static,
    /// `UnsafeCell` is OK in the value of a constant: `const FOO = Cell::new(0)` creates
    /// a new cell every time it is used.
    ConstBase,
    /// `UnsafeCell` ICEs.
    Const,
}
64 | ||
/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics.
struct IsStaticOrFn;
68 | ||
e1599b0c XL |
69 | /// Intern an allocation without looking at its children. |
70 | /// `mode` is the mode of the environment where we found this pointer. | |
71 | /// `mutablity` is the mutability of the place to be interned; even if that says | |
72 | /// `immutable` things might become mutable if `ty` is not frozen. | |
73 | /// `ty` can be `None` if there is no potential interior mutability | |
74 | /// to account for (e.g. for vtables). | |
60c5eb7d XL |
75 | fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>( |
76 | ecx: &'rt mut InterpCx<'mir, 'tcx, M>, | |
e1599b0c XL |
77 | leftover_allocations: &'rt mut FxHashSet<AllocId>, |
78 | mode: InternMode, | |
79 | alloc_id: AllocId, | |
80 | mutability: Mutability, | |
81 | ty: Option<Ty<'tcx>>, | |
82 | ) -> InterpResult<'tcx, Option<IsStaticOrFn>> { | |
60c5eb7d | 83 | trace!("InternVisitor::intern {:?} with {:?}", alloc_id, mutability,); |
e1599b0c XL |
84 | // remove allocation |
85 | let tcx = ecx.tcx; | |
e74abb32 | 86 | let (kind, mut alloc) = match ecx.memory.alloc_map.remove(&alloc_id) { |
e1599b0c XL |
87 | Some(entry) => entry, |
88 | None => { | |
89 | // Pointer not found in local memory map. It is either a pointer to the global | |
90 | // map, or dangling. | |
91 | // If the pointer is dangling (neither in local nor global memory), we leave it | |
92 | // to validation to error. The `delay_span_bug` ensures that we don't forget such | |
93 | // a check in validation. | |
94 | if tcx.alloc_map.lock().get(alloc_id).is_none() { | |
95 | tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer"); | |
96 | } | |
97 | // treat dangling pointers like other statics | |
98 | // just to stop trying to recurse into them | |
99 | return Ok(Some(IsStaticOrFn)); | |
dfeec247 | 100 | } |
e1599b0c XL |
101 | }; |
102 | // This match is just a canary for future changes to `MemoryKind`, which most likely need | |
103 | // changes in this function. | |
104 | match kind { | |
dfeec247 | 105 | MemoryKind::Stack | MemoryKind::Vtable | MemoryKind::CallerLocation => {} |
e1599b0c XL |
106 | } |
107 | // Set allocation mutability as appropriate. This is used by LLVM to put things into | |
ba9703b0 | 108 | // read-only memory, and also by Miri when evaluating other globals that |
e1599b0c XL |
109 | // access this one. |
110 | if mode == InternMode::Static { | |
111 | // When `ty` is `None`, we assume no interior mutability. | |
dfeec247 | 112 | let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx.tcx, ecx.param_env, ecx.tcx.span)); |
e1599b0c XL |
113 | // For statics, allocation mutability is the combination of the place mutability and |
114 | // the type mutability. | |
115 | // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere. | |
dfeec247 XL |
116 | if mutability == Mutability::Not && frozen { |
117 | alloc.mutability = Mutability::Not; | |
e1599b0c XL |
118 | } else { |
119 | // Just making sure we are not "upgrading" an immutable allocation to mutable. | |
dfeec247 | 120 | assert_eq!(alloc.mutability, Mutability::Mut); |
e1599b0c XL |
121 | } |
122 | } else { | |
123 | // We *could* be non-frozen at `ConstBase`, for constants like `Cell::new(0)`. | |
124 | // But we still intern that as immutable as the memory cannot be changed once the | |
125 | // initial value was computed. | |
126 | // Constants are never mutable. | |
127 | assert_eq!( | |
dfeec247 XL |
128 | mutability, |
129 | Mutability::Not, | |
e1599b0c XL |
130 | "Something went very wrong: mutability requested for a constant" |
131 | ); | |
dfeec247 | 132 | alloc.mutability = Mutability::Not; |
e1599b0c XL |
133 | }; |
134 | // link the alloc id to the actual allocation | |
135 | let alloc = tcx.intern_const_alloc(alloc); | |
136 | leftover_allocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc)); | |
137 | tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); | |
138 | Ok(None) | |
139 | } | |
140 | ||
60c5eb7d | 141 | impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> InternVisitor<'rt, 'mir, 'tcx, M> { |
dc9dc135 XL |
142 | fn intern_shallow( |
143 | &mut self, | |
e1599b0c | 144 | alloc_id: AllocId, |
dc9dc135 | 145 | mutability: Mutability, |
e1599b0c | 146 | ty: Option<Ty<'tcx>>, |
dc9dc135 | 147 | ) -> InterpResult<'tcx, Option<IsStaticOrFn>> { |
dfeec247 | 148 | intern_shallow(self.ecx, self.leftover_allocations, self.mode, alloc_id, mutability, ty) |
dc9dc135 XL |
149 | } |
150 | } | |
151 | ||
ba9703b0 | 152 | impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M> |
dfeec247 | 153 | for InternVisitor<'rt, 'mir, 'tcx, M> |
dc9dc135 XL |
154 | { |
155 | type V = MPlaceTy<'tcx>; | |
156 | ||
157 | #[inline(always)] | |
60c5eb7d | 158 | fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> { |
dc9dc135 XL |
159 | &self.ecx |
160 | } | |
161 | ||
162 | fn visit_aggregate( | |
163 | &mut self, | |
164 | mplace: MPlaceTy<'tcx>, | |
dfeec247 | 165 | fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>, |
dc9dc135 XL |
166 | ) -> InterpResult<'tcx> { |
167 | if let Some(def) = mplace.layout.ty.ty_adt_def() { | |
168 | if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() { | |
e1599b0c XL |
169 | // We are crossing over an `UnsafeCell`, we can mutate again. This means that |
170 | // References we encounter inside here are interned as pointing to mutable | |
171 | // allocations. | |
dfeec247 XL |
172 | let old = std::mem::replace(&mut self.mutability, Mutability::Mut); |
173 | if !self.ignore_interior_mut_in_const_validation { | |
174 | assert_ne!( | |
175 | self.mode, | |
176 | InternMode::Const, | |
177 | "UnsafeCells are not allowed behind references in constants. This should \ | |
178 | have been prevented statically by const qualification. If this were \ | |
179 | allowed one would be able to change a constant at one use site and other \ | |
180 | use sites could observe that mutation.", | |
181 | ); | |
182 | } | |
dc9dc135 XL |
183 | let walked = self.walk_aggregate(mplace, fields); |
184 | self.mutability = old; | |
185 | return walked; | |
186 | } | |
187 | } | |
188 | self.walk_aggregate(mplace, fields) | |
189 | } | |
190 | ||
74b04a01 | 191 | fn visit_value(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> { |
dc9dc135 XL |
192 | // Handle Reference types, as these are the only relocations supported by const eval. |
193 | // Raw pointers (and boxes) are handled by the `leftover_relocations` logic. | |
194 | let ty = mplace.layout.ty; | |
e74abb32 | 195 | if let ty::Ref(_, referenced_ty, mutability) = ty.kind { |
dc9dc135 | 196 | let value = self.ecx.read_immediate(mplace.into())?; |
60c5eb7d | 197 | let mplace = self.ecx.ref_to_mplace(value)?; |
dfeec247 | 198 | // Handle trait object vtables. |
60c5eb7d | 199 | if let ty::Dynamic(..) = |
dfeec247 | 200 | self.ecx.tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind |
60c5eb7d | 201 | { |
dfeec247 XL |
202 | // Validation has already errored on an invalid vtable pointer so we can safely not |
203 | // do anything if this is not a real pointer. | |
204 | if let Scalar::Ptr(vtable) = mplace.meta.unwrap_meta() { | |
205 | // Explicitly choose `Immutable` here, since vtables are immutable, even | |
206 | // if the reference of the fat pointer is mutable. | |
207 | self.intern_shallow(vtable.alloc_id, Mutability::Not, None)?; | |
208 | } else { | |
209 | self.ecx().tcx.sess.delay_span_bug( | |
210 | rustc_span::DUMMY_SP, | |
211 | "vtables pointers cannot be integer pointers", | |
212 | ); | |
dc9dc135 XL |
213 | } |
214 | } | |
dc9dc135 XL |
215 | // Check if we have encountered this pointer+layout combination before. |
216 | // Only recurse for allocation-backed pointers. | |
217 | if let Scalar::Ptr(ptr) = mplace.ptr { | |
218 | // We do not have any `frozen` logic here, because it's essentially equivalent to | |
219 | // the mutability except for the outermost item. Only `UnsafeCell` can "unfreeze", | |
220 | // and we check that in `visit_aggregate`. | |
221 | // This is not an inherent limitation, but one that we know to be true, because | |
222 | // const qualification enforces it. We can lift it in the future. | |
223 | match (self.mode, mutability) { | |
224 | // immutable references are fine everywhere | |
dfeec247 | 225 | (_, hir::Mutability::Not) => {} |
dc9dc135 XL |
226 | // all is "good and well" in the unsoundness of `static mut` |
227 | ||
228 | // mutable references are ok in `static`. Either they are treated as immutable | |
229 | // because they are behind an immutable one, or they are behind an `UnsafeCell` | |
230 | // and thus ok. | |
dfeec247 | 231 | (InternMode::Static, hir::Mutability::Mut) => {} |
dc9dc135 | 232 | // we statically prevent `&mut T` via `const_qualif` and double check this here |
ba9703b0 XL |
233 | (InternMode::ConstBase | InternMode::Const, hir::Mutability::Mut) => { |
234 | match referenced_ty.kind { | |
235 | ty::Array(_, n) | |
236 | if n.eval_usize(self.ecx.tcx.tcx, self.ecx.param_env) == 0 => {} | |
237 | ty::Slice(_) | |
238 | if mplace.meta.unwrap_meta().to_machine_usize(self.ecx)? == 0 => {} | |
239 | _ => bug!("const qualif failed to prevent mutable references"), | |
240 | } | |
241 | } | |
dc9dc135 XL |
242 | } |
243 | // Compute the mutability with which we'll start visiting the allocation. This is | |
dfeec247 XL |
244 | // what gets changed when we encounter an `UnsafeCell`. |
245 | // | |
246 | // The only way a mutable reference actually works as a mutable reference is | |
247 | // by being in a `static mut` directly or behind another mutable reference. | |
248 | // If there's an immutable reference or we are inside a static, then our | |
249 | // mutable reference is equivalent to an immutable one. As an example: | |
250 | // `&&mut Foo` is semantically equivalent to `&&Foo` | |
251 | let mutability = self.mutability.and(mutability); | |
dc9dc135 XL |
252 | // Recursing behind references changes the intern mode for constants in order to |
253 | // cause assertions to trigger if we encounter any `UnsafeCell`s. | |
254 | let mode = match self.mode { | |
255 | InternMode::ConstBase => InternMode::Const, | |
256 | other => other, | |
257 | }; | |
e1599b0c | 258 | match self.intern_shallow(ptr.alloc_id, mutability, Some(mplace.layout.ty))? { |
dc9dc135 XL |
259 | // No need to recurse, these are interned already and statics may have |
260 | // cycles, so we don't want to recurse there | |
dfeec247 | 261 | Some(IsStaticOrFn) => {} |
dc9dc135 XL |
262 | // intern everything referenced by this value. The mutability is taken from the |
263 | // reference. It is checked above that mutable references only happen in | |
264 | // `static mut` | |
265 | None => self.ref_tracking.track((mplace, mutability, mode), || ()), | |
266 | } | |
267 | } | |
74b04a01 XL |
268 | Ok(()) |
269 | } else { | |
270 | // Not a reference -- proceed recursively. | |
271 | self.walk_value(mplace) | |
dc9dc135 | 272 | } |
dc9dc135 XL |
273 | } |
274 | } | |
275 | ||
dfeec247 XL |
276 | pub enum InternKind { |
277 | /// The `mutability` of the static, ignoring the type which may have interior mutability. | |
278 | Static(hir::Mutability), | |
279 | Constant, | |
280 | Promoted, | |
281 | ConstProp, | |
282 | } | |
283 | ||
60c5eb7d XL |
284 | pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>( |
285 | ecx: &mut InterpCx<'mir, 'tcx, M>, | |
dfeec247 | 286 | intern_kind: InternKind, |
dc9dc135 | 287 | ret: MPlaceTy<'tcx>, |
dfeec247 | 288 | ignore_interior_mut_in_const_validation: bool, |
ba9703b0 XL |
289 | ) -> InterpResult<'tcx> |
290 | where | |
291 | 'tcx: 'mir, | |
292 | { | |
dc9dc135 | 293 | let tcx = ecx.tcx; |
dfeec247 | 294 | let (base_mutability, base_intern_mode) = match intern_kind { |
dc9dc135 | 295 | // `static mut` doesn't care about interior mutability, it's mutable anyway |
dfeec247 XL |
296 | InternKind::Static(mutbl) => (mutbl, InternMode::Static), |
297 | // FIXME: what about array lengths, array initializers? | |
298 | InternKind::Constant | InternKind::ConstProp => (Mutability::Not, InternMode::ConstBase), | |
299 | InternKind::Promoted => (Mutability::Not, InternMode::ConstBase), | |
dc9dc135 XL |
300 | }; |
301 | ||
e1599b0c XL |
302 | // Type based interning. |
303 | // `ref_tracking` tracks typed references we have seen and still need to crawl for | |
304 | // more typed information inside them. | |
305 | // `leftover_allocations` collects *all* allocations we see, because some might not | |
306 | // be available in a typed way. They get interned at the end. | |
307 | let mut ref_tracking = RefTracking::new((ret, base_mutability, base_intern_mode)); | |
308 | let leftover_allocations = &mut FxHashSet::default(); | |
dc9dc135 XL |
309 | |
310 | // start with the outermost allocation | |
e1599b0c | 311 | intern_shallow( |
dc9dc135 | 312 | ecx, |
e1599b0c XL |
313 | leftover_allocations, |
314 | base_intern_mode, | |
dfeec247 XL |
315 | // The outermost allocation must exist, because we allocated it with |
316 | // `Memory::allocate`. | |
317 | ret.ptr.assert_ptr().alloc_id, | |
e1599b0c | 318 | base_mutability, |
dfeec247 | 319 | Some(ret.layout.ty), |
e1599b0c | 320 | )?; |
dc9dc135 XL |
321 | |
322 | while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() { | |
323 | let interned = InternVisitor { | |
324 | ref_tracking: &mut ref_tracking, | |
325 | ecx, | |
326 | mode, | |
e1599b0c | 327 | leftover_allocations, |
dc9dc135 | 328 | mutability, |
dfeec247 XL |
329 | ignore_interior_mut_in_const_validation, |
330 | } | |
331 | .visit_value(mplace); | |
dc9dc135 XL |
332 | if let Err(error) = interned { |
333 | // This can happen when e.g. the tag of an enum is not a valid discriminant. We do have | |
334 | // to read enum discriminants in order to find references in enum variant fields. | |
ba9703b0 | 335 | if let err_ub!(ValidationFailure(_)) = error.kind { |
dc9dc135 | 336 | let err = crate::const_eval::error_to_const_error(&ecx, error); |
74b04a01 XL |
337 | match err.struct_error( |
338 | ecx.tcx, | |
339 | "it is undefined behavior to use this value", | |
340 | |mut diag| { | |
e1599b0c | 341 | diag.note(crate::const_eval::note_on_undefined_behavior_error()); |
dc9dc135 | 342 | diag.emit(); |
74b04a01 XL |
343 | }, |
344 | ) { | |
ba9703b0 XL |
345 | ErrorHandled::TooGeneric |
346 | | ErrorHandled::Reported(ErrorReported) | |
347 | | ErrorHandled::Linted => {} | |
dc9dc135 XL |
348 | } |
349 | } | |
350 | } | |
351 | } | |
352 | ||
353 | // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw | |
354 | // pointers, ... So we can't intern them according to their type rules | |
355 | ||
e1599b0c | 356 | let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect(); |
dc9dc135 | 357 | while let Some(alloc_id) = todo.pop() { |
e74abb32 | 358 | if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) { |
e1599b0c XL |
359 | // We can't call the `intern_shallow` method here, as its logic is tailored to safe |
360 | // references and a `leftover_allocations` set (where we only have a todo-list here). | |
361 | // So we hand-roll the interning logic here again. | |
dfeec247 XL |
362 | match intern_kind { |
363 | // Statics may contain mutable allocations even behind relocations. | |
364 | // Even for immutable statics it would be ok to have mutable allocations behind | |
365 | // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`. | |
366 | InternKind::Static(_) => {} | |
367 | // Raw pointers in promoteds may only point to immutable things so we mark | |
368 | // everything as immutable. | |
369 | // It is UB to mutate through a raw pointer obtained via an immutable reference. | |
370 | // Since all references and pointers inside a promoted must by their very definition | |
371 | // be created from an immutable reference (and promotion also excludes interior | |
372 | // mutability), mutating through them would be UB. | |
373 | // There's no way we can check whether the user is using raw pointers correctly, | |
374 | // so all we can do is mark this as immutable here. | |
375 | InternKind::Promoted => { | |
376 | alloc.mutability = Mutability::Not; | |
377 | } | |
378 | InternKind::Constant | InternKind::ConstProp => { | |
379 | // If it's a constant, it *must* be immutable. | |
380 | // We cannot have mutable memory inside a constant. | |
381 | // We use `delay_span_bug` here, because this can be reached in the presence | |
382 | // of fancy transmutes. | |
383 | if alloc.mutability == Mutability::Mut { | |
384 | // For better errors later, mark the allocation as immutable | |
385 | // (on top of the delayed ICE). | |
386 | alloc.mutability = Mutability::Not; | |
387 | ecx.tcx.sess.delay_span_bug(ecx.tcx.span, "mutable allocation in constant"); | |
388 | } | |
389 | } | |
e1599b0c | 390 | } |
dc9dc135 XL |
391 | let alloc = tcx.intern_const_alloc(alloc); |
392 | tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); | |
e1599b0c XL |
393 | for &(_, ((), reloc)) in alloc.relocations().iter() { |
394 | if leftover_allocations.insert(reloc) { | |
dc9dc135 XL |
395 | todo.push(reloc); |
396 | } | |
397 | } | |
e74abb32 | 398 | } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) { |
dc9dc135 | 399 | // dangling pointer |
ba9703b0 | 400 | throw_ub_format!("encountered dangling pointer in final constant") |
dfeec247 XL |
401 | } else if ecx.tcx.alloc_map.lock().get(alloc_id).is_none() { |
402 | // We have hit an `AllocId` that is neither in local or global memory and isn't marked | |
403 | // as dangling by local memory. | |
404 | span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id); | |
dc9dc135 XL |
405 | } |
406 | } | |
407 | Ok(()) | |
408 | } |