//! This module specifies the type based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
e1599b0c | 6 | use rustc::ty::{Ty, self}; |
416331ca | 7 | use rustc::mir::interpret::{InterpResult, ErrorHandled}; |
dc9dc135 XL |
8 | use rustc::hir; |
9 | use rustc::hir::def_id::DefId; | |
10 | use super::validity::RefTracking; | |
11 | use rustc_data_structures::fx::FxHashSet; | |
12 | ||
13 | use syntax::ast::Mutability; | |
dc9dc135 XL |
14 | |
15 | use super::{ | |
e1599b0c | 16 | ValueVisitor, MemoryKind, AllocId, MPlaceTy, Scalar, |
dc9dc135 XL |
17 | }; |
18 | use crate::const_eval::{CompileTimeInterpreter, CompileTimeEvalContext}; | |
19 | ||
/// Visitor that walks a (fully evaluated) value and interns every allocation it
/// references, using type information to decide the mutability of each allocation.
struct InternVisitor<'rt, 'mir, 'tcx> {
    /// The ectx from which we intern.
    ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>,
    /// Previously encountered safe references.
    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>,
    /// A list of all encountered allocations. After type-based interning, we traverse this list to
    /// also intern allocations that are only referenced by a raw pointer or inside a union.
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    /// The root node of the value that we're looking at. This field is never mutated and only used
    /// for sanity assertions that will ICE when `const_qualif` screws up.
    mode: InternMode,
    /// This field stores the mutability of the value *currently* being checked.
    /// When encountering a mutable reference, we determine the pointee mutability
    /// taking into account the mutability of the context: `& &mut i32` is entirely immutable,
    /// despite the nested mutable reference!
    /// The field gets updated when an `UnsafeCell` is encountered.
    mutability: Mutability,
}
38 | ||
/// How we should treat mutable references and `UnsafeCell` while interning.
/// Chosen from the kind of item being interned (`static` vs. const/promoted),
/// and tightened as we recurse behind references (`ConstBase` -> `Const`).
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
enum InternMode {
    /// Mutable references must in fact be immutable due to their surrounding immutability in a
    /// `static`. In a `static mut` we start out as mutable and thus can also contain further `&mut`
    /// that will actually be treated as mutable.
    Static,
    /// UnsafeCell is OK in the value of a constant: `const FOO = Cell::new(0)` creates
    /// a new cell every time it is used.
    ConstBase,
    /// `UnsafeCell` ICEs.
    Const,
}
51 | ||
/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics
struct IsStaticOrFn;
55 | ||
e1599b0c XL |
56 | /// Intern an allocation without looking at its children. |
57 | /// `mode` is the mode of the environment where we found this pointer. | |
58 | /// `mutablity` is the mutability of the place to be interned; even if that says | |
59 | /// `immutable` things might become mutable if `ty` is not frozen. | |
60 | /// `ty` can be `None` if there is no potential interior mutability | |
61 | /// to account for (e.g. for vtables). | |
62 | fn intern_shallow<'rt, 'mir, 'tcx>( | |
63 | ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>, | |
64 | leftover_allocations: &'rt mut FxHashSet<AllocId>, | |
65 | mode: InternMode, | |
66 | alloc_id: AllocId, | |
67 | mutability: Mutability, | |
68 | ty: Option<Ty<'tcx>>, | |
69 | ) -> InterpResult<'tcx, Option<IsStaticOrFn>> { | |
70 | trace!( | |
71 | "InternVisitor::intern {:?} with {:?}", | |
72 | alloc_id, mutability, | |
73 | ); | |
74 | // remove allocation | |
75 | let tcx = ecx.tcx; | |
76 | let memory = ecx.memory_mut(); | |
77 | let (kind, mut alloc) = match memory.alloc_map.remove(&alloc_id) { | |
78 | Some(entry) => entry, | |
79 | None => { | |
80 | // Pointer not found in local memory map. It is either a pointer to the global | |
81 | // map, or dangling. | |
82 | // If the pointer is dangling (neither in local nor global memory), we leave it | |
83 | // to validation to error. The `delay_span_bug` ensures that we don't forget such | |
84 | // a check in validation. | |
85 | if tcx.alloc_map.lock().get(alloc_id).is_none() { | |
86 | tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer"); | |
87 | } | |
88 | // treat dangling pointers like other statics | |
89 | // just to stop trying to recurse into them | |
90 | return Ok(Some(IsStaticOrFn)); | |
91 | }, | |
92 | }; | |
93 | // This match is just a canary for future changes to `MemoryKind`, which most likely need | |
94 | // changes in this function. | |
95 | match kind { | |
96 | MemoryKind::Stack | MemoryKind::Vtable => {}, | |
97 | } | |
98 | // Set allocation mutability as appropriate. This is used by LLVM to put things into | |
99 | // read-only memory, and also by Miri when evluating other constants/statics that | |
100 | // access this one. | |
101 | if mode == InternMode::Static { | |
102 | // When `ty` is `None`, we assume no interior mutability. | |
103 | let frozen = ty.map_or(true, |ty| ty.is_freeze( | |
104 | ecx.tcx.tcx, | |
105 | ecx.param_env, | |
106 | ecx.tcx.span, | |
107 | )); | |
108 | // For statics, allocation mutability is the combination of the place mutability and | |
109 | // the type mutability. | |
110 | // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere. | |
111 | if mutability == Mutability::Immutable && frozen { | |
112 | alloc.mutability = Mutability::Immutable; | |
113 | } else { | |
114 | // Just making sure we are not "upgrading" an immutable allocation to mutable. | |
115 | assert_eq!(alloc.mutability, Mutability::Mutable); | |
116 | } | |
117 | } else { | |
118 | // We *could* be non-frozen at `ConstBase`, for constants like `Cell::new(0)`. | |
119 | // But we still intern that as immutable as the memory cannot be changed once the | |
120 | // initial value was computed. | |
121 | // Constants are never mutable. | |
122 | assert_eq!( | |
123 | mutability, Mutability::Immutable, | |
124 | "Something went very wrong: mutability requested for a constant" | |
125 | ); | |
126 | alloc.mutability = Mutability::Immutable; | |
127 | }; | |
128 | // link the alloc id to the actual allocation | |
129 | let alloc = tcx.intern_const_alloc(alloc); | |
130 | leftover_allocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc)); | |
131 | tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc); | |
132 | Ok(None) | |
133 | } | |
134 | ||
dc9dc135 | 135 | impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> { |
dc9dc135 XL |
136 | fn intern_shallow( |
137 | &mut self, | |
e1599b0c | 138 | alloc_id: AllocId, |
dc9dc135 | 139 | mutability: Mutability, |
e1599b0c | 140 | ty: Option<Ty<'tcx>>, |
dc9dc135 | 141 | ) -> InterpResult<'tcx, Option<IsStaticOrFn>> { |
e1599b0c XL |
142 | intern_shallow( |
143 | self.ecx, | |
144 | self.leftover_allocations, | |
145 | self.mode, | |
146 | alloc_id, | |
147 | mutability, | |
148 | ty, | |
149 | ) | |
dc9dc135 XL |
150 | } |
151 | } | |
152 | ||
impl<'rt, 'mir, 'tcx>
    ValueVisitor<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>
for
    InternVisitor<'rt, 'mir, 'tcx>
{
    type V = MPlaceTy<'tcx>;

    #[inline(always)]
    fn ecx(&self) -> &CompileTimeEvalContext<'mir, 'tcx> {
        &self.ecx
    }

    /// Walks aggregate fields; the only special case is `UnsafeCell`, which re-enables
    /// mutability for everything inside it.
    fn visit_aggregate(
        &mut self,
        mplace: MPlaceTy<'tcx>,
        fields: impl Iterator<Item=InterpResult<'tcx, Self::V>>,
    ) -> InterpResult<'tcx> {
        if let Some(def) = mplace.layout.ty.ty_adt_def() {
            if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
                // We are crossing over an `UnsafeCell`, we can mutate again. This means that
                // References we encounter inside here are interned as pointing to mutable
                // allocations.
                let old = std::mem::replace(&mut self.mutability, Mutability::Mutable);
                assert_ne!(
                    self.mode, InternMode::Const,
                    "UnsafeCells are not allowed behind references in constants. This should have \
                    been prevented statically by const qualification. If this were allowed one \
                    would be able to change a constant at one use site and other use sites could \
                    observe that mutation.",
                );
                let walked = self.walk_aggregate(mplace, fields);
                // Restore the pre-`UnsafeCell` mutability for the rest of the walk.
                self.mutability = old;
                return walked;
            }
        }
        self.walk_aggregate(mplace, fields)
    }

    /// Interns the pointee of reference-typed values (including the vtable of fat
    /// pointers) and queues it for recursive traversal via `ref_tracking`.
    fn visit_primitive(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> {
        // Handle Reference types, as these are the only relocations supported by const eval.
        // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
        let ty = mplace.layout.ty;
        if let ty::Ref(_, referenced_ty, mutability) = ty.sty {
            let value = self.ecx.read_immediate(mplace.into())?;
            // Handle trait object vtables
            if let Ok(meta) = value.to_meta() {
                if let ty::Dynamic(..) =
                    self.ecx.tcx.struct_tail_erasing_lifetimes(
                        referenced_ty, self.ecx.param_env).sty
                {
                    if let Ok(vtable) = meta.unwrap().to_ptr() {
                        // explicitly choose `Immutable` here, since vtables are immutable, even
                        // if the reference of the fat pointer is mutable
                        self.intern_shallow(vtable.alloc_id, Mutability::Immutable, None)?;
                    }
                }
            }
            let mplace = self.ecx.ref_to_mplace(value)?;
            // Check if we have encountered this pointer+layout combination before.
            // Only recurse for allocation-backed pointers.
            if let Scalar::Ptr(ptr) = mplace.ptr {
                // We do not have any `frozen` logic here, because it's essentially equivalent to
                // the mutability except for the outermost item. Only `UnsafeCell` can "unfreeze",
                // and we check that in `visit_aggregate`.
                // This is not an inherent limitation, but one that we know to be true, because
                // const qualification enforces it. We can lift it in the future.
                match (self.mode, mutability) {
                    // immutable references are fine everywhere
                    (_, hir::Mutability::MutImmutable) => {},
                    // all is "good and well" in the unsoundness of `static mut`

                    // mutable references are ok in `static`. Either they are treated as immutable
                    // because they are behind an immutable one, or they are behind an `UnsafeCell`
                    // and thus ok.
                    (InternMode::Static, hir::Mutability::MutMutable) => {},
                    // we statically prevent `&mut T` via `const_qualif` and double check this here
                    (InternMode::ConstBase, hir::Mutability::MutMutable) |
                    (InternMode::Const, hir::Mutability::MutMutable) => {
                        // Zero-length arrays/slices are the one exception: `&mut []` has no
                        // memory to mutate and is therefore accepted.
                        match referenced_ty.sty {
                            ty::Array(_, n)
                                if n.eval_usize(self.ecx.tcx.tcx, self.ecx.param_env) == 0 => {}
                            ty::Slice(_)
                                if value.to_meta().unwrap().unwrap().to_usize(self.ecx)? == 0 => {}
                            _ => bug!("const qualif failed to prevent mutable references"),
                        }
                    },
                }
                // Compute the mutability with which we'll start visiting the allocation. This is
                // what gets changed when we encounter an `UnsafeCell`
                let mutability = match (self.mutability, mutability) {
                    // The only way a mutable reference actually works as a mutable reference is
                    // by being in a `static mut` directly or behind another mutable reference.
                    // If there's an immutable reference or we are inside a static, then our
                    // mutable reference is equivalent to an immutable one. As an example:
                    // `&&mut Foo` is semantically equivalent to `&&Foo`
                    (Mutability::Mutable, hir::Mutability::MutMutable) => Mutability::Mutable,
                    _ => Mutability::Immutable,
                };
                // Recursing behind references changes the intern mode for constants in order to
                // cause assertions to trigger if we encounter any `UnsafeCell`s.
                let mode = match self.mode {
                    InternMode::ConstBase => InternMode::Const,
                    other => other,
                };
                match self.intern_shallow(ptr.alloc_id, mutability, Some(mplace.layout.ty))? {
                    // No need to recurse, these are interned already and statics may have
                    // cycles, so we don't want to recurse there
                    Some(IsStaticOrFn) => {},
                    // intern everything referenced by this value. The mutability is taken from the
                    // reference. It is checked above that mutable references only happen in
                    // `static mut`
                    None => self.ref_tracking.track((mplace, mutability, mode), || ()),
                }
            }
        }
        Ok(())
    }
}
271 | ||
dc9dc135 XL |
/// Interns the result (`ret`) of a const/static/promoted evaluation, plus everything it
/// transitively references, into the global (`tcx`) allocation map so the memory survives
/// the end of the interpreter session.
/// `def_id` is the evaluated item; its static mutability (if any) picks the base intern
/// mode and place mutability.
pub fn intern_const_alloc_recursive(
    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
    def_id: DefId,
    ret: MPlaceTy<'tcx>,
) -> InterpResult<'tcx> {
    let tcx = ecx.tcx;
    // this `mutability` is the mutability of the place, ignoring the type
    let (base_mutability, base_intern_mode) = match tcx.static_mutability(def_id) {
        Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static),
        // `static mut` doesn't care about interior mutability, it's mutable anyway
        Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::Static),
        // consts, promoteds. FIXME: what about array lengths, array initializers?
        None => (Mutability::Immutable, InternMode::ConstBase),
    };

    // Type based interning.
    // `ref_tracking` tracks typed references we have seen and still need to crawl for
    // more typed information inside them.
    // `leftover_allocations` collects *all* allocations we see, because some might not
    // be available in a typed way. They get interned at the end.
    let mut ref_tracking = RefTracking::new((ret, base_mutability, base_intern_mode));
    let leftover_allocations = &mut FxHashSet::default();

    // start with the outermost allocation
    intern_shallow(
        ecx,
        leftover_allocations,
        base_intern_mode,
        ret.ptr.to_ptr()?.alloc_id,
        base_mutability,
        Some(ret.layout.ty)
    )?;

    // Drain the worklist of typed references discovered by the visitor; each iteration may
    // push more entries onto `ref_tracking.todo`.
    while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() {
        let interned = InternVisitor {
            ref_tracking: &mut ref_tracking,
            ecx,
            mode,
            leftover_allocations,
            mutability,
        }.visit_value(mplace);
        if let Err(error) = interned {
            // This can happen when e.g. the tag of an enum is not a valid discriminant. We do have
            // to read enum discriminants in order to find references in enum variant fields.
            if let err_unsup!(ValidationFailure(_)) = error.kind {
                let err = crate::const_eval::error_to_const_error(&ecx, error);
                match err.struct_error(ecx.tcx, "it is undefined behavior to use this value") {
                    Ok(mut diag) => {
                        diag.note(crate::const_eval::note_on_undefined_behavior_error());
                        diag.emit();
                    }
                    // Already reported, or not reportable for a too-generic instance; either
                    // way there is nothing more to emit here.
                    Err(ErrorHandled::TooGeneric) |
                    Err(ErrorHandled::Reported) => {},
                }
            }
        }
    }

    // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
    // pointers, ... So we can't intern them according to their type rules

    let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
    while let Some(alloc_id) = todo.pop() {
        if let Some((_, mut alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) {
            // We can't call the `intern_shallow` method here, as its logic is tailored to safe
            // references and a `leftover_allocations` set (where we only have a todo-list here).
            // So we hand-roll the interning logic here again.
            if base_intern_mode != InternMode::Static {
                // If it's not a static, it *must* be immutable.
                // We cannot have mutable memory inside a constant.
                // FIXME: ideally we would assert that they already are immutable, to double-
                // check our static checks.
                alloc.mutability = Mutability::Immutable;
            }
            let alloc = tcx.intern_const_alloc(alloc);
            tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
            // Queue any allocations this one points at; `insert` returning true means we
            // have not processed that allocation yet.
            for &(_, ((), reloc)) in alloc.relocations().iter() {
                if leftover_allocations.insert(reloc) {
                    todo.push(reloc);
                }
            }
        } else if ecx.memory().dead_alloc_map.contains_key(&alloc_id) {
            // dangling pointer
            throw_unsup!(ValidationFailure("encountered dangling pointer in final constant".into()))
        }
    }
    Ok(())
}