]>
Commit | Line | Data |
---|---|---|
ba9703b0 | 1 | use rustc_middle::mir; |
ba9703b0 | 2 | use rustc_middle::ty::{self, Ty}; |
f9f354fc | 3 | use std::borrow::Borrow; |
dfeec247 XL |
4 | use std::collections::hash_map::Entry; |
5 | use std::hash::Hash; | |
6 | ||
7 | use rustc_data_structures::fx::FxHashMap; | |
fc512014 | 8 | use std::fmt; |
dfeec247 | 9 | |
3dfed10e | 10 | use rustc_ast::Mutability; |
ba9703b0 XL |
11 | use rustc_hir::def_id::DefId; |
12 | use rustc_middle::mir::AssertMessage; | |
f9f354fc | 13 | use rustc_session::Limit; |
1b1a35ee | 14 | use rustc_span::symbol::{sym, Symbol}; |
fc512014 | 15 | use rustc_target::abi::{Align, Size}; |
5869c6ff | 16 | use rustc_target::spec::abi::Abi; |
dfeec247 XL |
17 | |
18 | use crate::interpret::{ | |
136023e0 XL |
19 | self, compile_time_machine, AllocId, Allocation, Frame, ImmTy, InterpCx, InterpResult, OpTy, |
20 | PlaceTy, Scalar, StackPopUnwind, | |
dfeec247 XL |
21 | }; |
22 | ||
23 | use super::error::*; | |
24 | ||
impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
    /// "Intercept" a function call to a panic-related function
    /// because we have something special to do for it.
    /// If this returns successfully (`Ok`), the function should just be evaluated normally.
    ///
    /// Returns `Ok(Some(instance))` when the call should be redirected to a different
    /// const-evaluable instance, `Ok(None)` when the call is not special, and `Err(..)`
    /// when the call is a panic that should abort const evaluation with a diagnostic.
    fn hook_special_const_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        // All `#[rustc_do_not_const_check]` functions should be hooked here.
        let def_id = instance.def_id();

        if Some(def_id) == self.tcx.lang_items().const_eval_select() {
            // Redirect to the compile-time variant, `const_eval_select_ct`, reusing the
            // caller's substitutions. The double `unwrap` asserts that resolution cannot
            // fail for this lang item at this point.
            if let Some(const_eval_select) = self.tcx.lang_items().const_eval_select_ct() {
                return Ok(Some(
                    ty::Instance::resolve(
                        *self.tcx,
                        ty::ParamEnv::reveal_all(),
                        const_eval_select,
                        instance.substs,
                    )
                    .unwrap()
                    .unwrap(),
                ));
            }
        } else if Some(def_id) == self.tcx.lang_items().panic_display()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
        {
            // The argument is a `&str` or `&&str` (panic_display takes `&&str`).
            assert!(args.len() == 1);

            // Peel references until we reach the `str` itself.
            let mut msg_place = self.deref_operand(&args[0])?;
            while msg_place.layout.ty.is_ref() {
                msg_place = self.deref_operand(&msg_place.into())?;
            }

            // Turn the panic into a structured const-eval error carrying the message
            // and the closest untracked caller location (file/line/column).
            let msg = Symbol::intern(self.read_str(&msg_place)?);
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            return Err(ConstEvalErrKind::Panic { msg, file, line, col }.into());
        } else if Some(def_id) == self.tcx.lang_items().panic_fmt() {
            // For panic_fmt, call const_panic_fmt instead. Note that `const_panic_fmt`
            // takes no generic parameters, hence the empty substs.
            if let Some(const_panic_fmt) = self.tcx.lang_items().const_panic_fmt() {
                return Ok(Some(
                    ty::Instance::resolve(
                        *self.tcx,
                        ty::ParamEnv::reveal_all(),
                        const_panic_fmt,
                        self.tcx.intern_substs(&[]),
                    )
                    .unwrap()
                    .unwrap(),
                ));
            }
        }
        // Not a hooked function: evaluate it normally.
        Ok(None)
    }
}
84 | ||
/// Extra machine state for CTFE, and the Machine instance.
pub struct CompileTimeInterpreter<'mir, 'tcx> {
    /// For now, the number of terminators that can be evaluated before we throw a resource
    /// exhaustion error.
    ///
    /// Setting this to `0` disables the limit and allows the interpreter to run forever.
    /// Decremented in `before_terminator`; hitting `0` raises `StepLimitReached`.
    pub steps_remaining: usize,

    /// The virtual call stack. Pointer tag is `AllocId` and per-frame extra data is `()`,
    /// as set up by `compile_time_machine!`.
    pub(crate) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,
}
96 | ||
/// Per-memory configuration for the CTFE machine.
#[derive(Copy, Clone, Debug)]
pub struct MemoryExtra {
    /// We need to make sure consts never point to anything mutable, even recursively. That is
    /// relied on for pattern matching on consts with references.
    /// To achieve this, two pieces have to work together:
    /// * Interning makes everything outside of statics immutable.
    /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
    /// This boolean here controls the second part: it is checked in `before_access_global`.
    pub(super) can_access_statics: bool,
}
107 | ||
108 | impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> { | |
f9f354fc XL |
109 | pub(super) fn new(const_eval_limit: Limit) -> Self { |
110 | CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() } | |
dfeec247 XL |
111 | } |
112 | } | |
113 | ||
114 | impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> { | |
115 | #[inline(always)] | |
116 | fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool | |
117 | where | |
118 | K: Borrow<Q>, | |
119 | { | |
120 | FxHashMap::contains_key(self, k) | |
121 | } | |
122 | ||
123 | #[inline(always)] | |
124 | fn insert(&mut self, k: K, v: V) -> Option<V> { | |
125 | FxHashMap::insert(self, k, v) | |
126 | } | |
127 | ||
128 | #[inline(always)] | |
129 | fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V> | |
130 | where | |
131 | K: Borrow<Q>, | |
132 | { | |
133 | FxHashMap::remove(self, k) | |
134 | } | |
135 | ||
136 | #[inline(always)] | |
137 | fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> { | |
138 | self.iter().filter_map(move |(k, v)| f(k, &*v)).collect() | |
139 | } | |
140 | ||
141 | #[inline(always)] | |
142 | fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> { | |
143 | match self.get(&k) { | |
144 | Some(v) => Ok(v), | |
145 | None => { | |
146 | vacant()?; | |
147 | bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading") | |
148 | } | |
149 | } | |
150 | } | |
151 | ||
152 | #[inline(always)] | |
153 | fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> { | |
154 | match self.entry(k) { | |
155 | Entry::Occupied(e) => Ok(e.into_mut()), | |
156 | Entry::Vacant(e) => { | |
157 | let v = vacant()?; | |
158 | Ok(e.insert(v)) | |
159 | } | |
160 | } | |
161 | } | |
162 | } | |
163 | ||
/// Convenience alias: an interpretation context driven by the CTFE machine.
crate type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
166 | ||
fc512014 XL |
/// Machine-specific memory kinds for CTFE.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum MemoryKind {
    /// Memory obtained via the `const_allocate` intrinsic (see `call_intrinsic`).
    Heap,
}
171 | ||
172 | impl fmt::Display for MemoryKind { | |
173 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | |
174 | match self { | |
175 | MemoryKind::Heap => write!(f, "heap allocation"), | |
176 | } | |
177 | } | |
178 | } | |
179 | ||
impl interpret::MayLeak for MemoryKind {
    /// Whether allocations of this kind may outlive the evaluation without being
    /// considered a leak. The exhaustive match forces a decision for any future variant.
    #[inline(always)]
    fn may_leak(self) -> bool {
        match self {
            // `const_allocate`d memory must be deallocated (or interned) before the
            // end of evaluation; leaking it is an error.
            MemoryKind::Heap => false,
        }
    }
}
188 | ||
dfeec247 XL |
impl interpret::MayLeak for ! {
    /// Trivial impl for machines whose memory-kind type is uninhabited.
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited, so this body can never run; returning `self`
        // coerces `!` to `bool`.
        self
    }
}
196 | ||
impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    /// Conservatively decide whether `a == b` is *guaranteed* at runtime.
    /// Returning `false` here means "unknown", not "unequal".
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int { .. }, Scalar::Int { .. }) => a == b,
            // Equality with integers can never be known for sure.
            (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => false,
            // FIXME: return `true` for when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
        }
    }

    /// Conservatively decide whether `a != b` is *guaranteed* at runtime.
    /// Returning `false` here means "unknown", not "equal".
    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int(_), Scalar::Int(_)) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because if they are in bounds, the pointer can't be null.
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
                int.is_null() && !self.memory.ptr_may_be_null(ptr.into())
            }
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in different static items.
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
        }
    }
}
228 | ||
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    // Pulls in the boilerplate shared by all compile-time machines (pointer tag,
    // frame extra, and the default hook implementations).
    compile_time_machine!(<'mir, 'tcx>);

    type MemoryKind = MemoryKind;

    type MemoryExtra = MemoryExtra;

    const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error

    /// Load the MIR body to evaluate for `instance`, preferring the CTFE-specific MIR.
    /// Items without CTFE MIR (e.g. extern functions) produce a `NeedsRfc` error.
    fn load_mir(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        instance: ty::InstanceDef<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        match instance {
            ty::InstanceDef::Item(def) => {
                if ecx.tcx.is_ctfe_mir_available(def.did) {
                    Ok(ecx.tcx.mir_for_ctfe_opt_const_arg(def))
                } else {
                    let path = ecx.tcx.def_path_str(def.did);
                    Err(ConstEvalErrKind::NeedsRfc(format!("calling extern function `{}`", path))
                        .into())
                }
            }
            // Shims and other glue always have MIR available.
            _ => Ok(ecx.tcx.instance_mir(instance)),
        }
    }

    /// Decide how to handle a function call: reject non-const fns, apply the special-fn
    /// hooks (panic/const_eval_select redirection), otherwise return the MIR to evaluate.
    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        _abi: Abi,
        args: &[OpTy<'tcx>],
        _ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions
        if let ty::InstanceDef::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            // at all.
            if !ecx.tcx.is_const_fn_raw(def.did) {
                // allow calling functions marked with #[default_method_body_is_const].
                if !ecx.tcx.has_attr(def.did, sym::default_method_body_is_const) {
                    // We certainly do *not* want to actually call the fn
                    // though, so be sure we return here.
                    throw_unsup_format!("calling non-const function `{}`", instance)
                }
            }

            if let Some(new_instance) = ecx.hook_special_const_fn(instance, args)? {
                // We call another const fn instead.
                return Self::find_mir_or_eval_fn(ecx, new_instance, _abi, args, _ret, _unwind);
            }
        }
        // This is a const fn. Call it.
        Ok(Some(ecx.load_mir(instance.def, None)?))
    }

    /// Handle intrinsic calls: first try the intrinsics shared with all machines,
    /// then the CTFE-only ones (`ptr_guaranteed_eq/ne`, `const_allocate`).
    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, ret)? {
            return Ok(());
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics. All of them return, so a missing return place
        // (diverging intrinsic) is unsupported here.
        let (dest, ret) = match ret {
            None => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
            Some(p) => p,
        };
        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = ecx.read_immediate(&args[0])?.to_scalar()?;
                let b = ecx.read_immediate(&args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)
                } else {
                    ecx.guaranteed_ne(a, b)
                };
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            sym::const_allocate => {
                let size = ecx.read_scalar(&args[0])?.to_machine_usize(ecx)?;
                let align = ecx.read_scalar(&args[1])?.to_machine_usize(ecx)?;

                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_format!("align has to be a power of 2, {}", err),
                };

                // Allocate machine-kind `Heap` memory and write the resulting pointer
                // into the destination place.
                let ptr = ecx.memory.allocate(
                    Size::from_bytes(size as u64),
                    align,
                    interpret::MemoryKind::Machine(MemoryKind::Heap),
                )?;
                ecx.write_pointer(ptr, dest)?;
            }
            _ => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
        }

        // Intrinsic handled; continue at the return block.
        ecx.go_to_block(ret);
        Ok(())
    }

    /// Turn a failed MIR `Assert` terminator into a structured const-eval error,
    /// evaluating the operands of the assert message to const ints for the diagnostic.
    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
        let eval_to_int =
            |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        };
        Err(ConstEvalErrKind::AssertFailure(err).into())
    }

    /// `abort` terminates const evaluation with an error carrying the given message.
    fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: String) -> InterpResult<'tcx, !> {
        Err(ConstEvalErrKind::Abort(msg).into())
    }

    /// Pointer arithmetic/comparison is not supported in CTFE.
    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: &ImmTy<'tcx>,
        _right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    }

    /// `box` allocations are not supported in CTFE.
    fn box_alloc(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _dest: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
    }

    /// Enforce the terminator step limit; called before every terminator.
    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // The step limit has already been hit in a previous call to `before_terminator`.
        // (This also covers the "limit disabled via 0" configuration described on
        // `steps_remaining`, since the counter then never transitions to 0 below.)
        if ecx.machine.steps_remaining == 0 {
            return Ok(());
        }

        ecx.machine.steps_remaining -= 1;
        if ecx.machine.steps_remaining == 0 {
            throw_exhaust!(StepLimitReached)
        }

        Ok(())
    }

    /// Enforce the recursion (stack depth) limit when pushing a new frame.
    #[inline(always)]
    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            Ok(frame)
        }
    }

    /// Borrow the machine's virtual call stack (read-only).
    #[inline(always)]
    fn stack(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    /// Borrow the machine's virtual call stack (mutable).
    #[inline(always)]
    fn stack_mut(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    /// Access-control hook for global allocations: writes are always rejected;
    /// reads of statics are gated by `MemoryExtra::can_access_statics`.
    fn before_access_global(
        memory_extra: &MemoryExtra,
        alloc_id: AllocId,
        allocation: &Allocation,
        static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            if allocation.mutability == Mutability::Not {
                Err(err_ub!(WriteToReadOnly(alloc_id)).into())
            } else {
                Err(ConstEvalErrKind::ModifiedGlobal.into())
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if memory_extra.can_access_statics {
                // Machine configuration allows us read from anything (e.g., `static` initializer).
                Ok(())
            } else if static_def_id.is_some() {
                // Machine configuration does not allow us to read statics
                // (e.g., `const` initializer).
                // See const_eval::machine::MemoryExtra::can_access_statics for why
                // this check is so important: if we could read statics, we could read pointers
                // to mutable allocations *inside* statics. These allocations are not themselves
                // statics, so pointers to them can get around the check in `validity.rs`.
                Err(ConstEvalErrKind::ConstAccessesStatic.into())
            } else {
                // Immutable global, this read is fine.
                // But make sure we never accept a read from something mutable, that would be
                // unsound. The reason is that as the content of this allocation may be different
                // now and at run-time, so if we permit reading now we might return the wrong value.
                assert_eq!(allocation.mutability, Mutability::Not);
                Ok(())
            }
        }
    }
}
477 | ||
478 | // Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups | |
479 | // so we can end up having a file with just that impl, but for now, let's keep the impl discoverable | |
480 | // at the bottom of this file. |