// compiler/rustc_mir/src/const_eval/machine.rs
use rustc_middle::mir;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_middle::ty::{self, Ty};
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
use std::hash::Hash;

use rustc_data_structures::fx::FxHashMap;

use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::{sym, Symbol};

use crate::interpret::{
    self, compile_time_machine, AllocId, Allocation, Frame, GlobalId, ImmTy, InterpCx,
    InterpResult, Memory, OpTy, PlaceTy, Pointer, Scalar,
};

use super::error::*;

impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
    /// Evaluate a const function where all arguments (if any) are zero-sized types.
    /// The evaluation is memoized thanks to the query system.
    ///
    /// Returns `true` if the call has been evaluated.
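    ///
    /// Illustrative sketch (not from the original source; `answer` is a hypothetical
    /// function): a call whose arguments are all ZSTs, such as
    ///
    /// ```ignore (illustrative)
    /// const fn answer() -> u32 { 42 }
    /// const A: u32 = answer(); // evaluated once through the query system
    /// const B: u32 = answer(); // later calls can reuse the cached result
    /// ```
    ///
    /// qualifies for this fast path, while a call with a non-ZST argument (or a
    /// `#[track_caller]` function) does not.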
    fn try_eval_const_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx, bool> {
        trace!("try_eval_const_fn_call: {:?}", instance);
        // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot
        // perform this optimization on items tagged with it.
        if instance.def.requires_caller_location(self.tcx()) {
            return Ok(false);
        }
        // For the moment we only do this for functions which take no arguments
        // (or all arguments are ZSTs) so that we don't memoize too much.
        if args.iter().any(|a| !a.layout.is_zst()) {
            return Ok(false);
        }

        let dest = match ret {
            Some((dest, _)) => dest,
            // Don't memoize diverging function calls.
            None => return Ok(false),
        };

        let gid = GlobalId { instance, promoted: None };

        let place = self.eval_to_allocation(gid)?;

        self.copy_op(place.into(), dest)?;

        self.return_to_block(ret.map(|r| r.1))?;
        trace!("{:?}", self.dump_place(*dest));
        Ok(true)
    }

    /// "Intercept" a function call to a panic-related function
    /// because we have something special to do for it.
    /// If this returns successfully (`Ok`), the function should just be evaluated normally.
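    ///
    /// Illustrative sketch (not from the original source): a panic reached during const
    /// evaluation (behind the unstable `const_panic` feature at the time), e.g.
    ///
    /// ```ignore (illustrative)
    /// const _: () = panic!("boom");
    /// ```
    ///
    /// is intercepted here and reported as `ConstEvalErrKind::Panic` with the message and
    /// caller location, instead of actually running the panic machinery.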
    fn hook_panic_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx> {
        let def_id = instance.def_id();
        if Some(def_id) == self.tcx.lang_items().panic_fn()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
        {
            // &'static str
            assert!(args.len() == 1);

            let msg_place = self.deref_operand(args[0])?;
            let msg = Symbol::intern(self.read_str(msg_place)?);
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            Err(ConstEvalErrKind::Panic { msg, file, line, col }.into())
        } else {
            Ok(())
        }
    }
}

/// Extra machine state for CTFE, and the Machine instance
pub struct CompileTimeInterpreter<'mir, 'tcx> {
    /// For now, the number of terminators that can be evaluated before we throw a resource
    /// exhaustion error.
    ///
    /// Setting this to `0` disables the limit and allows the interpreter to run forever.
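    ///
    /// (Illustrative note, not from the original source: this counter is seeded from the
    /// session's `const_eval_limit` in `CompileTimeInterpreter::new` below; at the time of
    /// this code the limit could be raised per crate with the unstable
    /// `#![feature(const_eval_limit)]` / `#![const_eval_limit = "..."]` attributes.)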
    pub steps_remaining: usize,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx, (), ()>>,
}

#[derive(Copy, Clone, Debug)]
pub struct MemoryExtra {
    /// We need to make sure consts never point to anything mutable, even recursively. That is
    /// relied on for pattern matching on consts with references.
    /// To achieve this, two pieces have to work together:
    /// * Interning makes everything outside of statics immutable.
    /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
    /// This boolean here controls the second part.
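    ///
    /// (Illustrative note, not from the original source: this flag is `false` for `const`
    /// initializers, so any read of a `static`'s allocation during their evaluation is
    /// rejected in `before_access_global` below with `ConstEvalErrKind::ConstAccessesStatic`.)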
    pub(super) can_access_statics: bool,
}

impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
    pub(super) fn new(const_eval_limit: Limit) -> Self {
        CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
    }
}

impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
    #[inline(always)]
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxHashMap::contains_key(self, k)
    }

    #[inline(always)]
    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxHashMap::insert(self, k, v)
    }

    #[inline(always)]
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        FxHashMap::remove(self, k)
    }

    #[inline(always)]
    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
    }

    #[inline(always)]
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    #[inline(always)]
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}
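
// Illustrative sketch (not part of the original source): the interpreter's memory
// presumably uses an `FxHashMap` as its allocation map through this trait. Note the
// asymmetry encoded above: `get_or` never creates entries (a vacant key during a read
// is a CTFE bug), while `get_mut_or` may insert the value produced by `vacant()`.
//
//     let mut map: FxHashMap<u32, &str> = FxHashMap::default();
//     interpret::AllocMap::insert(&mut map, 1, "some allocation");
//     assert!(interpret::AllocMap::contains_key(&mut map, &1));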

crate type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;

impl interpret::MayLeak for ! {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited
        self
    }
}

impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a == b,
            // Equality with integers can never be known for sure.
            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
            // FIXME: return `true` for when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }

    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because if they are in bounds, the pointer can't be null.
            (Scalar::Raw { data: 0, .. }, Scalar::Ptr(ptr))
            | (Scalar::Ptr(ptr), Scalar::Raw { data: 0, .. }) => !self.memory.ptr_may_be_null(ptr),
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in different static items.
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }
}
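
// Illustrative summary of the conservative answers above (not part of the original
// source; the values are hypothetical):
//
//     guaranteed_eq(Raw { data: 1, .. }, Raw { data: 1, .. }) == true   // integers are fully known
//     guaranteed_eq(Raw { .. },          Ptr(_))              == false  // cannot be decided
//     guaranteed_ne(Raw { data: 0, .. }, Ptr(p))              == true   // if `p` is in bounds, hence non-null
//     guaranteed_ne(Ptr(_),              Ptr(_))              == false  // conservative, see FIXMEs
//
// These helpers back the `ptr_guaranteed_eq` / `ptr_guaranteed_ne` intrinsics handled
// in `call_intrinsic` below, which must only answer `true` when the result is certain.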

impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    compile_time_machine!(<'mir, 'tcx>);

    type MemoryExtra = MemoryExtra;

    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions
        if let ty::InstanceDef::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            // at all.
            if ecx.tcx.is_const_fn_raw(def.did) {
                // If this function is a `const fn`, then under certain circumstances we
                // can evaluate the call via the query system, thus memoizing all future calls.
                if ecx.try_eval_const_fn_call(instance, ret, args)? {
                    return Ok(None);
                }
            } else {
                // Some functions we support even if they are non-const -- but avoid testing
                // that for const fn!
                ecx.hook_panic_fn(instance, args)?;
                // We certainly do *not* want to actually call the fn
                // though, so be sure we return here.
                throw_unsup_format!("calling non-const function `{}`", instance)
            }
        }
        // This is a const fn. Call it.
        Ok(Some(match ecx.load_mir(instance.def, None) {
            Ok(body) => body,
            Err(err) => {
                if let err_unsup!(NoMirFor(did)) = err.kind {
                    let path = ecx.tcx.def_path_str(did);
                    return Err(ConstEvalErrKind::NeedsRfc(format!(
                        "calling extern function `{}`",
                        path
                    ))
                    .into());
                }
                return Err(err);
            }
        }))
    }

    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, ret)? {
            return Ok(());
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        let (dest, ret) = match ret {
            None => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
            Some(p) => p,
        };
        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = ecx.read_immediate(args[0])?.to_scalar()?;
                let b = ecx.read_immediate(args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)
                } else {
                    ecx.guaranteed_ne(a, b)
                };
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            _ => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
        }

        ecx.go_to_block(ret);
        Ok(())
    }

    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
        let eval_to_int =
            |op| ecx.read_immediate(ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        };
        Err(ConstEvalErrKind::AssertFailure(err).into())
    }

    fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> {
        Err(ConstEvalErrKind::NeedsRfc("pointer-to-integer cast".to_string()).into())
    }

    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: ImmTy<'tcx>,
        _right: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    }

    fn box_alloc(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _dest: PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
    }

    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // The step limit has already been hit in a previous call to `before_terminator`.
        if ecx.machine.steps_remaining == 0 {
            return Ok(());
        }

        ecx.machine.steps_remaining -= 1;
        if ecx.machine.steps_remaining == 0 {
            throw_exhaust!(StepLimitReached)
        }

        Ok(())
    }
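
    // Illustrative trace (not part of the original source): with `steps_remaining`
    // starting at 3, the first two calls decrement it to 2 and then 1 and return `Ok(())`;
    // the third call decrements it to 0 and throws `StepLimitReached`. If the counter is
    // initialized to 0, the early return above fires on every call and no limit is enforced.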

    #[inline(always)]
    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.tcx.sess.recursion_limit().value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            Ok(frame)
        }
    }

    #[inline(always)]
    fn stack(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    #[inline(always)]
    fn stack_mut(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    fn before_access_global(
        memory_extra: &MemoryExtra,
        alloc_id: AllocId,
        allocation: &Allocation,
        static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            if allocation.mutability == Mutability::Not {
                Err(err_ub!(WriteToReadOnly(alloc_id)).into())
            } else {
                Err(ConstEvalErrKind::ModifiedGlobal.into())
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if memory_extra.can_access_statics {
                // Machine configuration allows us to read from anything (e.g., `static` initializer).
                Ok(())
            } else if static_def_id.is_some() {
                // Machine configuration does not allow us to read statics
                // (e.g., `const` initializer).
                // See const_eval::machine::MemoryExtra::can_access_statics for why
                // this check is so important: if we could read statics, we could read pointers
                // to mutable allocations *inside* statics. These allocations are not themselves
                // statics, so pointers to them can get around the check in `validity.rs`.
                Err(ConstEvalErrKind::ConstAccessesStatic.into())
            } else {
                // Immutable global, this read is fine.
                // But make sure we never accept a read from something mutable, as that would be
                // unsound. The reason is that the content of this allocation may be different
                // now and at run-time, so if we permitted reading now we might return the wrong
                // value.
                assert_eq!(allocation.mutability, Mutability::Not);
                Ok(())
            }
        }
    }
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.