// compiler/rustc_const_eval/src/const_eval/machine.rs (upstream rustc 1.63.0)
use rustc_hir::def::DefKind;
use rustc_middle::mir;
use rustc_middle::ty::{self, Ty, TyCtxt};
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
use std::hash::Hash;

use rustc_data_structures::fx::FxHashMap;
use std::fmt;

use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::{sym, Symbol};
use rustc_target::abi::{Align, Size};
use rustc_target::spec::abi::Abi;

use crate::interpret::{
    self, compile_time_machine, AllocId, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult,
    OpTy, PlaceTy, Pointer, Scalar, StackPopUnwind,
};

use super::error::*;

impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
    /// "Intercept" a function call to a panic-related function
    /// because we have something special to do for it.
    /// If this returns successfully (`Ok`), the function should just be evaluated normally.
    fn hook_special_const_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        // All `#[rustc_do_not_const_check]` functions should be hooked here.
        let def_id = instance.def_id();

        if Some(def_id) == self.tcx.lang_items().const_eval_select() {
            // redirect to const_eval_select_ct
            if let Some(const_eval_select) = self.tcx.lang_items().const_eval_select_ct() {
                return Ok(Some(
                    ty::Instance::resolve(
                        *self.tcx,
                        ty::ParamEnv::reveal_all(),
                        const_eval_select,
                        instance.substs,
                    )
                    .unwrap()
                    .unwrap(),
                ));
            }
        } else if Some(def_id) == self.tcx.lang_items().panic_display()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
        {
            // &str or &&str
            assert!(args.len() == 1);

            let mut msg_place = self.deref_operand(&args[0])?;
            while msg_place.layout.ty.is_ref() {
                msg_place = self.deref_operand(&msg_place.into())?;
            }

            let msg = Symbol::intern(self.read_str(&msg_place)?);
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            return Err(ConstEvalErrKind::Panic { msg, file, line, col }.into());
        } else if Some(def_id) == self.tcx.lang_items().panic_fmt() {
            // For panic_fmt, call const_panic_fmt instead.
            if let Some(const_panic_fmt) = self.tcx.lang_items().const_panic_fmt() {
                return Ok(Some(
                    ty::Instance::resolve(
                        *self.tcx,
                        ty::ParamEnv::reveal_all(),
                        const_panic_fmt,
                        self.tcx.intern_substs(&[]),
                    )
                    .unwrap()
                    .unwrap(),
                ));
            }
        }
        Ok(None)
    }
}
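
// Illustration (hedged, hypothetical user code; not part of this file): the hook
// above is what turns a panic reached during CTFE into a compile-time error, e.g.
//
//     const _: () = panic!("boom");
//
// expands into a call to the panic machinery (`panic_fmt`/`panic_display`/
// `begin_panic`), which is intercepted here so the message can be reported as a
// const-eval error instead of attempting to unwind.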

/// Extra machine state for CTFE, and the Machine instance
pub struct CompileTimeInterpreter<'mir, 'tcx> {
    /// For now, the number of terminators that can be evaluated before we throw a resource
    /// exhaustion error.
    ///
    /// Setting this to `0` disables the limit and allows the interpreter to run forever.
    pub steps_remaining: usize,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,

    /// We need to make sure consts never point to anything mutable, even recursively. That is
    /// relied on for pattern matching on consts with references.
    /// To achieve this, two pieces have to work together:
    /// * Interning makes everything outside of statics immutable.
    /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
    /// This boolean here controls the second part.
    pub(super) can_access_statics: bool,
}

impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
    pub(super) fn new(const_eval_limit: Limit, can_access_statics: bool) -> Self {
        CompileTimeInterpreter {
            steps_remaining: const_eval_limit.0,
            stack: Vec::new(),
            can_access_statics,
        }
    }
}
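
// For context (a hedged sketch of this machine's caller, not code from this file):
// the const-eval driver constructs the machine roughly as
//
//     CompileTimeInterpreter::new(tcx.const_eval_limit(), /* can_access_statics */ is_static)
//
// so `steps_remaining` starts at the session's `const_eval_limit` and reads from
// statics are only permitted when evaluating another static's initializer.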

impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
    #[inline(always)]
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxHashMap::contains_key(self, k)
    }

    #[inline(always)]
    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxHashMap::insert(self, k, v)
    }

    #[inline(always)]
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        FxHashMap::remove(self, k)
    }

    #[inline(always)]
    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
    }

    #[inline(always)]
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    #[inline(always)]
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}

pub(crate) type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum MemoryKind {
    Heap,
}

impl fmt::Display for MemoryKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Heap => write!(f, "heap allocation"),
        }
    }
}

impl interpret::MayLeak for MemoryKind {
    #[inline(always)]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Heap => false,
        }
    }
}

impl interpret::MayLeak for ! {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited
        self
    }
}

impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, bool> {
        Ok(match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int { .. }, Scalar::Int { .. }) => a == b,
            // Equality with integers can never be known for sure.
            (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => false,
            // FIXME: return `true` for when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
        })
    }

    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, bool> {
        Ok(match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int(_), Scalar::Int(_)) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because if they are in bounds, the pointer can't be null.
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Int(int), ptr @ Scalar::Ptr(..))
            | (ptr @ Scalar::Ptr(..), Scalar::Int(int)) => {
                int.is_null() && !self.scalar_may_be_null(ptr)?
            }
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in different static items.
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
        })
    }
}
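
// Illustration (hedged, not part of this file): these helpers back the
// `ptr_guaranteed_eq`/`ptr_guaranteed_ne` intrinsics handled in `call_intrinsic`
// below. For pointer/pointer comparisons they deliberately answer `false`
// ("no guarantee"), so even comparing a pointer with itself at CTFE time does not
// commit to equality, because some addresses (functions, vtables) are not stable.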

impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    compile_time_machine!(<'mir, 'tcx>);

    type MemoryKind = MemoryKind;

    const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error

    fn load_mir(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        instance: ty::InstanceDef<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        match instance {
            ty::InstanceDef::Item(def) => {
                if ecx.tcx.is_ctfe_mir_available(def.did) {
                    Ok(ecx.tcx.mir_for_ctfe_opt_const_arg(def))
                } else if ecx.tcx.def_kind(def.did) == DefKind::AssocConst {
                    let guar = ecx.tcx.sess.delay_span_bug(
                        rustc_span::DUMMY_SP,
                        "This is likely a const item that is missing from its impl",
                    );
                    throw_inval!(AlreadyReported(guar));
                } else {
                    let path = ecx.tcx.def_path_str(def.did);
                    Err(ConstEvalErrKind::NeedsRfc(format!("calling extern function `{}`", path))
                        .into())
                }
            }
            _ => Ok(ecx.tcx.instance_mir(instance)),
        }
    }
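
    // For context (hedged): `mir_for_ctfe_opt_const_arg` hands back the MIR body
    // that is maintained specifically for compile-time evaluation, while non-`Item`
    // instances (shims, drop glue, etc.) fall back to `instance_mir` above.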

    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        _abi: Abi,
        args: &[OpTy<'tcx>],
        _dest: &PlaceTy<'tcx>,
        _ret: Option<mir::BasicBlock>,
        _unwind: StackPopUnwind, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions
        if let ty::InstanceDef::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            // at all.
            if !ecx.tcx.is_const_fn_raw(def.did) {
                // allow calling functions inside a trait marked with #[const_trait].
                if !ecx.tcx.is_const_default_method(def.did) {
                    // We certainly do *not* want to actually call the fn
                    // though, so be sure we return here.
                    throw_unsup_format!("calling non-const function `{}`", instance)
                }
            }

            if let Some(new_instance) = ecx.hook_special_const_fn(instance, args)? {
                // We call another const fn instead.
                // However, we return the *original* instance to make backtraces work out
                // (and we hope this does not confuse the FnAbi checks too much).
                return Ok(Self::find_mir_or_eval_fn(
                    ecx,
                    new_instance,
                    _abi,
                    args,
                    _dest,
                    _ret,
                    _unwind,
                )?
                .map(|(body, _instance)| (body, instance)));
            }
        }
        // This is a const fn. Call it.
        Ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }
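
    // Illustration (hedged, hypothetical user code; not part of this file): the
    // `is_const_fn_raw` check above is the interpreter-side backstop behind errors
    // like
    //
    //     fn runtime_only() -> i32 { 0 }
    //     const C: i32 = runtime_only(); // rejected: calling non-const function
    //
    // (the MIR const checker usually reports this earlier), while calls to a
    // `const fn` fall through to `load_mir` and are interpreted normally.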

    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx, Self::PointerTag>,
        target: Option<mir::BasicBlock>,
        _unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, dest, target)? {
            return Ok(());
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        let Some(ret) = target else {
            return Err(ConstEvalErrKind::NeedsRfc(format!(
                "calling intrinsic `{}`",
                intrinsic_name
            ))
            .into());
        };
        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = ecx.read_immediate(&args[0])?.to_scalar()?;
                let b = ecx.read_immediate(&args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)?
                } else {
                    ecx.guaranteed_ne(a, b)?
                };
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            sym::const_allocate => {
                let size = ecx.read_scalar(&args[0])?.to_machine_usize(ecx)?;
                let align = ecx.read_scalar(&args[1])?.to_machine_usize(ecx)?;

                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_format!("align has to be a power of 2, {}", err),
                };

                let ptr = ecx.allocate_ptr(
                    Size::from_bytes(size as u64),
                    align,
                    interpret::MemoryKind::Machine(MemoryKind::Heap),
                )?;
                ecx.write_pointer(ptr, dest)?;
            }
            sym::const_deallocate => {
                let ptr = ecx.read_pointer(&args[0])?;
                let size = ecx.read_scalar(&args[1])?.to_machine_usize(ecx)?;
                let align = ecx.read_scalar(&args[2])?.to_machine_usize(ecx)?;

                let size = Size::from_bytes(size);
                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_format!("align has to be a power of 2, {}", err),
                };

                // If an allocation was created in another const,
                // we don't deallocate it.
                let (alloc_id, _, _) = ecx.ptr_get_alloc_id(ptr)?;
                let is_allocated_in_another_const = matches!(
                    ecx.tcx.get_global_alloc(alloc_id),
                    Some(interpret::GlobalAlloc::Memory(_))
                );

                if !is_allocated_in_another_const {
                    ecx.deallocate_ptr(
                        ptr,
                        Some((size, align)),
                        interpret::MemoryKind::Machine(MemoryKind::Heap),
                    )?;
                }
            }
            _ => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
        }

        ecx.go_to_block(ret);
        Ok(())
    }
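
    // Illustration (hedged; unstable surface, not part of this file): the
    // `const_allocate`/`const_deallocate` arms above back the unstable
    // `core::intrinsics::const_allocate(size, align)` and
    // `core::intrinsics::const_deallocate(ptr, size, align)` intrinsics from the
    // "const heap" experiments: scratch memory allocated inside a `const fn` can be
    // freed again, while allocations interned from an earlier const are deliberately
    // left alone.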

    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<ConstInt>`.
        let eval_to_int =
            |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        };
        Err(ConstEvalErrKind::AssertFailure(err).into())
    }
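
    // Illustration (hedged, hypothetical user code; not part of this file): MIR
    // `Assert` terminators, e.g. the bounds check generated for
    //
    //     const C: u8 = [1u8, 2, 3][5];
    //
    // land here; the length and index operands are evaluated to constants so the
    // final diagnostic can report their actual values.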

    fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: String) -> InterpResult<'tcx, !> {
        Err(ConstEvalErrKind::Abort(msg).into())
    }

    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: &ImmTy<'tcx>,
        _right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    }

    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // The step limit has already been hit in a previous call to `before_terminator`.
        if ecx.machine.steps_remaining == 0 {
            return Ok(());
        }

        ecx.machine.steps_remaining -= 1;
        if ecx.machine.steps_remaining == 0 {
            throw_exhaust!(StepLimitReached)
        }

        Ok(())
    }
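
    // For context (hedged): `steps_remaining` is seeded from the session's
    // `const_eval_limit` (see `new` above); once it counts down to zero, evaluation
    // aborts with `StepLimitReached`, and a starting value of 0 means "no limit",
    // which is what the early return at the top implements. The unstable
    // `#![const_eval_limit = "N"]` crate attribute (behind
    // `#![feature(const_eval_limit)]`) can raise the limit.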

    #[inline(always)]
    fn expose_ptr(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _ptr: Pointer<AllocId>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("exposing pointers".to_string()).into())
    }

    #[inline(always)]
    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            Ok(frame)
        }
    }

    #[inline(always)]
    fn stack<'a>(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    #[inline(always)]
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    fn before_access_global(
        _tcx: TyCtxt<'tcx>,
        machine: &Self,
        alloc_id: AllocId,
        alloc: ConstAllocation<'tcx>,
        static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        let alloc = alloc.inner();
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            if alloc.mutability == Mutability::Not {
                Err(err_ub!(WriteToReadOnly(alloc_id)).into())
            } else {
                Err(ConstEvalErrKind::ModifiedGlobal.into())
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if machine.can_access_statics {
                // Machine configuration allows us to read from anything (e.g., `static` initializer).
                Ok(())
            } else if static_def_id.is_some() {
                // Machine configuration does not allow us to read statics
                // (e.g., `const` initializer).
                // See const_eval::machine::MemoryExtra::can_access_statics for why
                // this check is so important: if we could read statics, we could read pointers
                // to mutable allocations *inside* statics. These allocations are not themselves
                // statics, so pointers to them can get around the check in `validity.rs`.
                Err(ConstEvalErrKind::ConstAccessesStatic.into())
            } else {
                // Immutable global, this read is fine.
                // But make sure we never accept a read from something mutable, as that would be
                // unsound. The content of this allocation may be different now than at run-time,
                // so if we permitted reading now we might return the wrong value.
                assert_eq!(alloc.mutability, Mutability::Not);
                Ok(())
            }
        }
    }
}
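
// Illustration (hedged, hypothetical user code; not part of this file): the
// `can_access_statics` gate above is the interpreter-side backstop behind errors
// such as
//
//     static S: i32 = 42;
//     const C: i32 = S; // rejected: constants cannot refer to statics
//
// (the direct case is usually reported earlier by the const checker; this hook also
// catches reads that only surface during evaluation), while a `static` initializer
// runs with `can_access_statics == true` and may read other statics.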

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.