1 use rustc_hir
::def
::DefKind
;
3 use rustc_middle
::ty
::{self, Ty, TyCtxt}
;
4 use std
::borrow
::Borrow
;
5 use std
::collections
::hash_map
::Entry
;
8 use rustc_data_structures
::fx
::FxHashMap
;
11 use rustc_ast
::Mutability
;
12 use rustc_hir
::def_id
::DefId
;
13 use rustc_middle
::mir
::AssertMessage
;
14 use rustc_session
::Limit
;
15 use rustc_span
::symbol
::{sym, Symbol}
;
16 use rustc_target
::abi
::{Align, Size}
;
17 use rustc_target
::spec
::abi
::Abi
;
19 use crate::interpret
::{
20 self, compile_time_machine
, AllocId
, ConstAllocation
, Frame
, ImmTy
, InterpCx
, InterpResult
,
21 OpTy
, PlaceTy
, Pointer
, Scalar
, StackPopUnwind
,
// NOTE(review): this file is a garbled extraction — embedded original line
// numbers and jumps in that numbering show many lines are missing from this
// impl. Comments below describe only the visible fragments.
//
// Visible behavior of `hook_special_const_fn`:
// - `const_eval_select` lang item: redirected (via `ty::Instance::resolve`
//   under `ParamEnv::reveal_all`) — presumably to `const_eval_select_ct`;
//   the resolve call's arguments are partly missing here, confirm upstream.
// - `panic_display` / `begin_panic_fn`: deref the single message operand
//   through any layers of references, intern the string, and abort const-eval
//   with `ConstEvalErrKind::Panic { msg, file, line, col }` located at the
//   closest untracked caller.
// - `panic_fmt`: redirected to `const_panic_fmt` the same way.
26 impl<'mir
, 'tcx
> InterpCx
<'mir
, 'tcx
, CompileTimeInterpreter
<'mir
, 'tcx
>> {
27 /// "Intercept" a function call to a panic-related function
28 /// because we have something special to do for it.
29 /// If this returns successfully (`Ok`), the function should just be evaluated normally.
30 fn hook_special_const_fn(
32 instance
: ty
::Instance
<'tcx
>,
34 ) -> InterpResult
<'tcx
, Option
<ty
::Instance
<'tcx
>>> {
35 // All `#[rustc_do_not_const_check]` functions should be hooked here.
36 let def_id
= instance
.def_id();
38 if Some(def_id
) == self.tcx
.lang_items().const_eval_select() {
39 // redirect to const_eval_select_ct
40 if let Some(const_eval_select
) = self.tcx
.lang_items().const_eval_select_ct() {
42 ty
::Instance
::resolve(
44 ty
::ParamEnv
::reveal_all(),
// NOTE(review): original lines 45-51 are missing here (resolve arguments and
// the return of the redirected instance) — do not assume their exact shape.
52 } else if Some(def_id
) == self.tcx
.lang_items().panic_display()
53 || Some(def_id
) == self.tcx
.lang_items().begin_panic_fn()
// Both panic entry points take exactly one message argument.
56 assert
!(args
.len() == 1);
58 let mut msg_place
= self.deref_operand(&args
[0])?
;
// Peel `&&str`, `&&&str`, ... down to the underlying `str` place.
59 while msg_place
.layout
.ty
.is_ref() {
60 msg_place
= self.deref_operand(&msg_place
.into())?
;
63 let msg
= Symbol
::intern(self.read_str(&msg_place
)?
);
64 let span
= self.find_closest_untracked_caller_location();
65 let (file
, line
, col
) = self.location_triple_for_span(span
);
// A const-eval panic is reported as an error rather than unwinding.
66 return Err(ConstEvalErrKind
::Panic { msg, file, line, col }
.into());
67 } else if Some(def_id
) == self.tcx
.lang_items().panic_fmt() {
68 // For panic_fmt, call const_panic_fmt instead.
69 if let Some(const_panic_fmt
) = self.tcx
.lang_items().const_panic_fmt() {
71 ty
::Instance
::resolve(
73 ty
::ParamEnv
::reveal_all(),
75 self.tcx
.intern_substs(&[]),
// NOTE(review): truncated extraction — original lines 89-90, 93, 96 and the
// struct's closing brace are missing; only the three visible fields are
// documented here.
86 /// Extra machine state for CTFE, and the Machine instance
87 pub struct CompileTimeInterpreter
<'mir
, 'tcx
> {
88 /// For now, the number of terminators that can be evaluated before we throw a resource
// (rest of this doc comment is missing from the extraction)
91 /// Setting this to `0` disables the limit and allows the interpreter to run forever.
// Decremented in `before_terminator`; hitting 0 raises `StepLimitReached`.
92 pub steps_remaining
: usize,
94 /// The virtual call stack.
// Frames are untagged (`AllocId` provenance, `()` frame extra).
95 pub(crate) stack
: Vec
<Frame
<'mir
, 'tcx
, AllocId
, ()>>,
97 /// We need to make sure consts never point to anything mutable, even recursively. That is
98 /// relied on for pattern matching on consts with references.
99 /// To achieve this, two pieces have to work together:
100 /// * Interning makes everything outside of statics immutable.
101 /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
102 /// This boolean here controls the second part.
// Checked in `before_access_global` to reject `static` reads from consts.
103 pub(super) can_access_statics
: bool
,
// NOTE(review): truncated — original lines 110-114 (remaining field
// initializers and closing braces) are missing from this extraction.
106 impl<'mir
, 'tcx
> CompileTimeInterpreter
<'mir
, 'tcx
> {
// Constructor: seeds the step budget from the session's const-eval `Limit`
// (its inner `usize` via `.0`) and records the statics-access policy.
107 pub(super) fn new(const_eval_limit
: Limit
, can_access_statics
: bool
) -> Self {
108 CompileTimeInterpreter
{
109 steps_remaining
: const_eval_limit
.0,
// NOTE(review): truncated extraction — method bodies are missing their
// braces and some interior lines (e.g. original 119-121, 159-165). The
// visible code makes `FxHashMap` the CTFE allocation map by forwarding
// each `interpret::AllocMap` operation to the corresponding inherent
// `FxHashMap`/`HashMap` method.
116 impl<K
: Hash
+ Eq
, V
> interpret
::AllocMap
<K
, V
> for FxHashMap
<K
, V
> {
// Plain forwarding; takes `&mut self` only because the trait requires it.
118 fn contains_key
<Q
: ?Sized
+ Hash
+ Eq
>(&mut self, k
: &Q
) -> bool
122 FxHashMap
::contains_key(self, k
)
// Forward to `HashMap::insert`; returns the displaced value, if any.
126 fn insert(&mut self, k
: K
, v
: V
) -> Option
<V
> {
127 FxHashMap
::insert(self, k
, v
)
// Forward to `HashMap::remove`.
131 fn remove
<Q
: ?Sized
+ Hash
+ Eq
>(&mut self, k
: &Q
) -> Option
<V
>
135 FxHashMap
::remove(self, k
)
// Collect `f(k, v)` over all entries, dropping `None` results.
139 fn filter_map_collect
<T
>(&self, mut f
: impl FnMut(&K
, &V
) -> Option
<T
>) -> Vec
<T
> {
140 self.iter().filter_map(move |(k
, v
)| f(k
, &*v
)).collect()
// Read-only lookup: the vacant path is a compiler bug by design — reading
// must never create allocations (see `bug!` below). The `vacant` closure
// is presumably ignored; surrounding lines are missing, confirm upstream.
144 fn get_or
<E
>(&self, k
: K
, vacant
: impl FnOnce() -> Result
<V
, E
>) -> Result
<&V
, E
> {
149 bug
!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
// Mutable lookup-or-insert via the entry API (single hash lookup); the
// vacant arm's body (original lines 159+) is missing from this extraction.
155 fn get_mut_or
<E
>(&mut self, k
: K
, vacant
: impl FnOnce() -> Result
<V
, E
>) -> Result
<&mut V
, E
> {
156 match self.entry(k
) {
157 Entry
::Occupied(e
) => Ok(e
.into_mut()),
158 Entry
::Vacant(e
) => {
// Shorthand for an interpreter context driven by the CTFE machine above.
166 pub(crate) type CompileTimeEvalContext
<'mir
, 'tcx
> =
167 InterpCx
<'mir
, 'tcx
, CompileTimeInterpreter
<'mir
, 'tcx
>>;
// Machine-specific memory kinds for CTFE. NOTE(review): the variants
// (original lines 171-173) are missing from this extraction; the `Display`
// and `MayLeak` impls later in this file reference a `Heap` variant
// (used for `const_allocate` allocations).
169 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
170 pub enum MemoryKind
{
// Human-readable names for machine memory kinds (used in diagnostics).
// NOTE(review): the `match` header and closing braces (original lines
// 176, 178-180) are missing from this extraction.
174 impl fmt
::Display
for MemoryKind
{
175 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
177 MemoryKind
::Heap
=> write
!(f
, "heap allocation"),
// Leak policy: `Heap` allocations (from `const_allocate`) must not leak out
// of const evaluation. NOTE(review): the surrounding `match`/braces
// (original lines 183, 185, 187-189) are missing from this extraction.
182 impl interpret
::MayLeak
for MemoryKind
{
184 fn may_leak(self) -> bool
{
186 MemoryKind
::Heap
=> false,
// `MayLeak` for the never type: `may_leak` can never actually be called
// since `self: !` is uninhabited (body lines 195-197 missing here,
// presumably a `match self {}`-style unreachable body — confirm upstream).
191 impl interpret
::MayLeak
for ! {
193 fn may_leak(self) -> bool
{
194 // `self` is uninhabited
// Conservative compile-time pointer comparisons backing the
// `ptr_guaranteed_eq` / `ptr_guaranteed_ne` intrinsics. Both return whether
// the relation is *guaranteed* to hold: `false` means "unknown", not "the
// opposite holds". NOTE(review): the `match (a, b)`-style scrutinee lines
// and closing braces are missing from this extraction (original numbering
// jumps 200→202, 209→213, 222→224, 227→end).
199 impl<'mir
, 'tcx
: 'mir
> CompileTimeEvalContext
<'mir
, 'tcx
> {
200 fn guaranteed_eq(&mut self, a
: Scalar
, b
: Scalar
) -> InterpResult
<'tcx
, bool
> {
202 // Comparisons between integers are always known.
203 (Scalar
::Int { .. }
, Scalar
::Int { .. }
) => a
== b
,
204 // Equality with integers can never be known for sure.
205 (Scalar
::Int { .. }
, Scalar
::Ptr(..)) | (Scalar
::Ptr(..), Scalar
::Int { .. }
) => false,
206 // FIXME: return `true` for when both sides are the same pointer, *except* that
207 // some things (like functions and vtables) do not have stable addresses
208 // so we need to be careful around them (see e.g. #73722).
209 (Scalar
::Ptr(..), Scalar
::Ptr(..)) => false,
213 fn guaranteed_ne(&mut self, a
: Scalar
, b
: Scalar
) -> InterpResult
<'tcx
, bool
> {
215 // Comparisons between integers are always known.
216 (Scalar
::Int(_
), Scalar
::Int(_
)) => a
!= b
,
217 // Comparisons of abstract pointers with null pointers are known if the pointer
218 // is in bounds, because if they are in bounds, the pointer can't be null.
219 // Inequality with integers other than null can never be known for sure.
220 (Scalar
::Int(int
), ptr @ Scalar
::Ptr(..))
221 | (ptr @ Scalar
::Ptr(..), Scalar
::Int(int
)) => {
// Guaranteed-not-equal only when comparing against literal null AND the
// pointer is known to be non-null.
222 int
.is_null() && !self.scalar_may_be_null(ptr
)?
224 // FIXME: return `true` for at least some comparisons where we can reliably
225 // determine the result of runtime inequality tests at compile-time.
226 // Examples include comparison of addresses in different static items.
227 (Scalar
::Ptr(..), Scalar
::Ptr(..)) => false,
// The `interpret::Machine` implementation that turns the generic Miri-style
// interpreter into the compile-time (CTFE) evaluator. The
// `compile_time_machine!` macro fills in the shared associated items.
232 impl<'mir
, 'tcx
> interpret
::Machine
<'mir
, 'tcx
> for CompileTimeInterpreter
<'mir
, 'tcx
> {
233 compile_time_machine
!(<'mir
, 'tcx
>);
235 type MemoryKind
= MemoryKind
;
// Allocation failure becomes a reported error, not an ICE/panic.
237 const PANIC_ON_ALLOC_FAIL
: bool
= false; // will be raised as a proper error
// NOTE(review): the head of this signature (original lines 238-239,
// presumably `fn load_mir(`) is missing from this extraction. Visible
// behavior: fetch the MIR body to interpret for an instance —
// - `InstanceDef::Item`: use the CTFE-specific MIR when available;
//   a missing associated const is a delayed bug (`AlreadyReported`);
//   otherwise report calling an extern function as `NeedsRfc`.
// - any other instance kind (shims/glue): use `instance_mir`.
240 ecx
: &InterpCx
<'mir
, 'tcx
, Self>,
241 instance
: ty
::InstanceDef
<'tcx
>,
242 ) -> InterpResult
<'tcx
, &'tcx mir
::Body
<'tcx
>> {
244 ty
::InstanceDef
::Item(def
) => {
245 if ecx
.tcx
.is_ctfe_mir_available(def
.did
) {
246 Ok(ecx
.tcx
.mir_for_ctfe_opt_const_arg(def
))
247 } else if ecx
.tcx
.def_kind(def
.did
) == DefKind
::AssocConst
{
// Missing assoc-const MIR is assumed to be an upstream error already
// emitted, so delay a span bug instead of ICE-ing immediately.
248 let guar
= ecx
.tcx
.sess
.delay_span_bug(
249 rustc_span
::DUMMY_SP
,
250 "This is likely a const item that is missing from its impl",
252 throw_inval
!(AlreadyReported(guar
));
254 let path
= ecx
.tcx
.def_path_str(def
.did
);
255 Err(ConstEvalErrKind
::NeedsRfc(format
!("calling extern function `{}`", path
))
259 _
=> Ok(ecx
.tcx
.instance_mir(instance
)),
// Resolve a function call during CTFE: reject non-const fns, let
// `hook_special_const_fn` redirect panic/const_eval_select calls, and
// otherwise return the MIR body to execute. NOTE(review): several interior
// lines are missing (original 266-267, 273, 278, 285-287, 293-300, 302-303),
// including parts of the recursive redirect call — confirm upstream.
263 fn find_mir_or_eval_fn(
264 ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
265 instance
: ty
::Instance
<'tcx
>,
268 _dest
: &PlaceTy
<'tcx
>,
269 _ret
: Option
<mir
::BasicBlock
>,
270 _unwind
: StackPopUnwind
, // unwinding is not supported in consts
271 ) -> InterpResult
<'tcx
, Option
<(&'mir mir
::Body
<'tcx
>, ty
::Instance
<'tcx
>)>> {
272 debug
!("find_mir_or_eval_fn: {:?}", instance
);
274 // Only check non-glue functions
275 if let ty
::InstanceDef
::Item(def
) = instance
.def
{
276 // Execution might have wandered off into other crates, so we cannot do a stability-
277 // sensitive check here. But we can at least rule out functions that are not const
279 if !ecx
.tcx
.is_const_fn_raw(def
.did
) {
280 // allow calling functions inside a trait marked with #[const_trait].
281 if !ecx
.tcx
.is_const_default_method(def
.did
) {
282 // We certainly do *not* want to actually call the fn
283 // though, so be sure we return here.
284 throw_unsup_format
!("calling non-const function `{}`", instance
)
288 if let Some(new_instance
) = ecx
.hook_special_const_fn(instance
, args
)?
{
289 // We call another const fn instead.
290 // However, we return the *original* instance to make backtraces work out
291 // (and we hope this does not confuse the FnAbi checks too much).
292 return Ok(Self::find_mir_or_eval_fn(
301 .map(|(body
, _instance
)| (body
, instance
)));
304 // This is a const fn. Call it.
305 Ok(Some((ecx
.load_mir(instance
.def
, None
)?
, instance
)))
// NOTE(review): the head of this signature (original lines 307-308,
// presumably `fn call_intrinsic(`) is missing, as are several interior
// lines (e.g. 318-319, 326-329, 336/338 `else` lines, 344, 346, 348-349,
// 352, 354, 356, 361, 364, 366-367, 374-375, 377-379, 381-384, 387-392).
// Visible behavior: first try the shared `emulate_intrinsic` path; then
// handle CTFE-specific intrinsics, requiring a return target (`NeedsRfc`
// otherwise), and finally jump to the return block.
309 ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
310 instance
: ty
::Instance
<'tcx
>,
312 dest
: &PlaceTy
<'tcx
, Self::PointerTag
>,
313 target
: Option
<mir
::BasicBlock
>,
314 _unwind
: StackPopUnwind
,
315 ) -> InterpResult
<'tcx
> {
316 // Shared intrinsics.
317 if ecx
.emulate_intrinsic(instance
, args
, dest
, target
)?
{
320 let intrinsic_name
= ecx
.tcx
.item_name(instance
.def_id());
322 // CTFE-specific intrinsics.
// Diverging intrinsic calls (no return target) are not supported here.
323 let Some(ret
) = target
else {
324 return Err(ConstEvalErrKind
::NeedsRfc(format
!(
325 "calling intrinsic `{}`",
330 match intrinsic_name
{
// Pointer comparison intrinsics: delegate to the conservative
// guaranteed_eq/guaranteed_ne helpers and write back a bool.
331 sym
::ptr_guaranteed_eq
| sym
::ptr_guaranteed_ne
=> {
332 let a
= ecx
.read_immediate(&args
[0])?
.to_scalar()?
;
333 let b
= ecx
.read_immediate(&args
[1])?
.to_scalar()?
;
334 let cmp
= if intrinsic_name
== sym
::ptr_guaranteed_eq
{
335 ecx
.guaranteed_eq(a
, b
)?
337 ecx
.guaranteed_ne(a
, b
)?
339 ecx
.write_scalar(Scalar
::from_bool(cmp
), dest
)?
;
// const_allocate(size, align): heap allocation inside const-eval,
// tagged with the machine's `MemoryKind::Heap`.
341 sym
::const_allocate
=> {
342 let size
= ecx
.read_scalar(&args
[0])?
.to_machine_usize(ecx
)?
;
343 let align
= ecx
.read_scalar(&args
[1])?
.to_machine_usize(ecx
)?
;
345 let align
= match Align
::from_bytes(align
) {
347 Err(err
) => throw_ub_format
!("align has to be a power of 2, {}", err
),
350 let ptr
= ecx
.allocate_ptr(
351 Size
::from_bytes(size
as u64),
353 interpret
::MemoryKind
::Machine(MemoryKind
::Heap
),
355 ecx
.write_pointer(ptr
, dest
)?
;
// const_deallocate(ptr, size, align): frees a const_allocate'd block,
// but deliberately no-ops on allocations interned by another const.
357 sym
::const_deallocate
=> {
358 let ptr
= ecx
.read_pointer(&args
[0])?
;
359 let size
= ecx
.read_scalar(&args
[1])?
.to_machine_usize(ecx
)?
;
360 let align
= ecx
.read_scalar(&args
[2])?
.to_machine_usize(ecx
)?
;
362 let size
= Size
::from_bytes(size
);
363 let align
= match Align
::from_bytes(align
) {
365 Err(err
) => throw_ub_format
!("align has to be a power of 2, {}", err
),
368 // If an allocation is created in an another const,
369 // we don't deallocate it.
370 let (alloc_id
, _
, _
) = ecx
.ptr_get_alloc_id(ptr
)?
;
371 let is_allocated_in_another_const
= matches
!(
372 ecx
.tcx
.get_global_alloc(alloc_id
),
373 Some(interpret
::GlobalAlloc
::Memory(_
))
376 if !is_allocated_in_another_const
{
// (the deallocate_ptr call itself is missing from this extraction)
380 interpret
::MemoryKind
::Machine(MemoryKind
::Heap
),
// Any other intrinsic is unsupported in const contexts.
385 return Err(ConstEvalErrKind
::NeedsRfc(format
!(
386 "calling intrinsic `{}`",
// The intrinsic was handled: continue execution at the return block.
393 ecx
.go_to_block(ret
);
// NOTE(review): the signature head (original line 397, presumably
// `fn assert_panic(`) and a few interior lines (404, 411, 418) are missing.
// Visible behavior: evaluate each operand in the `AssertMessage` to a
// concrete const int, rebuild the same `AssertKind` with those values, and
// fail const-eval with `ConstEvalErrKind::AssertFailure` (no unwinding).
398 ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
399 msg
: &AssertMessage
<'tcx
>,
400 _unwind
: Option
<mir
::BasicBlock
>,
401 ) -> InterpResult
<'tcx
> {
402 use rustc_middle
::mir
::AssertKind
::*;
403 // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
// (the `let eval_to_int =` binding line is missing from this extraction)
405 |op
| ecx
.read_immediate(&ecx
.eval_operand(op
, None
)?
).map(|x
| x
.to_const_int());
406 let err
= match msg
{
407 BoundsCheck { ref len, ref index }
=> {
408 let len
= eval_to_int(len
)?
;
409 let index
= eval_to_int(index
)?
;
410 BoundsCheck { len, index }
412 Overflow(op
, l
, r
) => Overflow(*op
, eval_to_int(l
)?
, eval_to_int(r
)?
),
413 OverflowNeg(op
) => OverflowNeg(eval_to_int(op
)?
),
414 DivisionByZero(op
) => DivisionByZero(eval_to_int(op
)?
),
415 RemainderByZero(op
) => RemainderByZero(eval_to_int(op
)?
),
416 ResumedAfterReturn(generator_kind
) => ResumedAfterReturn(*generator_kind
),
417 ResumedAfterPanic(generator_kind
) => ResumedAfterPanic(*generator_kind
),
419 Err(ConstEvalErrKind
::AssertFailure(err
).into())
// `abort` during CTFE: surfaces the message as a const-eval error
// (return type `!` — this never resumes execution).
422 fn abort(_ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>, msg
: String
) -> InterpResult
<'tcx
, !> {
423 Err(ConstEvalErrKind
::Abort(msg
).into())
// NOTE(review): the signature head and some parameters (original lines
// 425-426, 428-429) are missing — presumably `fn binary_ptr_op(` with the
// op and left operand; confirm upstream. Visible behavior: all binary
// pointer operations are rejected in const-eval as `NeedsRfc`.
427 _ecx
: &InterpCx
<'mir
, 'tcx
, Self>,
430 _right
: &ImmTy
<'tcx
>,
431 ) -> InterpResult
<'tcx
, (Scalar
, bool
, Ty
<'tcx
>)> {
432 Err(ConstEvalErrKind
::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
// Step-limit accounting, run before every terminator: decrement
// `steps_remaining` and raise `StepLimitReached` when the budget is
// exhausted. NOTE(review): lines 438-440 and 444+ are missing — presumably
// the early-return for the already-disabled (0) case and closing braces;
// confirm upstream.
435 fn before_terminator(ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>) -> InterpResult
<'tcx
> {
436 // The step limit has already been hit in a previous call to `before_terminator`.
437 if ecx
.machine
.steps_remaining
== 0 {
441 ecx
.machine
.steps_remaining
-= 1;
442 if ecx
.machine
.steps_remaining
== 0 {
443 throw_exhaust
!(StepLimitReached
)
// NOTE(review): signature head missing (original lines 449-450, presumably
// `fn expose_ptr(`). Visible behavior: exposing a pointer (for
// ptr-to-int round trips) is rejected in const-eval as `NeedsRfc`.
451 _ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
452 _ptr
: Pointer
<AllocId
>,
453 ) -> InterpResult
<'tcx
> {
454 Err(ConstEvalErrKind
::NeedsRfc("exposing pointers".to_string()).into())
// NOTE(review): signature head missing (original lines 457-458, presumably
// `fn init_frame_extra(`), and the success path returning the frame
// (lines 465+) is also missing. Visible behavior: enforce the recursion
// limit against the stack depth before a new frame is pushed.
459 ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
460 frame
: Frame
<'mir
, 'tcx
>,
461 ) -> InterpResult
<'tcx
, Frame
<'mir
, 'tcx
>> {
462 // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
463 if !ecx
.recursion_limit
.value_within_limit(ecx
.stack().len() + 1) {
464 throw_exhaust
!(StackFrameLimitReached
)
// NOTE(review): two accessor fragments with their `fn` header lines missing
// (presumably `fn stack(` and `fn stack_mut(`). They expose the machine's
// call stack to the generic interpreter — the immutable accessor's body
// (original lines 474-477) is missing; the mutable one visibly returns
// `&mut ecx.machine.stack`.
472 ecx
: &'a InterpCx
<'mir
, 'tcx
, Self>,
473 ) -> &'a
[Frame
<'mir
, 'tcx
, Self::PointerTag
, Self::FrameExtra
>] {
479 ecx
: &'a
mut InterpCx
<'mir
, 'tcx
, Self>,
480 ) -> &'a
mut Vec
<Frame
<'mir
, 'tcx
, Self::PointerTag
, Self::FrameExtra
>> {
481 &mut ecx
.machine
.stack
// Access-control hook for global allocations. NOTE(review): several lines
// are missing (original 485-487, 490, 493, 497, 499-500, 504, 513, 519+),
// including the parameters carrying `machine`, `alloc_id`, and the
// read/write discriminator, plus the final `Ok(())` paths — the visible
// `is_write`-style branch structure must be confirmed upstream.
// Visible policy:
// - writes: always rejected (WriteToReadOnly for immutable allocs,
//   ModifiedGlobal otherwise);
// - reads: allowed when `can_access_statics` is set; reads of statics from
//   const initializers rejected (`ConstAccessesStatic`) to keep consts from
//   reaching mutable memory; otherwise the alloc must be immutable
//   (asserted) and the read is fine.
484 fn before_access_global(
488 alloc
: ConstAllocation
<'tcx
>,
489 static_def_id
: Option
<DefId
>,
491 ) -> InterpResult
<'tcx
> {
492 let alloc
= alloc
.inner();
494 // Write access. These are never allowed, but we give a targeted error message.
495 if alloc
.mutability
== Mutability
::Not
{
496 Err(err_ub
!(WriteToReadOnly(alloc_id
)).into())
498 Err(ConstEvalErrKind
::ModifiedGlobal
.into())
501 // Read access. These are usually allowed, with some exceptions.
502 if machine
.can_access_statics
{
503 // Machine configuration allows us read from anything (e.g., `static` initializer).
505 } else if static_def_id
.is_some() {
506 // Machine configuration does not allow us to read statics
507 // (e.g., `const` initializer).
508 // See const_eval::machine::MemoryExtra::can_access_statics for why
509 // this check is so important: if we could read statics, we could read pointers
510 // to mutable allocations *inside* statics. These allocations are not themselves
511 // statics, so pointers to them can get around the check in `validity.rs`.
512 Err(ConstEvalErrKind
::ConstAccessesStatic
.into())
514 // Immutable global, this read is fine.
515 // But make sure we never accept a read from something mutable, that would be
516 // unsound. The reason is that as the content of this allocation may be different
517 // now and at run-time, so if we permit reading now we might return the wrong value.
518 assert_eq
!(alloc
.mutability
, Mutability
::Not
);
525 // Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
526 // so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
527 // at the bottom of this file.