2 use rustc_middle
::ty
::{self, Ty}
;
3 use std
::borrow
::Borrow
;
4 use std
::collections
::hash_map
::Entry
;
7 use rustc_data_structures
::fx
::FxHashMap
;
10 use rustc_ast
::Mutability
;
11 use rustc_hir
::def_id
::DefId
;
12 use rustc_middle
::mir
::AssertMessage
;
13 use rustc_session
::Limit
;
14 use rustc_span
::symbol
::{sym, Symbol}
;
15 use rustc_target
::abi
::{Align, Size}
;
16 use rustc_target
::spec
::abi
::Abi
;
18 use crate::interpret
::{
19 self, compile_time_machine
, AllocId
, Allocation
, Frame
, ImmTy
, InterpCx
, InterpResult
, OpTy
,
20 PlaceTy
, Scalar
, StackPopUnwind
,
impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
    /// "Intercept" a function call to a panic-related function
    /// because we have something special to do for it.
    /// If this returns successfully (`Ok`), the function should just be evaluated normally.
    // NOTE(review): this excerpt is truncated — the `fn` header (method name,
    // `&mut self`, and the `args` slice parameter used below) is missing, as are
    // the braces around the `if` body and the fall-through `Ok(())` path.
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx> {
        // Identify which function is being called.
        let def_id = instance.def_id();
        // Only the panic entry points known to lang items get intercepted.
        if Some(def_id) == self.tcx.lang_items().panic_fn()
            || Some(def_id) == self.tcx.lang_items().panic_str()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
            // All intercepted panic functions take exactly one (message) argument.
            assert!(args.len() == 1);

            // Dereference the argument and read the panic message string out of it.
            let msg_place = self.deref_operand(&args[0])?;
            let msg = Symbol::intern(self.read_str(&msg_place)?);
            // Attribute the panic to the nearest caller location that is not
            // tracked (see `find_closest_untracked_caller_location`).
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            // Surface the interpreted panic as a const-eval error.
            Err(ConstEvalErrKind::Panic { msg, file, line, col }.into())
53 /// Extra machine state for CTFE, and the Machine instance
54 pub struct CompileTimeInterpreter
<'mir
, 'tcx
> {
55 /// For now, the number of terminators that can be evaluated before we throw a resource
58 /// Setting this to `0` disables the limit and allows the interpreter to run forever.
59 pub steps_remaining
: usize,
61 /// The virtual call stack.
62 pub(crate) stack
: Vec
<Frame
<'mir
, 'tcx
, AllocId
, ()>>,
65 #[derive(Copy, Clone, Debug)]
66 pub struct MemoryExtra
{
67 /// We need to make sure consts never point to anything mutable, even recursively. That is
68 /// relied on for pattern matching on consts with references.
69 /// To achieve this, two pieces have to work together:
70 /// * Interning makes everything outside of statics immutable.
71 /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
72 /// This boolean here controls the second part.
73 pub(super) can_access_statics
: bool
,
76 impl<'mir
, 'tcx
> CompileTimeInterpreter
<'mir
, 'tcx
> {
77 pub(super) fn new(const_eval_limit
: Limit
) -> Self {
78 CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
// The CTFE machine stores its allocations in a plain `FxHashMap`.
// NOTE(review): this excerpt is truncated — several closing braces, the
// `where` clauses on `contains_key`/`remove` (presumably `K: Borrow<Q>`,
// given the `Borrow` import above — TODO confirm), and the body of the
// `Vacant` arm in `get_mut_or` are missing.
impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
        // Delegate straight to the underlying hash map.
        FxHashMap::contains_key(self, k)

    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxHashMap::insert(self, k, v)

    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
        FxHashMap::remove(self, k)

    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()

    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        // A read must never have to create a new allocation; treat it as a bug.
        bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")

    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        // Single lookup via the entry API: reuse an existing slot, otherwise
        // fill it from `vacant()`.
        match self.entry(k) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(e) => {
/// Shorthand for the interpreter context used for compile-time function
/// evaluation: an `InterpCx` driven by the `CompileTimeInterpreter` machine.
crate type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
/// Machine-specific memory kinds for CTFE.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum MemoryKind {
    /// Memory allocated via the `const_allocate` intrinsic — the CTFE "heap"
    /// (see the `const_allocate` handling and `MayLeak` impl below).
    Heap,
}
140 impl fmt
::Display
for MemoryKind
{
141 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
143 MemoryKind
::Heap
=> write
!(f
, "heap allocation"),
148 impl interpret
::MayLeak
for MemoryKind
{
150 fn may_leak(self) -> bool
{
152 MemoryKind
::Heap
=> false,
// `!` is used as the pointer-tag/memory-kind placeholder; it can never exist
// at runtime, so this impl is vacuous.
// NOTE(review): truncated — the (unreachable) method body and closing braces
// are missing from this excerpt.
impl interpret::MayLeak for ! {
    fn may_leak(self) -> bool {
        // `self` is uninhabited
impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    // NOTE(review): truncated — both methods are missing their `match (a, b) {`
    // opener (implied by the tuple patterns below) and several closing braces.

    /// Can we statically prove `a == b`? A `false` answer means "not provable
    /// here", not "provably unequal" (see the ptr/ptr FIXME below).
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
            // Comparisons between integers are always known.
            (Scalar::Int { .. }, Scalar::Int { .. }) => a == b,
            // Equality with integers can never be known for sure.
            (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => false,
            // FIXME: return `true` for when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,

    /// Can we statically prove `a != b`? Same caveat as `guaranteed_eq`:
    /// `false` means "unknown", not "provably equal".
    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
            // Comparisons between integers are always known.
            (Scalar::Int(_), Scalar::Int(_)) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because if they are in bounds, the pointer can't be null.
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
                // Null vs. an in-bounds (hence non-null) pointer is provably unequal.
                int.is_null() && !self.memory.ptr_may_be_null(ptr.into())
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in different static items.
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
// The `Machine` implementation that drives const-eval. NOTE(review): this impl
// continues through the method fragments below; its closing brace is outside
// this excerpt.
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    // Pull in the shared defaults common to all compile-time machines.
    compile_time_machine!(<'mir, 'tcx>);

    /// Machine-specific allocation kinds (the CTFE "heap").
    type MemoryKind = MemoryKind;

    /// Global memory configuration — carries `can_access_statics`.
    type MemoryExtra = MemoryExtra;

    const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error
    // Looks up the MIR body to interpret for `instance`.
    // NOTE(review): truncated — the `fn` header naming this method, the
    // `match instance {` opener, the `else` keyword before the unsupported
    // path, and closing braces are missing from this excerpt.
        ecx: &InterpCx<'mir, 'tcx, Self>,
        instance: ty::InstanceDef<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
            // Plain items: only run MIR that is valid for CTFE.
            ty::InstanceDef::Item(def) => {
                if ecx.tcx.is_ctfe_mir_available(def.did) {
                    Ok(ecx.tcx.mir_for_ctfe_opt_const_arg(def))
                    // No CTFE MIR available for this item.
                    throw_unsup!(NoMirFor(def.did))
            // Shims and glue: use the generic instance MIR.
            _ => Ok(ecx.tcx.instance_mir(instance)),
    /// Decides how to execute a call: returns the callee's MIR body, or
    /// intercepts/rejects the call.
    // NOTE(review): truncated — the `args`/ABI parameters, the `else`/brace
    // structure of the non-const checks, and the tail of the `load_mir` match
    // (its `Ok` arm and closing braces) are missing from this excerpt.
    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        _ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions
        if let ty::InstanceDef::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            if !ecx.tcx.is_const_fn_raw(def.did) {
                // allow calling functions marked with #[default_method_body_is_const].
                if !ecx.tcx.has_attr(def.did, sym::default_method_body_is_const) {
                    // Some functions we support even if they are non-const -- but avoid testing
                    // that for const fn!
                    ecx.hook_panic_fn(instance, args)?;
                    // We certainly do *not* want to actually call the fn
                    // though, so be sure we return here.
                    throw_unsup_format!("calling non-const function `{}`", instance)

        // This is a const fn. Call it.
        Ok(Some(match ecx.load_mir(instance.def, None) {
            // Give a targeted message when the failure is "no MIR" for an
            // extern function.
            if let err_unsup!(NoMirFor(did)) = err.kind() {
                let path = ecx.tcx.def_path_str(*did);
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling extern function `{}`",
    // Handles intrinsic calls: first the intrinsics shared with other machines,
    // then the CTFE-specific ones (`ptr_guaranteed_eq/ne`, `const_allocate`).
    // NOTE(review): truncated — the `fn` header, the `args` parameter, the early
    // `return` after `emulate_intrinsic`, the `None` arm of the `ret` match, the
    // `else` of the `cmp` selection, and various closing braces are missing.
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, ret)? {

        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        // Diverging intrinsics (no return destination) are rejected outright.
        let (dest, ret) = match ret {
            return Err(ConstEvalErrKind::NeedsRfc(format!(
                "calling intrinsic `{}`",

        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                // Read both pointer operands as scalars.
                let a = ecx.read_immediate(&args[0])?.to_scalar()?;
                let b = ecx.read_immediate(&args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)
                    ecx.guaranteed_ne(a, b)
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;

            sym::const_allocate => {
                let size = ecx.read_scalar(&args[0])?.to_machine_usize(ecx)?;
                let align = ecx.read_scalar(&args[1])?.to_machine_usize(ecx)?;

                // Validate the requested alignment before allocating.
                let align = match Align::from_bytes(align) {
                    Err(err) => throw_ub_format!("align has to be a power of 2, {}", err),

                // Allocate on the CTFE "heap".
                let ptr = ecx.memory.allocate(
                    Size::from_bytes(size as u64),
                    interpret::MemoryKind::Machine(MemoryKind::Heap),
                ecx.write_pointer(ptr, dest)?;

            // Any other intrinsic is not supported in const-eval (yet).
            return Err(ConstEvalErrKind::NeedsRfc(format!(
                "calling intrinsic `{}`",

        // Resume execution at the return block.
        ecx.go_to_block(ret);
    // Turns a failed MIR assertion into a const-eval error, evaluating the
    // operands embedded in the assert message to constant integers first.
    // NOTE(review): truncated — the `fn assert_panic`-style header, the
    // `let eval_to_int =` binding for the closure below (it is called by name
    // further down), and several closing braces are missing from this excerpt.
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
            |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            // Generator-state assertions carry no operands; just copy the kind.
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        Err(ConstEvalErrKind::AssertFailure(err).into())
355 fn abort(_ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>, msg
: String
) -> InterpResult
<'tcx
, !> {
356 Err(ConstEvalErrKind
::Abort(msg
).into())
    // Binary operations on pointers are not allowed in const-eval; reject them.
    // NOTE(review): truncated — the `fn` header (presumably the `Machine`
    // pointer-binop hook), the operator and left-operand parameters, and the
    // closing brace are missing from this excerpt.
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    // `box` allocations are not allowed in const-eval; reject them.
    // NOTE(review): truncated — the `fn` header and closing brace are missing
    // from this excerpt.
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _dest: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
    /// Decrements the step budget before each terminator and errors once it
    /// is exhausted (unless the limit is disabled, i.e. started at `0`).
    // NOTE(review): truncated — the body of the first `if` (the early return
    // for the already-hit/disabled case), the final `Ok(())`, and closing
    // braces are missing from this excerpt.
    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // The step limit has already been hit in a previous call to `before_terminator`.
        if ecx.machine.steps_remaining == 0 {

        ecx.machine.steps_remaining -= 1;
        if ecx.machine.steps_remaining == 0 {
            throw_exhaust!(StepLimitReached)
    // Called when a new stack frame is about to be pushed; enforces the
    // recursion limit.
    // NOTE(review): truncated — the `fn` header, the success path returning the
    // frame, and closing braces are missing from this excerpt.
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
    // Read-only accessor for the machine's virtual call stack.
    // NOTE(review): truncated — the `fn` header (with its `'a` lifetime), the
    // body (presumably borrowing `ecx.machine.stack`, mirroring the mutable
    // accessor below — TODO confirm), and closing braces are missing.
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
    // Mutable accessor for the machine's virtual call stack.
    // NOTE(review): truncated — the `fn` header and closing brace are missing
    // from this excerpt.
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        &mut ecx.machine.stack
    /// Gatekeeper for accesses to global allocations: writes are always
    /// rejected, and reads of statics are only allowed when the machine
    /// configuration permits (`can_access_statics`).
    // NOTE(review): truncated — the `alloc_id` and `is_write` parameters, the
    // `if is_write { ... } else { ... }` scaffolding, the `Ok(())` results, and
    // closing braces are missing from this excerpt.
    fn before_access_global(
        memory_extra: &MemoryExtra,
        allocation: &Allocation,
        static_def_id: Option<DefId>,
    ) -> InterpResult<'tcx> {
        // Write access. These are never allowed, but we give a targeted error message.
        if allocation.mutability == Mutability::Not {
            Err(err_ub!(WriteToReadOnly(alloc_id)).into())
            Err(ConstEvalErrKind::ModifiedGlobal.into())

        // Read access. These are usually allowed, with some exceptions.
        if memory_extra.can_access_statics {
            // Machine configuration allows us read from anything (e.g., `static` initializer).
        } else if static_def_id.is_some() {
            // Machine configuration does not allow us to read statics
            // (e.g., `const` initializer).
            // See const_eval::machine::MemoryExtra::can_access_statics for why
            // this check is so important: if we could read statics, we could read pointers
            // to mutable allocations *inside* statics. These allocations are not themselves
            // statics, so pointers to them can get around the check in `validity.rs`.
            Err(ConstEvalErrKind::ConstAccessesStatic.into())
            // Immutable global, this read is fine.
            // But make sure we never accept a read from something mutable, that would be
            // unsound. The reason is that as the content of this allocation may be different
            // now and at run-time, so if we permit reading now we might return the wrong value.
            assert_eq!(allocation.mutability, Mutability::Not);
455 // Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
456 // so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
457 // at the bottom of this file.