use rustc_middle::mir;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_middle::ty::{self, Ty};
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
use std::hash::Hash;

use rustc_data_structures::fx::FxHashMap;

use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::{sym, Symbol};

use crate::interpret::{
    self, compile_time_machine, AllocId, Allocation, Frame, GlobalId, ImmTy, InterpCx,
    InterpResult, Memory, OpTy, PlaceTy, Pointer, Scalar,
};

use super::error::*;
23 impl<'mir
, 'tcx
> InterpCx
<'mir
, 'tcx
, CompileTimeInterpreter
<'mir
, 'tcx
>> {
24 /// Evaluate a const function where all arguments (if any) are zero-sized types.
25 /// The evaluation is memoized thanks to the query system.
27 /// Returns `true` if the call has been evaluated.
28 fn try_eval_const_fn_call(
30 instance
: ty
::Instance
<'tcx
>,
31 ret
: Option
<(PlaceTy
<'tcx
>, mir
::BasicBlock
)>,
33 ) -> InterpResult
<'tcx
, bool
> {
34 trace
!("try_eval_const_fn_call: {:?}", instance
);
35 // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot
36 // perform this optimization on items tagged with it.
37 if instance
.def
.requires_caller_location(self.tcx()) {
40 // For the moment we only do this for functions which take no arguments
41 // (or all arguments are ZSTs) so that we don't memoize too much.
42 if args
.iter().any(|a
| !a
.layout
.is_zst()) {
46 let dest
= match ret
{
47 Some((dest
, _
)) => dest
,
48 // Don't memoize diverging function calls.
49 None
=> return Ok(false),
52 let gid
= GlobalId { instance, promoted: None }
;
54 let place
= self.eval_to_allocation(gid
)?
;
56 self.copy_op(place
.into(), dest
)?
;
58 self.return_to_block(ret
.map(|r
| r
.1))?
;
59 trace
!("{:?}", self.dump_place(*dest
));
63 /// "Intercept" a function call to a panic-related function
64 /// because we have something special to do for it.
65 /// If this returns successfully (`Ok`), the function should just be evaluated normally.
68 instance
: ty
::Instance
<'tcx
>,
70 ) -> InterpResult
<'tcx
> {
71 let def_id
= instance
.def_id();
72 if Some(def_id
) == self.tcx
.lang_items().panic_fn()
73 || Some(def_id
) == self.tcx
.lang_items().begin_panic_fn()
76 assert
!(args
.len() == 1);
78 let msg_place
= self.deref_operand(args
[0])?
;
79 let msg
= Symbol
::intern(self.read_str(msg_place
)?
);
80 let span
= self.find_closest_untracked_caller_location();
81 let (file
, line
, col
) = self.location_triple_for_span(span
);
82 Err(ConstEvalErrKind
::Panic { msg, file, line, col }
.into())
89 /// Extra machine state for CTFE, and the Machine instance
90 pub struct CompileTimeInterpreter
<'mir
, 'tcx
> {
91 /// For now, the number of terminators that can be evaluated before we throw a resource
94 /// Setting this to `0` disables the limit and allows the interpreter to run forever.
95 pub steps_remaining
: usize,
97 /// The virtual call stack.
98 pub(crate) stack
: Vec
<Frame
<'mir
, 'tcx
, (), ()>>,
101 #[derive(Copy, Clone, Debug)]
102 pub struct MemoryExtra
{
103 /// We need to make sure consts never point to anything mutable, even recursively. That is
104 /// relied on for pattern matching on consts with references.
105 /// To achieve this, two pieces have to work together:
106 /// * Interning makes everything outside of statics immutable.
107 /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
108 /// This boolean here controls the second part.
109 pub(super) can_access_statics
: bool
,
112 impl<'mir
, 'tcx
> CompileTimeInterpreter
<'mir
, 'tcx
> {
113 pub(super) fn new(const_eval_limit
: Limit
) -> Self {
114 CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
118 impl<K
: Hash
+ Eq
, V
> interpret
::AllocMap
<K
, V
> for FxHashMap
<K
, V
> {
120 fn contains_key
<Q
: ?Sized
+ Hash
+ Eq
>(&mut self, k
: &Q
) -> bool
124 FxHashMap
::contains_key(self, k
)
128 fn insert(&mut self, k
: K
, v
: V
) -> Option
<V
> {
129 FxHashMap
::insert(self, k
, v
)
133 fn remove
<Q
: ?Sized
+ Hash
+ Eq
>(&mut self, k
: &Q
) -> Option
<V
>
137 FxHashMap
::remove(self, k
)
141 fn filter_map_collect
<T
>(&self, mut f
: impl FnMut(&K
, &V
) -> Option
<T
>) -> Vec
<T
> {
142 self.iter().filter_map(move |(k
, v
)| f(k
, &*v
)).collect()
146 fn get_or
<E
>(&self, k
: K
, vacant
: impl FnOnce() -> Result
<V
, E
>) -> Result
<&V
, E
> {
151 bug
!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
157 fn get_mut_or
<E
>(&mut self, k
: K
, vacant
: impl FnOnce() -> Result
<V
, E
>) -> Result
<&mut V
, E
> {
158 match self.entry(k
) {
159 Entry
::Occupied(e
) => Ok(e
.into_mut()),
160 Entry
::Vacant(e
) => {
168 crate type CompileTimeEvalContext
<'mir
, 'tcx
> =
169 InterpCx
<'mir
, 'tcx
, CompileTimeInterpreter
<'mir
, 'tcx
>>;
171 impl interpret
::MayLeak
for ! {
173 fn may_leak(self) -> bool
{
174 // `self` is uninhabited
179 impl<'mir
, 'tcx
: 'mir
> CompileTimeEvalContext
<'mir
, 'tcx
> {
180 fn guaranteed_eq(&mut self, a
: Scalar
, b
: Scalar
) -> bool
{
182 // Comparisons between integers are always known.
183 (Scalar
::Raw { .. }
, Scalar
::Raw { .. }
) => a
== b
,
184 // Equality with integers can never be known for sure.
185 (Scalar
::Raw { .. }
, Scalar
::Ptr(_
)) | (Scalar
::Ptr(_
), Scalar
::Raw { .. }
) => false,
186 // FIXME: return `true` for when both sides are the same pointer, *except* that
187 // some things (like functions and vtables) do not have stable addresses
188 // so we need to be careful around them (see e.g. #73722).
189 (Scalar
::Ptr(_
), Scalar
::Ptr(_
)) => false,
193 fn guaranteed_ne(&mut self, a
: Scalar
, b
: Scalar
) -> bool
{
195 // Comparisons between integers are always known.
196 (Scalar
::Raw { .. }
, Scalar
::Raw { .. }
) => a
!= b
,
197 // Comparisons of abstract pointers with null pointers are known if the pointer
198 // is in bounds, because if they are in bounds, the pointer can't be null.
199 (Scalar
::Raw { data: 0, .. }
, Scalar
::Ptr(ptr
))
200 | (Scalar
::Ptr(ptr
), Scalar
::Raw { data: 0, .. }
) => !self.memory
.ptr_may_be_null(ptr
),
201 // Inequality with integers other than null can never be known for sure.
202 (Scalar
::Raw { .. }
, Scalar
::Ptr(_
)) | (Scalar
::Ptr(_
), Scalar
::Raw { .. }
) => false,
203 // FIXME: return `true` for at least some comparisons where we can reliably
204 // determine the result of runtime inequality tests at compile-time.
205 // Examples include comparison of addresses in different static items.
206 (Scalar
::Ptr(_
), Scalar
::Ptr(_
)) => false,
211 impl<'mir
, 'tcx
> interpret
::Machine
<'mir
, 'tcx
> for CompileTimeInterpreter
<'mir
, 'tcx
> {
212 compile_time_machine
!(<'mir
, 'tcx
>);
214 type MemoryExtra
= MemoryExtra
;
216 fn find_mir_or_eval_fn(
217 ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
218 instance
: ty
::Instance
<'tcx
>,
220 ret
: Option
<(PlaceTy
<'tcx
>, mir
::BasicBlock
)>,
221 _unwind
: Option
<mir
::BasicBlock
>, // unwinding is not supported in consts
222 ) -> InterpResult
<'tcx
, Option
<&'mir mir
::Body
<'tcx
>>> {
223 debug
!("find_mir_or_eval_fn: {:?}", instance
);
225 // Only check non-glue functions
226 if let ty
::InstanceDef
::Item(def
) = instance
.def
{
227 // Execution might have wandered off into other crates, so we cannot do a stability-
228 // sensitive check here. But we can at least rule out functions that are not const
230 if ecx
.tcx
.is_const_fn_raw(def
.did
) {
231 // If this function is a `const fn` then under certain circumstances we
232 // can evaluate call via the query system, thus memoizing all future calls.
233 if ecx
.try_eval_const_fn_call(instance
, ret
, args
)?
{
237 // Some functions we support even if they are non-const -- but avoid testing
238 // that for const fn!
239 ecx
.hook_panic_fn(instance
, args
)?
;
240 // We certainly do *not* want to actually call the fn
241 // though, so be sure we return here.
242 throw_unsup_format
!("calling non-const function `{}`", instance
)
245 // This is a const fn. Call it.
246 Ok(Some(match ecx
.load_mir(instance
.def
, None
) {
249 if let err_unsup
!(NoMirFor(did
)) = err
.kind
{
250 let path
= ecx
.tcx
.def_path_str(did
);
251 return Err(ConstEvalErrKind
::NeedsRfc(format
!(
252 "calling extern function `{}`",
263 ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
264 instance
: ty
::Instance
<'tcx
>,
266 ret
: Option
<(PlaceTy
<'tcx
>, mir
::BasicBlock
)>,
267 _unwind
: Option
<mir
::BasicBlock
>,
268 ) -> InterpResult
<'tcx
> {
269 // Shared intrinsics.
270 if ecx
.emulate_intrinsic(instance
, args
, ret
)?
{
273 let intrinsic_name
= ecx
.tcx
.item_name(instance
.def_id());
275 // CTFE-specific intrinsics.
276 let (dest
, ret
) = match ret
{
278 return Err(ConstEvalErrKind
::NeedsRfc(format
!(
279 "calling intrinsic `{}`",
286 match intrinsic_name
{
287 sym
::ptr_guaranteed_eq
| sym
::ptr_guaranteed_ne
=> {
288 let a
= ecx
.read_immediate(args
[0])?
.to_scalar()?
;
289 let b
= ecx
.read_immediate(args
[1])?
.to_scalar()?
;
290 let cmp
= if intrinsic_name
== sym
::ptr_guaranteed_eq
{
291 ecx
.guaranteed_eq(a
, b
)
293 ecx
.guaranteed_ne(a
, b
)
295 ecx
.write_scalar(Scalar
::from_bool(cmp
), dest
)?
;
298 return Err(ConstEvalErrKind
::NeedsRfc(format
!(
299 "calling intrinsic `{}`",
306 ecx
.go_to_block(ret
);
311 ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
312 msg
: &AssertMessage
<'tcx
>,
313 _unwind
: Option
<mir
::BasicBlock
>,
314 ) -> InterpResult
<'tcx
> {
315 use rustc_middle
::mir
::AssertKind
::*;
316 // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
318 |op
| ecx
.read_immediate(ecx
.eval_operand(op
, None
)?
).map(|x
| x
.to_const_int());
319 let err
= match msg
{
320 BoundsCheck { ref len, ref index }
=> {
321 let len
= eval_to_int(len
)?
;
322 let index
= eval_to_int(index
)?
;
323 BoundsCheck { len, index }
325 Overflow(op
, l
, r
) => Overflow(*op
, eval_to_int(l
)?
, eval_to_int(r
)?
),
326 OverflowNeg(op
) => OverflowNeg(eval_to_int(op
)?
),
327 DivisionByZero(op
) => DivisionByZero(eval_to_int(op
)?
),
328 RemainderByZero(op
) => RemainderByZero(eval_to_int(op
)?
),
329 ResumedAfterReturn(generator_kind
) => ResumedAfterReturn(*generator_kind
),
330 ResumedAfterPanic(generator_kind
) => ResumedAfterPanic(*generator_kind
),
332 Err(ConstEvalErrKind
::AssertFailure(err
).into())
335 fn ptr_to_int(_mem
: &Memory
<'mir
, 'tcx
, Self>, _ptr
: Pointer
) -> InterpResult
<'tcx
, u64> {
336 Err(ConstEvalErrKind
::NeedsRfc("pointer-to-integer cast".to_string()).into())
340 _ecx
: &InterpCx
<'mir
, 'tcx
, Self>,
344 ) -> InterpResult
<'tcx
, (Scalar
, bool
, Ty
<'tcx
>)> {
345 Err(ConstEvalErrKind
::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
349 _ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
350 _dest
: PlaceTy
<'tcx
>,
351 ) -> InterpResult
<'tcx
> {
352 Err(ConstEvalErrKind
::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
355 fn before_terminator(ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>) -> InterpResult
<'tcx
> {
356 // The step limit has already been hit in a previous call to `before_terminator`.
357 if ecx
.machine
.steps_remaining
== 0 {
361 ecx
.machine
.steps_remaining
-= 1;
362 if ecx
.machine
.steps_remaining
== 0 {
363 throw_exhaust
!(StepLimitReached
)
371 ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
372 frame
: Frame
<'mir
, 'tcx
>,
373 ) -> InterpResult
<'tcx
, Frame
<'mir
, 'tcx
>> {
374 // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
375 if !ecx
.tcx
.sess
.recursion_limit().value_within_limit(ecx
.stack().len() + 1) {
376 throw_exhaust
!(StackFrameLimitReached
)
384 ecx
: &'a InterpCx
<'mir
, 'tcx
, Self>,
385 ) -> &'a
[Frame
<'mir
, 'tcx
, Self::PointerTag
, Self::FrameExtra
>] {
391 ecx
: &'a
mut InterpCx
<'mir
, 'tcx
, Self>,
392 ) -> &'a
mut Vec
<Frame
<'mir
, 'tcx
, Self::PointerTag
, Self::FrameExtra
>> {
393 &mut ecx
.machine
.stack
396 fn before_access_global(
397 memory_extra
: &MemoryExtra
,
399 allocation
: &Allocation
,
400 static_def_id
: Option
<DefId
>,
402 ) -> InterpResult
<'tcx
> {
404 // Write access. These are never allowed, but we give a targeted error message.
405 if allocation
.mutability
== Mutability
::Not
{
406 Err(err_ub
!(WriteToReadOnly(alloc_id
)).into())
408 Err(ConstEvalErrKind
::ModifiedGlobal
.into())
411 // Read access. These are usually allowed, with some exceptions.
412 if memory_extra
.can_access_statics
{
413 // Machine configuration allows us read from anything (e.g., `static` initializer).
415 } else if static_def_id
.is_some() {
416 // Machine configuration does not allow us to read statics
417 // (e.g., `const` initializer).
418 // See const_eval::machine::MemoryExtra::can_access_statics for why
419 // this check is so important: if we could read statics, we could read pointers
420 // to mutable allocations *inside* statics. These allocations are not themselves
421 // statics, so pointers to them can get around the check in `validity.rs`.
422 Err(ConstEvalErrKind
::ConstAccessesStatic
.into())
424 // Immutable global, this read is fine.
425 // But make sure we never accept a read from something mutable, that would be
426 // unsound. The reason is that as the content of this allocation may be different
427 // now and at run-time, so if we permit reading now we might return the wrong value.
428 assert_eq
!(allocation
.mutability
, Mutability
::Not
);
// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.