]> git.proxmox.com Git - rustc.git/blame - compiler/rustc_const_eval/src/interpret/machine.rs
New upstream version 1.75.0+dfsg1
[rustc.git] / compiler / rustc_const_eval / src / interpret / machine.rs
CommitLineData
ea8adc8c
XL
1//! This module contains everything needed to instantiate an interpreter.
2//! This separation exists to ensure that no fancy miri features like
3//! interpreting common C functions leak into CTFE.
4
0bf4aa26 5use std::borrow::{Borrow, Cow};
29967ef6 6use std::fmt::Debug;
0bf4aa26
XL
7use std::hash::Hash;
8
ed00b5ec 9use rustc_apfloat::{Float, FloatConvert};
f2b60f7d 10use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
ba9703b0 11use rustc_middle::mir;
353b0b11 12use rustc_middle::ty::layout::TyAndLayout;
781aab86 13use rustc_middle::ty::{self, TyCtxt};
ba9703b0 14use rustc_span::def_id::DefId;
ed00b5ec 15use rustc_target::abi::Size;
064997fb 16use rustc_target::spec::abi::Abi as CallAbi;
b7449926 17
0bf4aa26 18use super::{
add651ee 19 AllocBytes, AllocId, AllocRange, Allocation, ConstAllocation, FnArg, Frame, ImmTy, InterpCx,
781aab86 20 InterpResult, MPlaceTy, MemoryKind, OpTy, PlaceTy, Pointer, Provenance,
0bf4aa26 21};
ea8adc8c 22
60c5eb7d
XL
/// Data returned by `Machine::stack_pop`,
/// to provide further control over the popping of the stack frame.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum StackPopJump {
    /// Indicates that no special handling should be
    /// done - we'll either return normally or unwind
    /// based on the terminator for the function
    /// we're leaving.
    Normal,

    /// Indicates that we should *not* jump to the return/unwind address, as the callback already
    /// took care of everything.
    NoJump,
}
37
0bf4aa26
XL
38/// Whether this kind of memory is allowed to leak
39pub trait MayLeak: Copy {
40 fn may_leak(self) -> bool;
41}
42
/// The functionality needed by memory to manage its allocations
pub trait AllocMap<K: Hash + Eq, V> {
    /// Tests if the map contains the given key.
    /// Deliberately takes `&mut` because that is sufficient, and some implementations
    /// can be more efficient then (using `RefCell::get_mut`).
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Inserts a new entry into the map.
    fn insert(&mut self, k: K, v: V) -> Option<V>;

    /// Removes an entry from the map.
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>;

    /// Returns data based on the keys and values in the map.
    fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;

    /// Returns a reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E>;

    /// Returns a mutable reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E>;

    /// Read-only lookup.
    fn get(&self, k: K) -> Option<&V> {
        // `get_or` with an always-failing `vacant` closure never inserts;
        // `ok()` turns the "missing" error into `None`.
        self.get_or(k, || Err(())).ok()
    }

    /// Mutable lookup.
    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        self.get_mut_or(k, || Err(())).ok()
    }
}
83
/// Methods of this trait signify a point where CTFE evaluation would fail
/// and some use case dependent behaviour can instead be applied.
pub trait Machine<'mir, 'tcx: 'mir>: Sized {
    /// Additional memory kinds a machine wishes to distinguish from the builtin ones
    type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;

    /// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
    type Provenance: Provenance + Eq + Hash + 'static;

    /// When getting the AllocId of a pointer, some extra data is also obtained from the provenance
    /// that is passed to memory access hooks so they can do things with it.
    type ProvenanceExtra: Copy + 'static;

    /// Machines can define extra (non-instance) things that represent values of function pointers.
    /// For example, Miri uses this to return a function pointer from `dlsym`
    /// that can later be called to execute the right thing.
    type ExtraFnVal: Debug + Copy;

    /// Extra data stored in every call frame.
    type FrameExtra;

    /// Extra data stored in every allocation.
    type AllocExtra: Debug + Clone + 'tcx;

    /// Type for the bytes of the allocation.
    type Bytes: AllocBytes + 'static;

    /// Memory's allocation map
    type MemoryMap: AllocMap<
            AllocId,
            (
                MemoryKind<Self::MemoryKind>,
                Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>,
            ),
        > + Default
        + Clone;

    /// The memory kind to use for copied global memory (held in `tcx`) --
    /// or None if such memory should not be mutated and thus any such attempt will cause
    /// a `ModifiedStatic` error to be raised.
    /// Statics are copied under two circumstances: When they are mutated, and when
    /// `adjust_allocation` (see below) returns an owned allocation
    /// that is added to the memory so that the work is not done twice.
    const GLOBAL_KIND: Option<Self::MemoryKind>;

    /// Should the machine panic on allocation failures?
    const PANIC_ON_ALLOC_FAIL: bool;

    /// Should post-monomorphization checks be run when a stack frame is pushed?
    const POST_MONO_CHECKS: bool = true;

    /// Whether memory accesses should be alignment-checked.
    fn enforce_alignment(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

    /// Whether, when checking alignment, we should look at the actual address and thus support
    /// custom alignment logic based on whatever the integer address happens to be.
    ///
    /// If this returns true, `Provenance::OFFSET_IS_ADDR` must be true.
    fn use_addr_for_alignment_check(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

    /// Whether to enforce the validity invariant for a specific layout.
    fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool;

    /// Whether function calls should be [ABI](CallAbi)-checked.
    fn enforce_abi(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
        true
    }

    /// Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually
    /// check for overflow.
    fn ignore_optional_overflow_checks(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

    /// Entry point for obtaining the MIR of anything that should get evaluated.
    /// So not just functions and shims, but also const/static initializers, anonymous
    /// constants, ...
    fn load_mir(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        instance: ty::InstanceDef<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        Ok(ecx.tcx.instance_mir(instance))
    }

    /// Entry point to all function calls.
    ///
    /// Returns either the mir to use for the call, or `None` if execution should
    /// just proceed (which usually means this hook did all the work that the
    /// called function should usually have done). In the latter case, it is
    /// this hook's responsibility to advance the instruction pointer!
    /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
    /// nor just jump to `ret`, but instead push their own stack frame.)
    /// Passing `dest` and `ret` in the same `Option` proved very annoying when only one of them
    /// was used.
    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        abi: CallAbi,
        args: &[FnArg<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>>;

    /// Execute `fn_val`. It is the hook's responsibility to advance the instruction
    /// pointer as appropriate.
    fn call_extra_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        fn_val: Self::ExtraFnVal,
        abi: CallAbi,
        args: &[FnArg<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Directly process an intrinsic without pushing a stack frame. It is the hook's
    /// responsibility to advance the instruction pointer as appropriate.
    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Called to evaluate `Assert` MIR terminators that trigger a panic.
    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Called to trigger a non-unwinding panic.
    fn panic_nounwind(_ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: &str) -> InterpResult<'tcx>;

    /// Called when unwinding reached a state where execution should be terminated.
    fn unwind_terminate(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx>;

    /// Called for all binary operations where the LHS has pointer type.
    ///
    /// Returns a (value, overflowed) pair if the operation succeeded
    fn binary_ptr_op(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx, Self::Provenance>,
        right: &ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, (ImmTy<'tcx, Self::Provenance>, bool)>;

    /// Generate the NaN returned by a float operation, given the list of inputs.
    /// (This is all inputs, not just NaN inputs!)
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _inputs: &[F1],
    ) -> F2 {
        // By default we always return the preferred NaN.
        F2::NAN
    }

    /// Called before writing the specified `local` of the `frame`.
    /// Since writing a ZST is not actually accessing memory or locals, this is never invoked
    /// for ZST reads.
    ///
    /// Due to borrow checker trouble, we indicate the `frame` as an index rather than an `&mut
    /// Frame`.
    #[inline(always)]
    fn before_access_local_mut<'a>(
        _ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
        _frame: usize,
        _local: mir::Local,
    ) -> InterpResult<'tcx>
    where
        'tcx: 'mir,
    {
        Ok(())
    }

    /// Called before a basic block terminator is executed.
    #[inline]
    fn before_terminator(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called when the interpreter encounters a `StatementKind::ConstEvalCounter` instruction.
    /// You can use this to detect long or endlessly running programs.
    #[inline]
    fn increment_const_eval_counter(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called before a global allocation is accessed.
    /// `def_id` is `Some` if this is the "lazy" allocation of a static.
    #[inline]
    fn before_access_global(
        _tcx: TyCtxt<'tcx>,
        _machine: &Self,
        _alloc_id: AllocId,
        _allocation: ConstAllocation<'tcx>,
        _static_def_id: Option<DefId>,
        _is_write: bool,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Return the `AllocId` for the given thread-local static in the current thread.
    fn thread_local_static_base_pointer(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>> {
        // By default, thread-local statics are not supported at all.
        throw_unsup!(ThreadLocalStatic(def_id))
    }

    /// Return the root pointer for the given `extern static`.
    fn extern_static_base_pointer(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;

    /// Return a "base" pointer for the given allocation: the one that is used for direct
    /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
    ///
    /// Not called on `extern` or thread-local statics (those use the methods above).
    fn adjust_alloc_base_pointer(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        ptr: Pointer,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;

    /// "Int-to-pointer cast"
    fn ptr_from_addr_cast(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        addr: u64,
    ) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;

    /// Marks a pointer as exposed, allowing its provenance
    /// to be recovered. "Pointer-to-int cast"
    fn expose_ptr(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        ptr: Pointer<Self::Provenance>,
    ) -> InterpResult<'tcx>;

    /// Convert a pointer with provenance into an allocation-offset pair
    /// and extra provenance info.
    ///
    /// The returned `AllocId` must be the same as `ptr.provenance.get_alloc_id()`.
    ///
    /// When this fails, that means the pointer does not point to a live allocation.
    fn ptr_get_alloc(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        ptr: Pointer<Self::Provenance>,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;

    /// Called to adjust allocations to the Provenance and AllocExtra of this machine.
    ///
    /// The way we construct allocations is to always first construct it without extra and then add
    /// the extra. This keeps uniform code paths for handling both allocations created by CTFE for
    /// globals, and allocations created by Miri during evaluation.
    ///
    /// `kind` is the kind of the allocation being adjusted; it can be `None` when
    /// it's a global and `GLOBAL_KIND` is `None`.
    ///
    /// This should avoid copying if no work has to be done! If this returns an owned
    /// allocation (because a copy had to be done to adjust things), machine memory will
    /// cache the result. (This relies on `AllocMap::get_or` being able to add the
    /// owned allocation to the map even when the map is shared.)
    ///
    /// This must only fail if `alloc` contains provenance.
    fn adjust_allocation<'b>(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        id: AllocId,
        alloc: Cow<'b, Allocation>,
        kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;

    /// Called to evaluate an `InlineAsm` terminator.
    /// The default rejects inline assembly entirely.
    fn eval_inline_asm(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _template: &'tcx [InlineAsmTemplatePiece],
        _operands: &[mir::InlineAsmOperand<'tcx>],
        _options: InlineAsmOptions,
    ) -> InterpResult<'tcx> {
        throw_unsup_format!("inline assembly is not supported")
    }

    /// Hook for performing extra checks on a memory read access.
    ///
    /// Takes read-only access to the allocation so we can keep all the memory read
    /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
    /// need to mutate.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxt<'tcx>,
        _machine: &Self,
        _alloc_extra: &Self::AllocExtra,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Hook for performing extra checks on a memory write access.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Hook for performing extra operations on a memory deallocation.
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Executes a retagging operation for a single pointer.
    /// Returns the possibly adjusted pointer.
    #[inline]
    fn retag_ptr_value(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _kind: mir::RetagKind,
        val: &ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
        // Default: retagging is a no-op; the value is returned unchanged.
        Ok(val.clone())
    }

    /// Executes a retagging operation on a compound value.
    /// Replaces all pointers stored in the given place.
    #[inline]
    fn retag_place_contents(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _kind: mir::RetagKind,
        _place: &PlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called on places used for in-place function argument and return value handling.
    ///
    /// These places need to be protected to make sure the program cannot tell whether the
    /// argument/return value was actually copied or passed in-place.
    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        place: &PlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        // Without an aliasing model, all we can do is put `Uninit` into the place.
        // Conveniently this also ensures that the place actually points to suitable memory.
        ecx.write_uninit(place)
    }

    /// Called immediately before a new stack frame gets pushed.
    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>>;

    /// Borrow the current thread's stack.
    fn stack<'a>(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>];

    /// Mutably borrow the current thread's stack.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>>;

    /// Called immediately after a stack frame got pushed and its locals got initialized.
    fn after_stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called just before the return value is copied to the caller-provided return place.
    fn before_stack_pop(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _frame: &Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called immediately after a stack frame got popped, but before jumping back to the caller.
    /// The `locals` have already been destroyed!
    #[inline(always)]
    fn after_stack_pop(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _frame: Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
        unwinding: bool,
    ) -> InterpResult<'tcx, StackPopJump> {
        // By default, we do not support unwinding from panics
        assert!(!unwinding);
        Ok(StackPopJump::Normal)
    }

    /// Called immediately after actual memory was allocated for a local
    /// but before the local's stack frame is updated to point to that memory.
    #[inline(always)]
    fn after_local_allocated(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _frame: usize,
        _local: mir::Local,
        _mplace: &MPlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        Ok(())
    }
}
497
/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
/// (CTFE and ConstProp) use the same instance. Here, we share that code.
pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
    // Compile-time machines use plain `AllocId`s as provenance and need no extra data.
    type Provenance = AllocId;
    type ProvenanceExtra = ();

    // No extra function values exist at compile time (`!` is uninhabited).
    type ExtraFnVal = !;

    type MemoryMap =
        rustc_data_structures::fx::FxIndexMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
    const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory

    type AllocExtra = ();
    type FrameExtra = ();
    type Bytes = Box<[u8]>;

    #[inline(always)]
    fn use_addr_for_alignment_check(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
        // We do not support `use_addr`.
        false
    }

    #[inline(always)]
    fn ignore_optional_overflow_checks(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
        false
    }

    #[inline(always)]
    fn unwind_terminate(
        _ecx: &mut InterpCx<$mir, $tcx, Self>,
        _reason: mir::UnwindTerminateReason,
    ) -> InterpResult<$tcx> {
        unreachable!("unwinding cannot happen during compile-time evaluation")
    }

    #[inline(always)]
    fn call_extra_fn(
        _ecx: &mut InterpCx<$mir, $tcx, Self>,
        fn_val: !,
        _abi: CallAbi,
        _args: &[FnArg<$tcx>],
        _destination: &PlaceTy<$tcx, Self::Provenance>,
        _target: Option<mir::BasicBlock>,
        _unwind: mir::UnwindAction,
    ) -> InterpResult<$tcx> {
        // `fn_val` is uninhabited, so this can never actually be called.
        match fn_val {}
    }

    #[inline(always)]
    fn adjust_allocation<'b>(
        _ecx: &InterpCx<$mir, $tcx, Self>,
        _id: AllocId,
        alloc: Cow<'b, Allocation>,
        _kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<$tcx, Cow<'b, Allocation<Self::Provenance>>> {
        // Nothing to adjust: provenance/extra types are the defaults.
        Ok(alloc)
    }

    fn extern_static_base_pointer(
        ecx: &InterpCx<$mir, $tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<$tcx, Pointer> {
        // Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
        Ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id), Size::ZERO))
    }

    #[inline(always)]
    fn adjust_alloc_base_pointer(
        _ecx: &InterpCx<$mir, $tcx, Self>,
        ptr: Pointer<AllocId>,
    ) -> InterpResult<$tcx, Pointer<AllocId>> {
        Ok(ptr)
    }

    #[inline(always)]
    fn ptr_from_addr_cast(
        _ecx: &InterpCx<$mir, $tcx, Self>,
        addr: u64,
    ) -> InterpResult<$tcx, Pointer<Option<AllocId>>> {
        // Allow these casts, but make the pointer not dereferenceable.
        // (I.e., they behave like transmutation.)
        // This is correct because no pointers can ever be exposed in compile-time evaluation.
        Ok(Pointer::from_addr_invalid(addr))
    }

    #[inline(always)]
    fn ptr_get_alloc(
        _ecx: &InterpCx<$mir, $tcx, Self>,
        ptr: Pointer<AllocId>,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
        // We know `offset` is relative to the allocation, so we can use `into_parts`.
        let (alloc_id, offset) = ptr.into_parts();
        Some((alloc_id, offset, ()))
    }
}