// compiler/rustc_mir/src/interpret/eval_context.rs
// (rustc 1.49.0~beta.4, as mirrored in the git.proxmox.com rustc.git packaging repo)

use std::cell::Cell;
use std::fmt;
use std::mem;

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::{self as hir, def::DefKind, def_id::DefId, definitions::DefPathData};
use rustc_index::vec::IndexVec;
use rustc_macros::HashStable;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{GlobalId, InterpResult, Pointer, Scalar};
use rustc_middle::ty::layout::{self, TyAndLayout};
use rustc_middle::ty::{
    self, query::TyCtxtAt, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable,
};
use rustc_span::{Pos, Span};
use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};

use super::{
    Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, Operand, Place, PlaceTy,
    ScalarMaybeUninit, StackPopJump,
};
use crate::transform::validate::equal_up_to_regions;
use crate::util::storage::AlwaysLiveLocals;

pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    ///
    /// Note: the stack is provided by the machine.
    pub machine: M,

    /// The results of the type checker, from rustc.
    /// The span in this is the "root" of the evaluation, i.e., the const
    /// we are evaluating (if this is CTFE).
    pub tcx: TyCtxtAt<'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub(crate) param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'mir, 'tcx, M>,

    /// A cache for deduplicating vtables.
    pub(super) vtables:
        FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer<M::PointerTag>>,
}

// The PhantomData exists to prevent this type from being `Send`. If it were sent across a thread
// boundary and dropped in the other thread, it would exit the span in the other thread.
struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);

impl SpanGuard {
    /// By default a `SpanGuard` does nothing.
    fn new() -> Self {
        Self(tracing::Span::none(), std::marker::PhantomData)
    }

    /// If a span is entered, we exit the previous span (if any, normally none) and enter the
    /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
    /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
    /// been pushed.
    fn enter(&mut self, span: tracing::Span) {
        // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
        // we never enter or exit more spans than vice versa. Unless you `mem::forget`, then we
        // can't protect the tracing stack, but that'll just lead to weird logging, no actual
        // problems.
        *self = Self(span, std::marker::PhantomData);
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.enter(id);
        });
    }
}

impl Drop for SpanGuard {
    fn drop(&mut self) {
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.exit(id);
        });
    }
}
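
// For illustration (a minimal standalone sketch, independent of this module):
// a `PhantomData<*const u8>` field suppresses the auto-`Send` impl, because
// raw pointers are not `Send`. That is the entire job of the second field of
// `SpanGuard` above.
//
//     struct NotSend(std::marker::PhantomData<*const u8>);
//     fn assert_send<T: Send>() {}
//     // assert_send::<NotSend>(); // does not compile: `*const u8` is not `Send`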

/// A stack frame.
pub struct Frame<'mir, 'tcx, Tag = (), Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub body: &'mir mir::Body<'tcx>,

    /// The def_id and substs of the current function.
    pub instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Work to perform when returning from this function.
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller.
    pub return_place: Option<PlaceTy<'tcx, Tag>>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `LocalState`s: a dead local carries no value,
    /// while a live local can either directly contain a `Scalar` or refer to
    /// some part of an `Allocation`.
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Tag>>,

    /// The `tracing` span for this frame is stored here.
    /// When the guard is dropped, the span is exited. This gives us
    /// a full stack trace on all tracing statements.
    tracing_span: SpanGuard,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// If this is `Err`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    /// We basically abuse `Result` as `Either`.
    pub(super) loc: Result<mir::Location, Span>,
}
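
// For illustration of the `Result`-as-`Either` trick used for `loc` (a minimal
// standalone sketch): `Ok` means "currently executing at this location", `Err`
// means "no particular statement, blame this span", and `Result::ok` projects
// out the first case.
//
//     let loc: Result<u32, &str> = Err("frame preamble");
//     assert_eq!(loc.ok(), None); // not at any particular statement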

/// What we store about a frame in an interpreter backtrace.
#[derive(Debug)]
pub struct FrameInfo<'tcx> {
    pub instance: ty::Instance<'tcx>,
    pub span: Span,
    pub lint_root: Option<hir::HirId>,
}

#[derive(Clone, Eq, PartialEq, Debug, HashStable)] // Miri debug-prints these
pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if `None` (that's a function
    /// that may never return). Also store layout of return place so
    /// we can validate it at that layout.
    /// `ret` stores the block we jump to on a normal return, while `unwind`
    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: Option<mir::BasicBlock> },
    /// Just do nothing: Used by Main and for the `box_alloc` hook in miri.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
    None { cleanup: bool },
}

/// State of a local variable including a memoized layout.
#[derive(Clone, PartialEq, Eq, HashStable)]
pub struct LocalState<'tcx, Tag = ()> {
    pub value: LocalValue<Tag>,
    /// Don't modify if `Some`, this is only used to prevent computing the layout twice.
    #[stable_hasher(ignore)]
    pub layout: Cell<Option<TyAndLayout<'tcx>>>,
}

/// Current value of a local variable.
#[derive(Copy, Clone, PartialEq, Eq, Debug, HashStable)] // Miri debug-prints these
pub enum LocalValue<Tag = ()> {
    /// This local is not currently alive, and cannot be used at all.
    Dead,
    /// This local is alive but not yet initialized. It can be written to
    /// but not read from or its address taken. Locals get initialized on
    /// first write because for unsized locals, we do not know their size
    /// before that.
    Uninitialized,
    /// A normal, live local.
    /// Mostly for convenience, we re-use the `Operand` type here.
    /// This is an optimization over just always having a pointer here;
    /// we can thus avoid doing an allocation when the local just stores
    /// immediate values *and* never has its address taken.
    Live(Operand<Tag>),
}
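
// For illustration, the lifecycle of a local in terms of these variants (a
// sketch; `StorageLive`/`StorageDead` are the MIR statements driving it):
//
//     Dead --StorageLive--> Uninitialized --first write--> Live(..)
//      ^                          |                           |
//      +------StorageDead---------+-----------StorageDead-----+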

impl<'tcx, Tag: Copy + 'static> LocalState<'tcx, Tag> {
    /// Read the local's value or error if the local is not yet live or not live anymore.
    ///
    /// Note: This may only be invoked from the `Machine::access_local` hook and not from
    /// anywhere else. You may be invalidating machine invariants if you do!
    pub fn access(&self) -> InterpResult<'tcx, Operand<Tag>> {
        match self.value {
            LocalValue::Dead => throw_ub!(DeadLocal),
            LocalValue::Uninitialized => {
                bug!("The type checker should prevent reading from a never-written local")
            }
            LocalValue::Live(val) => Ok(val),
        }
    }

    /// Overwrite the local. If the local can be overwritten in place, return a reference
    /// to do so; otherwise return the `MemPlace` to consult instead.
    ///
    /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
    /// anywhere else. You may be invalidating machine invariants if you do!
    pub fn access_mut(
        &mut self,
    ) -> InterpResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> {
        match self.value {
            LocalValue::Dead => throw_ub!(DeadLocal),
            LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
            ref mut
            local @ (LocalValue::Live(Operand::Immediate(_)) | LocalValue::Uninitialized) => {
                Ok(Ok(local))
            }
        }
    }
}

impl<'mir, 'tcx, Tag> Frame<'mir, 'tcx, Tag> {
    pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'mir, 'tcx, Tag, Extra> {
        Frame {
            body: self.body,
            instance: self.instance,
            return_to_block: self.return_to_block,
            return_place: self.return_place,
            locals: self.locals,
            loc: self.loc,
            extra,
            tracing_span: self.tracing_span,
        }
    }
}

impl<'mir, 'tcx, Tag, Extra> Frame<'mir, 'tcx, Tag, Extra> {
    /// Return the `SourceInfo` of the current instruction.
    pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
        self.loc.ok().map(|loc| self.body.source_info(loc))
    }

    pub fn current_span(&self) -> Span {
        match self.loc {
            Ok(loc) => self.body.source_info(loc).span,
            Err(span) => span,
        }
    }
}

impl<'tcx> fmt::Display for FrameInfo<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ty::tls::with(|tcx| {
            if tcx.def_key(self.instance.def_id()).disambiguated_data.data
                == DefPathData::ClosureExpr
            {
                write!(f, "inside closure")?;
            } else {
                write!(f, "inside `{}`", self.instance)?;
            }
            if !self.span.is_dummy() {
                let lo = tcx.sess.source_map().lookup_char_pos(self.span.lo());
                write!(f, " at {}:{}:{}", lo.file.name, lo.line, lo.col.to_usize() + 1)?;
            }
            Ok(())
        })
    }
}

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for InterpCx<'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        *self.tcx
    }
}

impl<'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.param_env
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> LayoutOf for InterpCx<'mir, 'tcx, M> {
    type Ty = Ty<'tcx>;
    type TyAndLayout = InterpResult<'tcx, TyAndLayout<'tcx>>;

    #[inline]
    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyAndLayout {
        self.tcx
            .layout_of(self.param_env.and(ty))
            .map_err(|layout| err_inval!(Layout(layout)).into())
    }
}
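
// With the impls above in place, interpreter code can query layouts directly on
// the context. A minimal sketch (rustc-internal API; `ecx` is an `InterpCx`):
//
//     let layout = ecx.layout_of(ty)?; // via the `LayoutOf` impl above
//     let size = layout.size;          // e.g. to size a fresh allocation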

/// Test if it is valid for a MIR assignment to assign `src`-typed place to `dest`-typed value.
/// This test should be symmetric, as it is primarily about layout compatibility.
pub(super) fn mir_assign_valid_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    src: TyAndLayout<'tcx>,
    dest: TyAndLayout<'tcx>,
) -> bool {
    // Type-changing assignments can happen when subtyping is used. While
    // all normal lifetimes are erased, higher-ranked types with their
    // late-bound lifetimes are still around and can lead to type
    // differences. So we compare ignoring lifetimes.
    if equal_up_to_regions(tcx, param_env, src.ty, dest.ty) {
        // Make sure the layout is equal, too -- just to be safe. Miri really
        // needs layout equality. For performance reasons we skip this check when
        // the types are equal. Equal types *can* have different layouts when
        // enum downcast is involved (as enum variants carry the type of the
        // enum), but those should never occur in assignments.
        if cfg!(debug_assertions) || src.ty != dest.ty {
            assert_eq!(src.layout, dest.layout);
        }
        true
    } else {
        false
    }
}
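
// For illustration, a pair of types that differ only in regions and therefore
// pass `equal_up_to_regions` (a sketch): `fn(&'static i32)` vs.
// `for<'a> fn(&'a i32)`. Ordinary lifetimes are erased by the time MIR is
// interpreted, but the higher-ranked `for<'a>` binder survives erasure, which
// is why the comparison must ignore regions.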

/// Use the already known layout if given (but sanity check in debug mode),
/// or compute the layout.
#[cfg_attr(not(debug_assertions), inline(always))]
pub(super) fn from_known_layout<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    param_env: ParamEnv<'tcx>,
    known_layout: Option<TyAndLayout<'tcx>>,
    compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
    match known_layout {
        None => compute(),
        Some(known_layout) => {
            if cfg!(debug_assertions) {
                let check_layout = compute()?;
                if !mir_assign_valid_types(tcx.tcx, param_env, check_layout, known_layout) {
                    span_bug!(
                        tcx.span,
                        "expected type differs from actual type.\nexpected: {:?}\nactual: {:?}",
                        known_layout.ty,
                        check_layout.ty,
                    );
                }
            }
            Ok(known_layout)
        }
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub fn new(
        tcx: TyCtxt<'tcx>,
        root_span: Span,
        param_env: ty::ParamEnv<'tcx>,
        machine: M,
        memory_extra: M::MemoryExtra,
    ) -> Self {
        InterpCx {
            machine,
            tcx: tcx.at(root_span),
            param_env,
            memory: Memory::new(tcx, memory_extra),
            vtables: FxHashMap::default(),
        }
    }

    #[inline(always)]
    pub fn cur_span(&self) -> Span {
        self.stack().last().map(|f| f.current_span()).unwrap_or(self.tcx.span)
    }

    #[inline(always)]
    pub fn force_ptr(
        &self,
        scalar: Scalar<M::PointerTag>,
    ) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
        self.memory.force_ptr(scalar)
    }

    #[inline(always)]
    pub fn force_bits(
        &self,
        scalar: Scalar<M::PointerTag>,
        size: Size,
    ) -> InterpResult<'tcx, u128> {
        self.memory.force_bits(scalar, size)
    }

    /// Call this to turn untagged "global" pointers (obtained via `tcx`) into
    /// the machine pointer to the allocation. Must never be used
    /// for any other pointers, nor for TLS statics.
    ///
    /// Using the resulting pointer represents a *direct* access to that memory
    /// (e.g. by directly using a `static`),
    /// as opposed to access through a pointer that was created by the program.
    ///
    /// This function can fail only if `ptr` points to an `extern static`.
    #[inline(always)]
    pub fn global_base_pointer(&self, ptr: Pointer) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
        self.memory.global_base_pointer(ptr)
    }

    #[inline(always)]
    pub(crate) fn stack(&self) -> &[Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>] {
        M::stack(self)
    }

    #[inline(always)]
    pub(crate) fn stack_mut(
        &mut self,
    ) -> &mut Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>> {
        M::stack_mut(self)
    }

    #[inline(always)]
    pub fn frame_idx(&self) -> usize {
        let stack = self.stack();
        assert!(!stack.is_empty());
        stack.len() - 1
    }

    #[inline(always)]
    pub fn frame(&self) -> &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
        self.stack().last().expect("no call frames exist")
    }

    #[inline(always)]
    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
        self.stack_mut().last_mut().expect("no call frames exist")
    }

    #[inline(always)]
    pub(super) fn body(&self) -> &'mir mir::Body<'tcx> {
        self.frame().body
    }

    #[inline(always)]
    pub fn sign_extend(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        assert!(ty.abi.is_signed());
        ty.size.sign_extend(value)
    }

    #[inline(always)]
    pub fn truncate(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        ty.size.truncate(value)
    }
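
    // For illustration, `Size::sign_extend` and `Size::truncate` amount to the
    // following plain integer arithmetic (a standalone sketch for an 8-bit value):
    //
    //     let shift = 128 - 8; // 128 bits total, 8 of them significant
    //     let value = 0xFFu128; // `-1` as an `i8`
    //     assert_eq!(((value << shift) as i128 >> shift) as u128, u128::MAX);
    //     assert_eq!((u128::MAX << shift) >> shift, 0xFF);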

    #[inline]
    pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
        ty.is_sized(self.tcx, self.param_env)
    }

    #[inline]
    pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
        ty.is_freeze(self.tcx, self.param_env)
    }

    pub fn load_mir(
        &self,
        instance: ty::InstanceDef<'tcx>,
        promoted: Option<mir::Promoted>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        // Do not continue if typeck errors occurred (can only occur in local crate).
        let def = instance.with_opt_param();
        if let Some(def) = def.as_local() {
            if self.tcx.has_typeck_results(def.did) {
                if let Some(error_reported) = self.tcx.typeck_opt_const_arg(def).tainted_by_errors {
                    throw_inval!(AlreadyReported(error_reported))
                }
            }
        }
        trace!("load mir(instance={:?}, promoted={:?})", instance, promoted);
        if let Some(promoted) = promoted {
            return Ok(&self.tcx.promoted_mir_opt_const_arg(def)[promoted]);
        }
        match instance {
            ty::InstanceDef::Item(def) => {
                if self.tcx.is_mir_available(def.did) {
                    Ok(self.tcx.optimized_mir_opt_const_arg(def))
                } else {
                    throw_unsup!(NoMirFor(def.did))
                }
            }
            _ => Ok(self.tcx.instance_mir(instance)),
        }
    }

    /// Call this on things you got out of the MIR (so it is as generic as the current
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_current_frame_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
        &self,
        value: T,
    ) -> T {
        self.subst_from_frame_and_normalize_erasing_regions(self.frame(), value)
    }

    /// Call this on things you got out of the MIR (so it is as generic as the provided
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_frame_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
        &self,
        frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        value: T,
    ) -> T {
        frame.instance.subst_mir_and_normalize_erasing_regions(*self.tcx, self.param_env, &value)
    }

    /// The `substs` are assumed to already be in our interpreter "universe" (param_env).
    pub(super) fn resolve(
        &self,
        def: ty::WithOptConstParam<DefId>,
        substs: SubstsRef<'tcx>,
    ) -> InterpResult<'tcx, ty::Instance<'tcx>> {
        trace!("resolve: {:?}, {:#?}", def, substs);
        trace!("param_env: {:#?}", self.param_env);
        trace!("substs: {:#?}", substs);
        match ty::Instance::resolve_opt_const_arg(*self.tcx, self.param_env, def, substs) {
            Ok(Some(instance)) => Ok(instance),
            Ok(None) => throw_inval!(TooGeneric),

            // FIXME(eddyb) this could be a bit more specific than `AlreadyReported`.
            Err(error_reported) => throw_inval!(AlreadyReported(error_reported)),
        }
    }
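
    // For illustration, a typical call (a sketch; rustc-internal API): turn a
    // `DefId` plus fully substituted `substs` into a callable `Instance`, where
    // `Err(TooGeneric)` signals that generic parameters are still unresolved:
    //
    //     let instance = ecx.resolve(ty::WithOptConstParam::unknown(def_id), substs)?;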

    pub fn layout_of_local(
        &self,
        frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        // `const_prop` runs into this with an invalid (empty) frame, so we
        // have to support that case (mostly by skipping all caching).
        match frame.locals.get(local).and_then(|state| state.layout.get()) {
            None => {
                let layout = from_known_layout(self.tcx, self.param_env, layout, || {
                    let local_ty = frame.body.local_decls[local].ty;
                    let local_ty =
                        self.subst_from_frame_and_normalize_erasing_regions(frame, local_ty);
                    self.layout_of(local_ty)
                })?;
                if let Some(state) = frame.locals.get(local) {
                    // Layouts of locals are requested a lot, so we cache them.
                    state.layout.set(Some(layout));
                }
                Ok(layout)
            }
            Some(layout) => Ok(layout),
        }
    }
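
    // For illustration, the `Cell<Option<_>>` memoization used above, reduced to
    // its core (a standalone sketch): interior mutability lets us cache through
    // a shared reference, since the cached value is `Copy`.
    //
    //     let cache = std::cell::Cell::new(None);
    //     let value = cache.get().unwrap_or_else(|| {
    //         let computed = 42_u64; // stands in for the layout computation
    //         cache.set(Some(computed));
    //         computed
    //     });
    //     assert_eq!(value, 42);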

    /// Returns the actual dynamic size and alignment of the place at the given type.
    /// Only the "meta" (metadata) part of the place matters.
    /// This can fail to provide an answer for extern types.
    pub(super) fn size_and_align_of(
        &self,
        metadata: MemPlaceMeta<M::PointerTag>,
        layout: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        if !layout.is_unsized() {
            return Ok(Some((layout.size, layout.align.abi)));
        }
        match layout.ty.kind() {
            ty::Adt(..) | ty::Tuple(..) => {
                // First get the size of all statically known fields.
                // Don't use `type_of::sizing_type_of` because that expects the type to be sized,
                // and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!layout.ty.is_simd());
                assert!(layout.fields.count() > 0);
                trace!("DST layout: {:?}", layout);

                let sized_size = layout.fields.offset(layout.fields.count() - 1);
                let sized_align = layout.align.abi;
                trace!(
                    "DST {} statically sized prefix size: {:?} align: {:?}",
                    layout.ty,
                    sized_size,
                    sized_align
                );

                // Recurse to get the size of the dynamically sized field (must be
                // the last field). Can't have foreign types here, how would we
                // adjust alignment and size for them?
                let field = layout.field(self, layout.fields.count() - 1)?;
                let (unsized_size, unsized_align) = match self.size_and_align_of(metadata, field)? {
                    Some(size_and_align) => size_and_align,
                    None => {
                        // A field with an extern type. If this field is at offset 0, we behave
                        // like the underlying extern type.
                        // FIXME: Once we have made decisions for how to handle size and alignment
                        // of `extern type`, this should be adapted. It is just a temporary hack
                        // to get some code to work that probably ought to work.
                        if sized_size == Size::ZERO {
                            return Ok(None);
                        } else {
                            span_bug!(
                                self.cur_span(),
                                "Fields cannot be extern types, unless they are at offset 0"
                            )
                        }
                    }
                };

                // FIXME (#26403, #27023): We should be adding padding
                // to `sized_size` (to accommodate the `unsized_align`
                // required of the unsized field that follows) before
                // summing it with `unsized_size`. (Note that since #26403
                // is unfixed, we do not yet add the necessary padding
                // here. But this is where the add would go.)

                // Return the sum of sizes and max of aligns.
                let size = sized_size + unsized_size; // `Size` addition

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let align = sized_align.max(unsized_align);

                // Issue #27023: must add any necessary padding to `size`
                // (to make it a multiple of `align`) before returning it.
                let size = size.align_to(align);

                // Check if this brought us over the size limit.
                if size.bytes() >= self.tcx.data_layout.obj_size_bound() {
                    throw_ub!(InvalidMeta("total size is bigger than largest supported object"));
                }
                Ok(Some((size, align)))
            }
            ty::Dynamic(..) => {
                let vtable = metadata.unwrap_meta();
                // Read size and align from vtable (already checks size).
                Ok(Some(self.read_size_and_align_from_vtable(vtable)?))
            }

            ty::Slice(_) | ty::Str => {
                let len = metadata.unwrap_meta().to_machine_usize(self)?;
                let elem = layout.field(self, 0)?;

                // Make sure the slice is not too big.
                let size = elem.size.checked_mul(len, self).ok_or_else(|| {
                    err_ub!(InvalidMeta("slice is bigger than largest supported object"))
                })?;
                Ok(Some((size, elem.align.abi)))
            }

            ty::Foreign(_) => Ok(None),

            _ => span_bug!(self.cur_span(), "size_and_align_of::<{:?}> not supported", layout.ty),
        }
    }
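
    // A worked example of the computation above (illustrative): for a slice
    // `[u16]` with metadata `len == 3`, `elem.size == 2`, so the result is
    // `2 * 3 == 6` bytes at alignment 2. For `struct S { a: u32, tail: [u16] }`
    // with `len == 3`: `sized_size == 4` (the offset of `tail`),
    // `unsized_size == 6`, `align == max(4, 2) == 4`, and the total is
    // `(4 + 6)` rounded up to alignment 4, i.e. 12 bytes.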

    #[inline]
    pub fn size_and_align_of_mplace(
        &self,
        mplace: MPlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        self.size_and_align_of(mplace.meta, mplace.layout)
    }

    pub fn push_stack_frame(
        &mut self,
        instance: ty::Instance<'tcx>,
        body: &'mir mir::Body<'tcx>,
        return_place: Option<PlaceTy<'tcx, M::PointerTag>>,
        return_to_block: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        // First push a stack frame so we have access to the local substs.
        let pre_frame = Frame {
            body,
            loc: Err(body.span), // Span used for errors caused during preamble.
            return_to_block,
            return_place,
            // empty local array, we fill it in below, after we are inside the stack frame and
            // all methods actually know about the frame
            locals: IndexVec::new(),
            instance,
            tracing_span: SpanGuard::new(),
            extra: (),
        };
        let frame = M::init_frame_extra(self, pre_frame)?;
        self.stack_mut().push(frame);

        // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
        for const_ in &body.required_consts {
            let span = const_.span;
            let const_ =
                self.subst_from_current_frame_and_normalize_erasing_regions(const_.literal);
            self.const_to_op(const_, None).map_err(|err| {
                // If there was an error, set the span of the current frame to this constant.
                // Avoid doing this when evaluation succeeds.
                self.frame_mut().loc = Err(span);
                err
            })?;
        }

        // Locals are initially uninitialized.
        let dummy = LocalState { value: LocalValue::Uninitialized, layout: Cell::new(None) };
        let mut locals = IndexVec::from_elem(dummy, &body.local_decls);

        // Now mark those locals as dead that we do not want to initialize.
        match self.tcx.def_kind(instance.def_id()) {
            // statics and constants don't have `Storage*` statements, no need to look for them
            //
            // FIXME: The above is likely untrue. See
            // <https://github.com/rust-lang/rust/pull/70004#issuecomment-602022110>. Is it
            // okay to ignore `StorageDead`/`StorageLive` annotations during CTFE?
            DefKind::Static | DefKind::Const | DefKind::AssocConst => {}
            _ => {
                // Mark locals that use `Storage*` annotations as dead on function entry.
                let always_live = AlwaysLiveLocals::new(self.body());
                for local in locals.indices() {
                    if !always_live.contains(local) {
                        locals[local].value = LocalValue::Dead;
                    }
                }
            }
        }
        // done
        self.frame_mut().locals = locals;
        M::after_stack_push(self)?;
        self.frame_mut().loc = Ok(mir::Location::START);

        let span = info_span!("frame", "{}", instance);
        self.frame_mut().tracing_span.enter(span);

        Ok(())
    }

    /// Jump to the given block.
    #[inline]
    pub fn go_to_block(&mut self, target: mir::BasicBlock) {
        self.frame_mut().loc = Ok(mir::Location { block: target, statement_index: 0 });
    }

    /// *Return* to the given `target` basic block.
    /// Do *not* use for unwinding! Use `unwind_to_block` instead.
    ///
    /// If `target` is `None`, that indicates the function cannot return, so we raise UB.
    pub fn return_to_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.go_to_block(target);
            Ok(())
        } else {
            throw_ub!(Unreachable)
        }
    }

    /// *Unwind* to the given `target` basic block.
    /// Do *not* use for returning! Use `return_to_block` instead.
    ///
    /// If `target` is `None`, that indicates the function does not need cleanup during
    /// unwinding, and we will just keep propagating that upwards.
    pub fn unwind_to_block(&mut self, target: Option<mir::BasicBlock>) {
        self.frame_mut().loc = match target {
            Some(block) => Ok(mir::Location { block, statement_index: 0 }),
            None => Err(self.frame_mut().body.span),
        };
    }

    /// Pops the current frame from the stack, deallocating the
    /// memory for allocated locals.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
    pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Sanity check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Ok(loc) => self.body().basic_blocks()[loc.block].is_cleanup,
                Err(_) => true,
            }
        );

        if unwinding && self.frame_idx() == 0 {
            throw_ub_format!("unwinding past the topmost frame of the stack");
        }

        let frame =
            self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");

        if !unwinding {
            // Copy the return value to the caller's stack frame.
            if let Some(return_place) = frame.return_place {
                let op = self.access_local(&frame, mir::RETURN_PLACE, None)?;
                self.copy_op_transmute(op, return_place)?;
                trace!("{:?}", self.dump_place(*return_place));
            } else {
                throw_ub!(Unreachable);
            }
        }

        // Now where do we jump next?

        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
        // In that case, we return early. We also avoid validation in that case,
        // because this is CTFE and the final value will be thoroughly validated anyway.
        let (cleanup, next_block) = match frame.return_to_block {
            StackPopCleanup::Goto { ret, unwind } => {
                (true, Some(if unwinding { unwind } else { ret }))
            }
            StackPopCleanup::None { cleanup, .. } => (cleanup, None),
        };

        if !cleanup {
            assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
            assert!(next_block.is_none(), "tried to skip cleanup when we have a next block!");
            assert!(!unwinding, "tried to skip cleanup during unwinding");
            // Leak the locals, skip validation, skip machine hook.
            return Ok(());
        }

        // Cleanup: deallocate all locals that are backed by an allocation.
        for local in &frame.locals {
            self.deallocate_local(local.value)?;
        }

        if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump {
            // The hook already did everything.
            // We want to skip the `info!` below, hence early return.
            return Ok(());
        }
        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            let unwind = next_block.expect("Encountered StackPopCleanup::None when unwinding!");
            self.unwind_to_block(unwind);
        } else {
            // Follow the normal return edge.
            if let Some(ret) = next_block {
                self.return_to_block(ret)?;
            }
        }

        Ok(())
    }
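
    // For illustration, the edge selection above for
    // `StackPopCleanup::Goto { ret, unwind }`, in table form:
    //
    //     unwinding == false  =>  jump to `ret`     (normal return edge)
    //     unwinding == true   =>  jump to `unwind`  (cleanup edge; `None` keeps
    //                             propagating the unwind to the caller)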

    /// Mark a storage as live, killing the previous content and returning it.
    /// Remember to deallocate that!
    pub fn storage_live(
        &mut self,
        local: mir::Local,
    ) -> InterpResult<'tcx, LocalValue<M::PointerTag>> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
        trace!("{:?} is now live", local);

        let local_val = LocalValue::Uninitialized;
        // StorageLive *always* kills the value that's currently stored.
        // However, we do not error if the variable already is live;
        // see <https://github.com/rust-lang/rust/issues/42371>.
        Ok(mem::replace(&mut self.frame_mut().locals[local].value, local_val))
    }

    /// Returns the old value of the local.
    /// Remember to deallocate that!
    pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue<M::PointerTag> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
        trace!("{:?} is now dead", local);

        mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead)
    }
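
    // For illustration, the `mem::replace` pattern used by `storage_live` and
    // `storage_dead` (a standalone sketch): swap the new state in, hand the old
    // state back so the caller can deallocate it.
    //
    //     let mut slot = String::from("old");
    //     let old = std::mem::replace(&mut slot, String::from("new"));
    //     assert_eq!((old.as_str(), slot.as_str()), ("old", "new"));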

    pub(super) fn deallocate_local(
        &mut self,
        local: LocalValue<M::PointerTag>,
    ) -> InterpResult<'tcx> {
        // FIXME: should we tell the user that there was a local which was never written to?
        if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
            // All locals have a backing allocation, even if the allocation is empty
            // due to the local having ZST type.
            let ptr = ptr.assert_ptr();
            trace!("deallocating local: {:?}", self.memory.dump_alloc(ptr.alloc_id));
            self.memory.deallocate_local(ptr)?;
        };
        Ok(())
    }

    pub fn eval_to_allocation(
        &self,
        gid: GlobalId<'tcx>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
        // For statics we pick `ParamEnv::reveal_all`, because statics don't have generics
        // and thus don't care about the parameter environment. While we could just use
        // `self.param_env`, that would mean we invoke the query to evaluate the static
        // with different parameter environments, thus causing the static to be evaluated
        // multiple times.
        let param_env = if self.tcx.is_static(gid.instance.def_id()) {
            ty::ParamEnv::reveal_all()
        } else {
            self.param_env
        };
        let val = self.tcx.eval_to_allocation_raw(param_env.and(gid))?;
        self.raw_const_to_mplace(val)
    }

    #[must_use]
    pub fn dump_place(&'a self, place: Place<M::PointerTag>) -> PlacePrinter<'a, 'mir, 'tcx, M> {
        PlacePrinter { ecx: self, place }
    }

    #[must_use]
    pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> {
        let mut frames = Vec::new();
        for frame in self.stack().iter().rev() {
            let lint_root = frame.current_source_info().and_then(|source_info| {
                match &frame.body.source_scopes[source_info.scope].local_data {
                    mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
                    mir::ClearCrossCrate::Clear => None,
                }
            });
            let span = frame.current_span();

            frames.push(FrameInfo { span, instance: frame.instance, lint_root });
        }
        trace!("generate stacktrace: {:#?}", frames);
        frames
    }
}

#[doc(hidden)]
/// Helper struct for the `dump_place` function.
pub struct PlacePrinter<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    ecx: &'a InterpCx<'mir, 'tcx, M>,
    place: Place<M::PointerTag>,
}

impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
    for PlacePrinter<'a, 'mir, 'tcx, M>
{
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.place {
            Place::Local { frame, local } => {
                let mut allocs = Vec::new();
                write!(fmt, "{:?}", local)?;
                if frame != self.ecx.frame_idx() {
                    write!(fmt, " ({} frames up)", self.ecx.frame_idx() - frame)?;
                }
                write!(fmt, ":")?;

                match self.ecx.stack()[frame].locals[local].value {
                    LocalValue::Dead => write!(fmt, " is dead")?,
                    LocalValue::Uninitialized => write!(fmt, " is uninitialized")?,
                    LocalValue::Live(Operand::Indirect(mplace)) => match mplace.ptr {
                        Scalar::Ptr(ptr) => {
                            write!(
                                fmt,
                                " by align({}){} ref:",
                                mplace.align.bytes(),
                                match mplace.meta {
                                    MemPlaceMeta::Meta(meta) => format!(" meta({:?})", meta),
                                    MemPlaceMeta::Poison | MemPlaceMeta::None => String::new(),
                                }
                            )?;
                            allocs.push(ptr.alloc_id);
                        }
                        ptr => write!(fmt, " by integral ref: {:?}", ptr)?,
                    },
                    LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
                        write!(fmt, " {:?}", val)?;
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val {
                            allocs.push(ptr.alloc_id);
                        }
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
                        write!(fmt, " ({:?}, {:?})", val1, val2)?;
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val1 {
                            allocs.push(ptr.alloc_id);
                        }
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val2 {
                            allocs.push(ptr.alloc_id);
                        }
                    }
                }

                write!(fmt, ": {:?}", self.ecx.memory.dump_allocs(allocs))
            }
            Place::Ptr(mplace) => match mplace.ptr {
                Scalar::Ptr(ptr) => write!(
                    fmt,
                    "by align({}) ref: {:?}",
                    mplace.align.bytes(),
                    self.ecx.memory.dump_alloc(ptr.alloc_id)
                ),
                ptr => write!(fmt, " integral by ref: {:?}", ptr),
            },
        }
    }
}

impl<'ctx, 'mir, 'tcx, Tag, Extra> HashStable<StableHashingContext<'ctx>>
    for Frame<'mir, 'tcx, Tag, Extra>
where
    Extra: HashStable<StableHashingContext<'ctx>>,
    Tag: HashStable<StableHashingContext<'ctx>>,
{
    fn hash_stable(&self, hcx: &mut StableHashingContext<'ctx>, hasher: &mut StableHasher) {
        // Exhaustive match on fields to make sure we forget no field.
        let Frame {
            body,
            instance,
            return_to_block,
            return_place,
            locals,
            loc,
            extra,
            tracing_span: _,
        } = self;
        body.hash_stable(hcx, hasher);
        instance.hash_stable(hcx, hasher);
        return_to_block.hash_stable(hcx, hasher);
        return_place.as_ref().map(|r| &**r).hash_stable(hcx, hasher);
        locals.hash_stable(hcx, hasher);
        loc.hash_stable(hcx, hasher);
        extra.hash_stable(hcx, hasher);
    }
}