// compiler/rustc_mir/src/interpret/eval_context.rs (rustc 1.56.0-beta.4)
use std::cell::Cell;
use std::fmt;
use std::mem;

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
use rustc_index::vec::IndexVec;
use rustc_macros::HashStable;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir;
use rustc_middle::ty::layout::{self, TyAndLayout};
use rustc_middle::ty::{
    self, query::TyCtxtAt, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable,
};
use rustc_session::Limit;
use rustc_span::{Pos, Span};
use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};

use super::{
    AllocId, GlobalId, Immediate, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory,
    MemoryKind, Operand, Place, PlaceTy, Pointer, Provenance, Scalar, ScalarMaybeUninit,
    StackPopJump,
};
use crate::transform::validate::equal_up_to_regions;
use crate::util::storage::AlwaysLiveLocals;

pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    ///
    /// Note: the stack is provided by the machine.
    pub machine: M,

    /// The results of the type checker, from rustc.
    /// The span in this is the "root" of the evaluation, i.e., the const
    /// we are evaluating (if this is CTFE).
    pub tcx: TyCtxtAt<'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub(crate) param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'mir, 'tcx, M>,

    /// The recursion limit (cached from `tcx.recursion_limit()`).
    pub recursion_limit: Limit,
}

// The `PhantomData` exists to prevent this type from being `Send`. If it were sent across a
// thread boundary and dropped in the other thread, it would exit the span in the other thread.
struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);

impl SpanGuard {
    /// By default a `SpanGuard` does nothing.
    fn new() -> Self {
        Self(tracing::Span::none(), std::marker::PhantomData)
    }

    /// If a span is entered, we exit the previous span (if any, normally none) and enter the
    /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
    /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
    /// been pushed.
    fn enter(&mut self, span: tracing::Span) {
        // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
        // we never enter or exit more spans than vice versa. Unless you `mem::forget` the guard,
        // in which case we can't protect the tracing stack, but that'll just lead to weird
        // logging, no actual problems.
        *self = Self(span, std::marker::PhantomData);
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.enter(id);
        });
    }
}

impl Drop for SpanGuard {
    fn drop(&mut self) {
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.exit(id);
        });
    }
}
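
// A minimal sketch of the intended usage pattern (mirroring what `push_stack_frame`
// does further below): create an inert guard first, then enter the real span only
// once the frame is in place; the span is exited when the guard is dropped together
// with its frame.
//
//     let mut guard = SpanGuard::new();          // inert, no span entered yet
//     guard.enter(tracing::info_span!("frame")); // entered; exited on drop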

/// A stack frame.
pub struct Frame<'mir, 'tcx, Tag: Provenance = AllocId, Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub body: &'mir mir::Body<'tcx>,

    /// The def_id and substs of the current function.
    pub instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Work to perform when returning from this function.
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller.
    pub return_place: Option<PlaceTy<'tcx, Tag>>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `LocalState`s: a dead local cannot be accessed at all,
    /// while a live local can either directly contain an immediate value or refer to
    /// some part of an `Allocation`.
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Tag>>,

    /// The span of the `tracing` crate is stored here.
    /// When the guard is dropped, the span is exited. This gives us
    /// a full stack trace on all tracing statements.
    tracing_span: SpanGuard,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// If this is `Err`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    /// We basically abuse `Result` as `Either`.
    pub(super) loc: Result<mir::Location, Span>,
}

/// What we store about a frame in an interpreter backtrace.
#[derive(Debug)]
pub struct FrameInfo<'tcx> {
    pub instance: ty::Instance<'tcx>,
    pub span: Span,
    pub lint_root: Option<hir::HirId>,
}

/// Unwind information.
#[derive(Clone, Copy, Eq, PartialEq, Debug, HashStable)]
pub enum StackPopUnwind {
    /// The cleanup block.
    Cleanup(mir::BasicBlock),
    /// No cleanup needs to be done.
    Skip,
    /// Unwinding is not allowed (UB).
    NotAllowed,
}

#[derive(Clone, Copy, Eq, PartialEq, Debug, HashStable)] // Miri debug-prints these
pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if `None` (that's a function
    /// that must never return). Also store layout of return place so
    /// we can validate it at that layout.
    /// `ret` stores the block we jump to on a normal return, while `unwind`
    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: StackPopUnwind },
    /// Just do nothing: Used by `main` and for the `box_alloc` hook in Miri.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
    None { cleanup: bool },
}

/// State of a local variable, including a memoized layout.
#[derive(Clone, PartialEq, Eq, HashStable)]
pub struct LocalState<'tcx, Tag: Provenance = AllocId> {
    pub value: LocalValue<Tag>,
    /// Don't modify if `Some`; this is only used to prevent computing the layout twice.
    #[stable_hasher(ignore)]
    pub layout: Cell<Option<TyAndLayout<'tcx>>>,
}

/// Current value of a local variable.
#[derive(Copy, Clone, PartialEq, Eq, HashStable, Debug)] // Miri debug-prints these
pub enum LocalValue<Tag: Provenance = AllocId> {
    /// This local is not currently alive, and cannot be used at all.
    Dead,
    /// This local is alive but not yet initialized. It can be written to,
    /// but not read from, and its address cannot be taken. Locals get initialized on
    /// first write because for unsized locals, we do not know their size
    /// before that.
    Uninitialized,
    /// A normal, live local.
    /// Mostly for convenience, we re-use the `Operand` type here.
    /// This is an optimization over just always having a pointer here;
    /// we can thus avoid doing an allocation when the local just stores
    /// immediate values *and* never has its address taken.
    Live(Operand<Tag>),
}
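
// For example, a live `i32` local typically sits in `Live(Operand::Immediate(..))`
// and needs no backing allocation; once its address is taken it is forced into
// memory and becomes `Live(Operand::Indirect(..))`, referring to an `Allocation`.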

impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> {
    /// Read the local's value, or return an error if the local is not yet live or not
    /// live anymore.
    ///
    /// Note: This may only be invoked from the `Machine::access_local` hook and not from
    /// anywhere else. You may be invalidating machine invariants if you do!
    pub fn access(&self) -> InterpResult<'tcx, Operand<Tag>> {
        match self.value {
            LocalValue::Dead => throw_ub!(DeadLocal),
            LocalValue::Uninitialized => {
                bug!("The type checker should prevent reading from a never-written local")
            }
            LocalValue::Live(val) => Ok(val),
        }
    }

    /// Overwrite the local. If the local can be overwritten in place, return a reference
    /// to do so; otherwise return the `MemPlace` to consult instead.
    ///
    /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
    /// anywhere else. You may be invalidating machine invariants if you do!
    pub fn access_mut(
        &mut self,
    ) -> InterpResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> {
        match self.value {
            LocalValue::Dead => throw_ub!(DeadLocal),
            LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
            ref mut
            local @ (LocalValue::Live(Operand::Immediate(_)) | LocalValue::Uninitialized) => {
                Ok(Ok(local))
            }
        }
    }
}
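
// A minimal sketch of how a machine hook is expected to consume the nested
// `Result` returned by `access_mut` (names here are illustrative):
//
//     match local_state.access_mut()? {
//         Ok(local) => { /* immediate or uninitialized: overwrite `*local` in place */ }
//         Err(mplace) => { /* local lives in memory: write through the `MemPlace` */ }
//     }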

impl<'mir, 'tcx, Tag: Provenance> Frame<'mir, 'tcx, Tag> {
    pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'mir, 'tcx, Tag, Extra> {
        Frame {
            body: self.body,
            instance: self.instance,
            return_to_block: self.return_to_block,
            return_place: self.return_place,
            locals: self.locals,
            loc: self.loc,
            extra,
            tracing_span: self.tracing_span,
        }
    }
}

impl<'mir, 'tcx, Tag: Provenance, Extra> Frame<'mir, 'tcx, Tag, Extra> {
    /// Get the current location within the Frame.
    ///
    /// If this is `Err`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    /// We basically abuse `Result` as `Either`.
    ///
    /// Used by priroda.
    pub fn current_loc(&self) -> Result<mir::Location, Span> {
        self.loc
    }

    /// Return the `SourceInfo` of the current instruction.
    pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
        self.loc.ok().map(|loc| self.body.source_info(loc))
    }

    pub fn current_span(&self) -> Span {
        match self.loc {
            Ok(loc) => self.body.source_info(loc).span,
            Err(span) => span,
        }
    }
}

impl<'tcx> fmt::Display for FrameInfo<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ty::tls::with(|tcx| {
            if tcx.def_key(self.instance.def_id()).disambiguated_data.data
                == DefPathData::ClosureExpr
            {
                write!(f, "inside closure")?;
            } else {
                write!(f, "inside `{}`", self.instance)?;
            }
            if !self.span.is_dummy() {
                let sm = tcx.sess.source_map();
                let lo = sm.lookup_char_pos(self.span.lo());
                write!(
                    f,
                    " at {}:{}:{}",
                    sm.filename_for_diagnostics(&lo.file.name),
                    lo.line,
                    lo.col.to_usize() + 1
                )?;
            }
            Ok(())
        })
    }
}

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for InterpCx<'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        *self.tcx
    }
}

impl<'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.param_env
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> LayoutOf<'tcx> for InterpCx<'mir, 'tcx, M> {
    type Ty = Ty<'tcx>;
    type TyAndLayout = InterpResult<'tcx, TyAndLayout<'tcx>>;

    #[inline]
    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyAndLayout {
        self.tcx
            .layout_of(self.param_env.and(ty))
            .map_err(|layout| err_inval!(Layout(layout)).into())
    }
}

/// Test if it is valid for a MIR assignment to assign `src`-typed place to `dest`-typed value.
/// This test should be symmetric, as it is primarily about layout compatibility.
pub(super) fn mir_assign_valid_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    src: TyAndLayout<'tcx>,
    dest: TyAndLayout<'tcx>,
) -> bool {
    // Type-changing assignments can happen when subtyping is used. While
    // all normal lifetimes are erased, higher-ranked types with their
    // late-bound lifetimes are still around and can lead to type
    // differences. So we compare ignoring lifetimes.
    if equal_up_to_regions(tcx, param_env, src.ty, dest.ty) {
        // Make sure the layout is equal, too -- just to be safe. Miri really
        // needs layout equality. For performance reasons we skip this check when
        // the types are equal. Equal types *can* have different layouts when
        // enum downcast is involved (as enum variants carry the type of the
        // enum), but those should never occur in assignments.
        if cfg!(debug_assertions) || src.ty != dest.ty {
            assert_eq!(src.layout, dest.layout);
        }
        true
    } else {
        false
    }
}
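
// For example, MIR subtyping can assign a value of type `for<'a> fn(&'a u32)`
// to a place of type `fn(&'static u32)`: the `Ty`s differ, but only in their
// (late-bound) regions, so `equal_up_to_regions` accepts the pair and the
// layouts are then required to match.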

/// Use the already known layout if given (but sanity check in debug mode),
/// or compute the layout.
#[cfg_attr(not(debug_assertions), inline(always))]
pub(super) fn from_known_layout<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    param_env: ParamEnv<'tcx>,
    known_layout: Option<TyAndLayout<'tcx>>,
    compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
    match known_layout {
        None => compute(),
        Some(known_layout) => {
            if cfg!(debug_assertions) {
                let check_layout = compute()?;
                if !mir_assign_valid_types(tcx.tcx, param_env, check_layout, known_layout) {
                    span_bug!(
                        tcx.span,
                        "expected type differs from actual type.\nexpected: {:?}\nactual: {:?}",
                        known_layout.ty,
                        check_layout.ty,
                    );
                }
            }
            Ok(known_layout)
        }
    }
}
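
// Typical call shape (see `layout_of_local` below): pass the caller-provided
// layout hint, plus a closure that computes the layout from the type when no
// hint is available:
//
//     from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(local_ty))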

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub fn new(
        tcx: TyCtxt<'tcx>,
        root_span: Span,
        param_env: ty::ParamEnv<'tcx>,
        machine: M,
        memory_extra: M::MemoryExtra,
    ) -> Self {
        InterpCx {
            machine,
            tcx: tcx.at(root_span),
            param_env,
            memory: Memory::new(tcx, memory_extra),
            recursion_limit: tcx.recursion_limit(),
        }
    }

    #[inline(always)]
    pub fn cur_span(&self) -> Span {
        self.stack()
            .iter()
            .rev()
            .find(|frame| !frame.instance.def.requires_caller_location(*self.tcx))
            .map_or(self.tcx.span, |f| f.current_span())
    }

    #[inline(always)]
    pub fn scalar_to_ptr(&self, scalar: Scalar<M::PointerTag>) -> Pointer<Option<M::PointerTag>> {
        self.memory.scalar_to_ptr(scalar)
    }

    /// Call this to turn untagged "global" pointers (obtained via `tcx`) into
    /// the machine pointer to the allocation. Must never be used
    /// for any other pointers, nor for TLS statics.
    ///
    /// Using the resulting pointer represents a *direct* access to that memory
    /// (e.g. by directly using a `static`),
    /// as opposed to access through a pointer that was created by the program.
    ///
    /// This function can fail only if `ptr` points to an `extern static`.
    #[inline(always)]
    pub fn global_base_pointer(&self, ptr: Pointer) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
        self.memory.global_base_pointer(ptr)
    }

    #[inline(always)]
    pub(crate) fn stack(&self) -> &[Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>] {
        M::stack(self)
    }

    #[inline(always)]
    pub(crate) fn stack_mut(
        &mut self,
    ) -> &mut Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>> {
        M::stack_mut(self)
    }

    #[inline(always)]
    pub fn frame_idx(&self) -> usize {
        let stack = self.stack();
        assert!(!stack.is_empty());
        stack.len() - 1
    }

    #[inline(always)]
    pub fn frame(&self) -> &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
        self.stack().last().expect("no call frames exist")
    }

    #[inline(always)]
    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
        self.stack_mut().last_mut().expect("no call frames exist")
    }

    #[inline(always)]
    pub(super) fn body(&self) -> &'mir mir::Body<'tcx> {
        self.frame().body
    }

    #[inline(always)]
    pub fn sign_extend(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        assert!(ty.abi.is_signed());
        ty.size.sign_extend(value)
    }

    #[inline(always)]
    pub fn truncate(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        ty.size.truncate(value)
    }

    #[inline]
    pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
        ty.is_freeze(self.tcx, self.param_env)
    }

    pub fn load_mir(
        &self,
        instance: ty::InstanceDef<'tcx>,
        promoted: Option<mir::Promoted>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        // Do not continue if typeck errors occurred (can only occur in local crate).
        let def = instance.with_opt_param();
        if let Some(def) = def.as_local() {
            if self.tcx.has_typeck_results(def.did) {
                if let Some(error_reported) = self.tcx.typeck_opt_const_arg(def).tainted_by_errors {
                    throw_inval!(AlreadyReported(error_reported))
                }
            }
        }
        trace!("load mir(instance={:?}, promoted={:?})", instance, promoted);
        if let Some(promoted) = promoted {
            return Ok(&self.tcx.promoted_mir_opt_const_arg(def)[promoted]);
        }
        M::load_mir(self, instance)
    }

    /// Call this on things you got out of the MIR (so it is as generic as the current
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_current_frame_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
        &self,
        value: T,
    ) -> T {
        self.subst_from_frame_and_normalize_erasing_regions(self.frame(), value)
    }

    /// Call this on things you got out of the MIR (so it is as generic as the provided
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_frame_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
        &self,
        frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        value: T,
    ) -> T {
        frame.instance.subst_mir_and_normalize_erasing_regions(*self.tcx, self.param_env, value)
    }

    /// The `substs` are assumed to already be in our interpreter "universe" (param_env).
    pub(super) fn resolve(
        &self,
        def: ty::WithOptConstParam<DefId>,
        substs: SubstsRef<'tcx>,
    ) -> InterpResult<'tcx, ty::Instance<'tcx>> {
        trace!("resolve: {:?}, {:#?}", def, substs);
        trace!("param_env: {:#?}", self.param_env);
        trace!("substs: {:#?}", substs);
        match ty::Instance::resolve_opt_const_arg(*self.tcx, self.param_env, def, substs) {
            Ok(Some(instance)) => Ok(instance),
            Ok(None) => throw_inval!(TooGeneric),

            // FIXME(eddyb) this could be a bit more specific than `AlreadyReported`.
            Err(error_reported) => throw_inval!(AlreadyReported(error_reported)),
        }
    }

    #[inline(always)]
    pub fn layout_of_local(
        &self,
        frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        // `const_prop` runs into this with an invalid (empty) frame, so we
        // have to support that case (mostly by skipping all caching).
        match frame.locals.get(local).and_then(|state| state.layout.get()) {
            None => {
                let layout = from_known_layout(self.tcx, self.param_env, layout, || {
                    let local_ty = frame.body.local_decls[local].ty;
                    let local_ty =
                        self.subst_from_frame_and_normalize_erasing_regions(frame, local_ty);
                    self.layout_of(local_ty)
                })?;
                if let Some(state) = frame.locals.get(local) {
                    // Layouts of locals are requested a lot, so we cache them.
                    state.layout.set(Some(layout));
                }
                Ok(layout)
            }
            Some(layout) => Ok(layout),
        }
    }

    /// Returns the actual dynamic size and alignment of the place at the given type.
    /// Only the "meta" (metadata) part of the place matters.
    /// This can fail to provide an answer for extern types.
    pub(super) fn size_and_align_of(
        &self,
        metadata: &MemPlaceMeta<M::PointerTag>,
        layout: &TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        if !layout.is_unsized() {
            return Ok(Some((layout.size, layout.align.abi)));
        }
        match layout.ty.kind() {
            ty::Adt(..) | ty::Tuple(..) => {
                // First get the size of all statically known fields.
                // Don't use `type_of::sizing_type_of` because that expects the type to be
                // sized, and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!layout.ty.is_simd());
                assert!(layout.fields.count() > 0);
                trace!("DST layout: {:?}", layout);

                let sized_size = layout.fields.offset(layout.fields.count() - 1);
                let sized_align = layout.align.abi;
                trace!(
                    "DST {} statically sized prefix size: {:?} align: {:?}",
                    layout.ty,
                    sized_size,
                    sized_align
                );

                // Recurse to get the size of the dynamically sized field (must be
                // the last field). Can't have foreign types here, how would we
                // adjust alignment and size for them?
                let field = layout.field(self, layout.fields.count() - 1);
                let (unsized_size, unsized_align) =
                    match self.size_and_align_of(metadata, &field)? {
                        Some(size_and_align) => size_and_align,
                        None => {
                            // A field with extern type. If this field is at offset 0, we behave
                            // like the underlying extern type.
                            // FIXME: Once we have made decisions for how to handle size and
                            // alignment of `extern type`, this should be adapted. It is just a
                            // temporary hack to get some code to work that probably ought to work.
                            if sized_size == Size::ZERO {
                                return Ok(None);
                            } else {
                                span_bug!(
                                    self.cur_span(),
                                    "Fields cannot be extern types, unless they are at offset 0"
                                )
                            }
                        }
                    };

                // FIXME (#26403, #27023): We should be adding padding
                // to `sized_size` (to accommodate the `unsized_align`
                // required of the unsized field that follows) before
                // summing it with `sized_size`. (Note that since #26403
                // is unfixed, we do not yet add the necessary padding
                // here. But this is where the add would go.)

                // Return the sum of sizes and max of aligns.
                let size = sized_size + unsized_size; // `Size` addition

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let align = sized_align.max(unsized_align);

                // Issue #27023: must add any necessary padding to `size`
                // (to make it a multiple of `align`) before returning it.
                let size = size.align_to(align);

                // Check if this brought us over the size limit.
                if size.bytes() >= self.tcx.data_layout.obj_size_bound() {
                    throw_ub!(InvalidMeta("total size is bigger than largest supported object"));
                }
                Ok(Some((size, align)))
            }
            ty::Dynamic(..) => {
                let vtable = self.scalar_to_ptr(metadata.unwrap_meta());
                // Read size and align from vtable (already checks size).
                Ok(Some(self.read_size_and_align_from_vtable(vtable)?))
            }

            ty::Slice(_) | ty::Str => {
                let len = metadata.unwrap_meta().to_machine_usize(self)?;
                let elem = layout.field(self, 0);

                // Make sure the slice is not too big.
                let size = elem.size.checked_mul(len, self).ok_or_else(|| {
                    err_ub!(InvalidMeta("slice is bigger than largest supported object"))
                })?;
                Ok(Some((size, elem.align.abi)))
            }

            ty::Foreign(_) => Ok(None),

            _ => span_bug!(self.cur_span(), "size_and_align_of::<{:?}> not supported", layout.ty),
        }
    }
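
    // Worked example for the `Adt`/`Tuple` arm above, for a hypothetical
    // `struct S { a: u16, b: [u32] }` whose metadata says the slice has length 3:
    // the statically sized prefix ends at the offset of `b` (4 bytes, since `a`
    // is padded up to the `u32` alignment), the unsized tail is 3 * 4 = 12 bytes
    // with align 4, so the total is align_to(4 + 12, max(4, 4)) = 16 bytes, align 4.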

    #[inline]
    pub fn size_and_align_of_mplace(
        &self,
        mplace: &MPlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        self.size_and_align_of(&mplace.meta, &mplace.layout)
    }

    pub fn push_stack_frame(
        &mut self,
        instance: ty::Instance<'tcx>,
        body: &'mir mir::Body<'tcx>,
        return_place: Option<&PlaceTy<'tcx, M::PointerTag>>,
        return_to_block: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        // First push a stack frame so we have access to the local substs.
        let pre_frame = Frame {
            body,
            loc: Err(body.span), // Span used for errors caused during preamble.
            return_to_block,
            return_place: return_place.copied(),
            // empty local array, we fill it in below, after we are inside the stack frame and
            // all methods actually know about the frame
            locals: IndexVec::new(),
            instance,
            tracing_span: SpanGuard::new(),
            extra: (),
        };
        let frame = M::init_frame_extra(self, pre_frame)?;
        self.stack_mut().push(frame);

        // Make sure all the constants required by this frame evaluate successfully
        // (post-monomorphization check).
        for const_ in &body.required_consts {
            let span = const_.span;
            let const_ =
                self.subst_from_current_frame_and_normalize_erasing_regions(const_.literal);
            self.mir_const_to_op(&const_, None).map_err(|err| {
                // If there was an error, set the span of the current frame to this constant.
                // Avoid doing this when evaluation succeeds.
                self.frame_mut().loc = Err(span);
                err
            })?;
        }

        // Locals are initially uninitialized.
        let dummy = LocalState { value: LocalValue::Uninitialized, layout: Cell::new(None) };
        let mut locals = IndexVec::from_elem(dummy, &body.local_decls);

        // Now mark those locals as dead that we do not want to initialize:
        // locals that have `Storage*` annotations are dead on function entry.
        let always_live = AlwaysLiveLocals::new(self.body());
        for local in locals.indices() {
            if !always_live.contains(local) {
                locals[local].value = LocalValue::Dead;
            }
        }
        // done
        self.frame_mut().locals = locals;
        M::after_stack_push(self)?;
        self.frame_mut().loc = Ok(mir::Location::START);

        let span = info_span!("frame", "{}", instance);
        self.frame_mut().tracing_span.enter(span);

        Ok(())
    }
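
    // The sequence above is ordered deliberately: the frame is pushed first (with
    // no locals) so that `self.frame()` and the substitution helpers work, then
    // required consts are checked, then locals are allocated and the non-always-live
    // ones marked dead, and only then is `loc` set to `Location::START` and the
    // tracing span entered.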

    /// Jump to the given block.
    #[inline]
    pub fn go_to_block(&mut self, target: mir::BasicBlock) {
        self.frame_mut().loc = Ok(mir::Location { block: target, statement_index: 0 });
    }

    /// *Return* to the given `target` basic block.
    /// Do *not* use for unwinding! Use `unwind_to_block` instead.
    ///
    /// If `target` is `None`, that indicates the function cannot return, so we raise UB.
    pub fn return_to_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.go_to_block(target);
            Ok(())
        } else {
            throw_ub!(Unreachable)
        }
    }

    /// *Unwind* to the given `target` basic block.
    /// Do *not* use for returning! Use `return_to_block` instead.
    ///
    /// If `target` is `StackPopUnwind::Skip`, that indicates the function does not need cleanup
    /// during unwinding, and we will just keep propagating that upwards.
    ///
    /// If `target` is `StackPopUnwind::NotAllowed`, that indicates the function does not allow
    /// unwinding, and doing so is UB.
    pub fn unwind_to_block(&mut self, target: StackPopUnwind) -> InterpResult<'tcx> {
        self.frame_mut().loc = match target {
            StackPopUnwind::Cleanup(block) => Ok(mir::Location { block, statement_index: 0 }),
            StackPopUnwind::Skip => Err(self.frame_mut().body.span),
            StackPopUnwind::NotAllowed => {
                throw_ub_format!("unwinding past a stack frame that does not allow unwinding")
            }
        };
        Ok(())
    }

    /// Pops the current frame from the stack, deallocating the
    /// memory for allocated locals.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
    pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Sanity check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Ok(loc) => self.body().basic_blocks()[loc.block].is_cleanup,
                Err(_) => true,
            }
        );

        if unwinding && self.frame_idx() == 0 {
            throw_ub_format!("unwinding past the topmost frame of the stack");
        }

        let frame =
            self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");

        if !unwinding {
            // Copy the return value to the caller's stack frame.
            if let Some(ref return_place) = frame.return_place {
                let op = self.access_local(&frame, mir::RETURN_PLACE, None)?;
                self.copy_op_transmute(&op, return_place)?;
                trace!("{:?}", self.dump_place(**return_place));
            } else {
                throw_ub!(Unreachable);
            }
        }

        let return_to_block = frame.return_to_block;

        // Now where do we jump next?

        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
        // In that case, we return early. We also avoid validation in that case,
        // because this is CTFE and the final value will be thoroughly validated anyway.
        let cleanup = match return_to_block {
            StackPopCleanup::Goto { .. } => true,
            StackPopCleanup::None { cleanup, .. } => cleanup,
        };

        if !cleanup {
            assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
            assert!(!unwinding, "tried to skip cleanup during unwinding");
            // Leak the locals, skip validation, skip machine hook.
            return Ok(());
        }

        // Cleanup: deallocate all locals that are backed by an allocation.
        for local in &frame.locals {
            self.deallocate_local(local.value)?;
        }

        if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump {
            // The hook already did everything.
            // We want to skip the `info!` below, hence early return.
            return Ok(());
        }
        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            let unwind = match return_to_block {
                StackPopCleanup::Goto { unwind, .. } => unwind,
                StackPopCleanup::None { .. } => {
                    panic!("Encountered StackPopCleanup::None when unwinding!")
                }
            };
            self.unwind_to_block(unwind)
        } else {
            // Follow the normal return edge.
            match return_to_block {
                StackPopCleanup::Goto { ret, .. } => self.return_to_block(ret),
                StackPopCleanup::None { .. } => Ok(()),
            }
        }
    }
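
    // Note the asymmetry in the dispatch above: a normal return follows the `ret`
    // edge of `StackPopCleanup::Goto` (raising UB if it is `None`), while unwinding
    // follows the `unwind` edge and must never encounter `StackPopCleanup::None`.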

    /// Mark a storage as live, killing the previous content.
    pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
        trace!("{:?} is now live", local);

        let local_val = LocalValue::Uninitialized;
        // StorageLive expects the local to be dead, and marks it live.
        let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
        if !matches!(old, LocalValue::Dead) {
            throw_ub_format!("StorageLive on a local that was already live");
        }
        Ok(())
    }

    pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
        trace!("{:?} is now dead", local);

        // It is entirely okay for this local to be already dead (at least that's how we
        // currently generate MIR).
        let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead);
        self.deallocate_local(old)?;
        Ok(())
    }
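
    // These two methods implement the MIR `StorageLive(_n)` / `StorageDead(_n)`
    // statements. For MIR like
    //
    //     StorageLive(_2);
    //     _2 = const 5_i32;
    //     StorageDead(_2);
    //
    // the interpreter calls `storage_live(_2)` (making the local uninitialized),
    // the assignment initializes it, and `storage_dead(_2)` kills it, deallocating
    // any backing memory via `deallocate_local`.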

    fn deallocate_local(&mut self, local: LocalValue<M::PointerTag>) -> InterpResult<'tcx> {
        if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
            // All locals have a backing allocation, even if the allocation is empty
            // due to the local having ZST type. Hence we can `unwrap`.
            trace!(
                "deallocating local {:?}: {:?}",
                local,
                self.memory.dump_alloc(ptr.provenance.unwrap().get_alloc_id())
            );
            self.memory.deallocate(ptr, None, MemoryKind::Stack)?;
        };
        Ok(())
    }

    pub fn eval_to_allocation(
        &self,
        gid: GlobalId<'tcx>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
        // For statics we pick `ParamEnv::reveal_all`, because statics don't have generics
        // and thus don't care about the parameter environment. While we could just use
        // `self.param_env`, that would mean we invoke the query to evaluate the static
        // with different parameter environments, thus causing the static to be evaluated
        // multiple times.
        let param_env = if self.tcx.is_static(gid.instance.def_id()) {
            ty::ParamEnv::reveal_all()
        } else {
            self.param_env
        };
        let val = self.tcx.eval_to_allocation_raw(param_env.and(gid))?;
        self.raw_const_to_mplace(val)
    }

    #[must_use]
    pub fn dump_place(&'a self, place: Place<M::PointerTag>) -> PlacePrinter<'a, 'mir, 'tcx, M> {
        PlacePrinter { ecx: self, place }
    }

    #[must_use]
    pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> {
        let mut frames = Vec::new();
        for frame in self
            .stack()
            .iter()
            .rev()
            .skip_while(|frame| frame.instance.def.requires_caller_location(*self.tcx))
        {
            let lint_root = frame.current_source_info().and_then(|source_info| {
                match &frame.body.source_scopes[source_info.scope].local_data {
                    mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
                    mir::ClearCrossCrate::Clear => None,
                }
            });
            let span = frame.current_span();

            frames.push(FrameInfo { span, instance: frame.instance, lint_root });
        }
        trace!("generate stacktrace: {:#?}", frames);
        frames
    }
}

#[doc(hidden)]
/// Helper struct for the `dump_place` function.
pub struct PlacePrinter<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    ecx: &'a InterpCx<'mir, 'tcx, M>,
    place: Place<M::PointerTag>,
}

impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
    for PlacePrinter<'a, 'mir, 'tcx, M>
{
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.place {
            Place::Local { frame, local } => {
                let mut allocs = Vec::new();
                write!(fmt, "{:?}", local)?;
                if frame != self.ecx.frame_idx() {
                    write!(fmt, " ({} frames up)", self.ecx.frame_idx() - frame)?;
                }
                write!(fmt, ":")?;

                match self.ecx.stack()[frame].locals[local].value {
                    LocalValue::Dead => write!(fmt, " is dead")?,
                    LocalValue::Uninitialized => write!(fmt, " is uninitialized")?,
                    LocalValue::Live(Operand::Indirect(mplace)) => {
                        write!(
                            fmt,
                            " by align({}){} ref {:?}:",
                            mplace.align.bytes(),
                            match mplace.meta {
                                MemPlaceMeta::Meta(meta) => format!(" meta({:?})", meta),
                                MemPlaceMeta::Poison | MemPlaceMeta::None => String::new(),
                            },
                            mplace.ptr,
                        )?;
                        allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
                        write!(fmt, " {:?}", val)?;
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
                        write!(fmt, " ({:?}, {:?})", val1, val2)?;
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val1 {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val2 {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                    }
                }

                write!(fmt, ": {:?}", self.ecx.memory.dump_allocs(allocs))
            }
            Place::Ptr(mplace) => match mplace.ptr.provenance.map(Provenance::get_alloc_id) {
                Some(alloc_id) => write!(
                    fmt,
                    "by align({}) ref {:?}: {:?}",
                    mplace.align.bytes(),
                    mplace.ptr,
                    self.ecx.memory.dump_alloc(alloc_id)
                ),
                ptr => write!(fmt, " integral by ref: {:?}", ptr),
            },
        }
    }
}

impl<'ctx, 'mir, 'tcx, Tag: Provenance, Extra> HashStable<StableHashingContext<'ctx>>
    for Frame<'mir, 'tcx, Tag, Extra>
where
    Extra: HashStable<StableHashingContext<'ctx>>,
    Tag: HashStable<StableHashingContext<'ctx>>,
{
    fn hash_stable(&self, hcx: &mut StableHashingContext<'ctx>, hasher: &mut StableHasher) {
        // Exhaustive match on fields to make sure we forget no field.
        let Frame {
            body,
            instance,
            return_to_block,
            return_place,
            locals,
            loc,
            extra,
            tracing_span: _,
        } = self;
        body.hash_stable(hcx, hasher);
        instance.hash_stable(hcx, hasher);
        return_to_block.hash_stable(hcx, hasher);
        return_place.as_ref().map(|r| &**r).hash_stable(hcx, hasher);
        locals.hash_stable(hcx, hasher);
        loc.hash_stable(hcx, hasher);
        extra.hash_stable(hcx, hasher);
    }
}