compiler/rustc_const_eval/src/interpret/eval_context.rs (rustc 1.63.0)
1 use std::cell::Cell;
2 use std::fmt;
3 use std::mem;
4
5 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
6 use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
7 use rustc_index::vec::IndexVec;
8 use rustc_macros::HashStable;
9 use rustc_middle::mir;
10 use rustc_middle::mir::interpret::{InterpError, InvalidProgramInfo};
11 use rustc_middle::ty::layout::{
12 self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers,
13 TyAndLayout,
14 };
15 use rustc_middle::ty::{
16 self, query::TyCtxtAt, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable,
17 };
18 use rustc_mir_dataflow::storage::always_live_locals;
19 use rustc_query_system::ich::StableHashingContext;
20 use rustc_session::Limit;
21 use rustc_span::{Pos, Span};
22 use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayout};
23
24 use super::{
25 AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace,
26 MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, PointerArithmetic, Provenance,
27 Scalar, ScalarMaybeUninit, StackPopJump,
28 };
29 use crate::transform::validate::equal_up_to_regions;
30
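/// The interpreter context: the state shared by all interpreter operations, generic over a
/// `Machine` (compile-time evaluation in rustc, or an external interpreter such as Miri).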
31 pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
32 /// Stores the `Machine` instance.
33 ///
34 /// Note: the stack is provided by the machine.
35 pub machine: M,
36
37 /// The results of the type checker, from rustc.
38 /// The span in this is the "root" of the evaluation, i.e., the const
39 /// we are evaluating (if this is CTFE).
40 pub tcx: TyCtxtAt<'tcx>,
41
42 /// Bounds in scope for polymorphic evaluations.
43 pub(crate) param_env: ty::ParamEnv<'tcx>,
44
45 /// The virtual memory system.
46 pub memory: Memory<'mir, 'tcx, M>,
47
48 /// The recursion limit (cached from `tcx.recursion_limit()`)
49 pub recursion_limit: Limit,
50 }
51
52 // The `PhantomData` exists to prevent this type from being `Send`. If it were sent across a thread
53 // boundary and dropped in the other thread, it would exit the span in the other thread.
54 struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);
55
56 impl SpanGuard {
57 /// By default a `SpanGuard` does nothing.
58 fn new() -> Self {
59 Self(tracing::Span::none(), std::marker::PhantomData)
60 }
61
62 /// If a span is entered, we exit the previous span (if any, normally none) and enter the
63 /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
64 /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
65 /// been pushed.
66 fn enter(&mut self, span: tracing::Span) {
67 // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
68 // we never enter more spans than we exit (or vice versa). If you `mem::forget` the guard, we
69 // can't protect the tracing stack, but that'll just lead to weird logging, no actual
70 // problems.
71 *self = Self(span, std::marker::PhantomData);
72 self.0.with_subscriber(|(id, dispatch)| {
73 dispatch.enter(id);
74 });
75 }
76 }
77
78 impl Drop for SpanGuard {
79 fn drop(&mut self) {
80 self.0.with_subscriber(|(id, dispatch)| {
81 dispatch.exit(id);
82 });
83 }
84 }
85
86 /// A stack frame.
87 pub struct Frame<'mir, 'tcx, Tag: Provenance = AllocId, Extra = ()> {
88 ////////////////////////////////////////////////////////////////////////////////
89 // Function and callsite information
90 ////////////////////////////////////////////////////////////////////////////////
91 /// The MIR for the function called on this frame.
92 pub body: &'mir mir::Body<'tcx>,
93
94 /// The def_id and substs of the current function.
95 pub instance: ty::Instance<'tcx>,
96
97 /// Extra data for the machine.
98 pub extra: Extra,
99
100 ////////////////////////////////////////////////////////////////////////////////
101 // Return place and locals
102 ////////////////////////////////////////////////////////////////////////////////
103 /// Work to perform when returning from this function.
104 pub return_to_block: StackPopCleanup,
105
106 /// The location where the result of the current stack frame should be written to,
107 /// and its layout in the caller.
108 pub return_place: PlaceTy<'tcx, Tag>,
109
110 /// The list of locals for this stack frame, stored in order as
111 /// `[return_ptr, arguments..., variables..., temporaries...]`.
112 /// The locals are stored as `LocalState`s.
113 /// `LocalValue::Dead` represents a local that is currently dead, while a live local
114 /// can either directly contain a `Scalar` or refer to some part of an `Allocation`.
115 pub locals: IndexVec<mir::Local, LocalState<'tcx, Tag>>,
116
117 /// The span of the `tracing` crate is stored here.
118 /// When the guard is dropped, the span is exited. This gives us
119 /// a full stack trace on all tracing statements.
120 tracing_span: SpanGuard,
121
122 ////////////////////////////////////////////////////////////////////////////////
123 // Current position within the function
124 ////////////////////////////////////////////////////////////////////////////////
125 /// If this is `Err`, we are not currently executing any particular statement in
126 /// this frame (can happen e.g. during frame initialization, and during unwinding on
127 /// frames without cleanup code).
128 /// We basically abuse `Result` as `Either`.
129 ///
130 /// Needs to be public because ConstProp does unspeakable things to it.
131 pub loc: Result<mir::Location, Span>,
132 }
133
134 /// What we store about a frame in an interpreter backtrace.
135 #[derive(Debug)]
136 pub struct FrameInfo<'tcx> {
137 pub instance: ty::Instance<'tcx>,
138 pub span: Span,
139 pub lint_root: Option<hir::HirId>,
140 }
141
142 /// Unwind information.
143 #[derive(Clone, Copy, Eq, PartialEq, Debug, HashStable)]
144 pub enum StackPopUnwind {
145 /// The cleanup block.
146 Cleanup(mir::BasicBlock),
147 /// No cleanup needs to be done.
148 Skip,
149 /// Unwinding is not allowed (UB).
150 NotAllowed,
151 }
152
153 #[derive(Clone, Copy, Eq, PartialEq, Debug, HashStable)] // Miri debug-prints these
154 pub enum StackPopCleanup {
155 /// Jump to the next block in the caller, or cause UB if None (that's a function
156 /// that may never return). Also store layout of return place so
157 /// we can validate it at that layout.
158 /// `ret` stores the block we jump to on a normal return, while `unwind`
159 /// stores the block used for cleanup during unwinding.
160 Goto { ret: Option<mir::BasicBlock>, unwind: StackPopUnwind },
161 /// The root frame of the stack: nowhere else to jump to.
162 /// `cleanup` says whether locals are deallocated. Static computation
163 /// wants them leaked to intern what they need (and just throw away
164 /// the entire `ecx` when it is done).
165 Root { cleanup: bool },
166 }
167
168 /// State of a local variable including a memoized layout
169 #[derive(Clone, Debug, PartialEq, Eq, HashStable)]
170 pub struct LocalState<'tcx, Tag: Provenance = AllocId> {
171 pub value: LocalValue<Tag>,
172 /// Don't modify if `Some`; this is only used to prevent computing the layout twice.
173 #[stable_hasher(ignore)]
174 pub layout: Cell<Option<TyAndLayout<'tcx>>>,
175 }
176
177 /// Current value of a local variable
178 #[derive(Copy, Clone, PartialEq, Eq, HashStable, Debug)] // Miri debug-prints these
179 pub enum LocalValue<Tag: Provenance = AllocId> {
180 /// This local is not currently alive, and cannot be used at all.
181 Dead,
182 /// This local is alive but not yet allocated. It cannot be read from or have its address taken,
183 /// and will be allocated on the first write. This is to support unsized locals, where we cannot
184 /// know their size in advance.
185 Unallocated,
186 /// A normal, live local.
187 /// Mostly for convenience, we re-use the `Operand` type here.
188 /// This is an optimization over just always having a pointer here;
189 /// we can thus avoid doing an allocation when the local just stores
190 /// immediate values *and* never has its address taken.
191 Live(Operand<Tag>),
192 }
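// Lifecycle of a local, as implemented further down in this file: all locals start out
// `Unallocated` when the frame is pushed, non-always-live locals are immediately marked `Dead`,
// `StorageLive` resets a local to `Unallocated`, the first write makes it `Live`, and
// `StorageDead` puts it back to `Dead` and deallocates any backing memory.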
193
194 impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> {
195 /// Read the local's value or error if the local is not yet live or not live anymore.
196 ///
197 /// Note: This may only be invoked from the `Machine::access_local` hook and not from
198 /// anywhere else. You may be invalidating machine invariants if you do!
199 pub fn access(&self) -> InterpResult<'tcx, Operand<Tag>> {
200 match self.value {
201 LocalValue::Dead => throw_ub!(DeadLocal),
202 LocalValue::Unallocated => {
203 bug!("The type checker should prevent reading from a never-written local")
204 }
205 LocalValue::Live(val) => Ok(val),
206 }
207 }
208
209 /// Overwrite the local. If the local can be overwritten in place, return a reference
210 /// to do so; otherwise return the `MemPlace` to consult instead.
211 ///
212 /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
213 /// anywhere else. You may be invalidating machine invariants if you do!
214 pub fn access_mut(
215 &mut self,
216 ) -> InterpResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> {
217 match self.value {
218 LocalValue::Dead => throw_ub!(DeadLocal),
219 LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
220 ref mut local @ (LocalValue::Live(Operand::Immediate(_)) | LocalValue::Unallocated) => {
221 Ok(Ok(local))
222 }
223 }
224 }
225 }
226
227 impl<'mir, 'tcx, Tag: Provenance> Frame<'mir, 'tcx, Tag> {
228 pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'mir, 'tcx, Tag, Extra> {
229 Frame {
230 body: self.body,
231 instance: self.instance,
232 return_to_block: self.return_to_block,
233 return_place: self.return_place,
234 locals: self.locals,
235 loc: self.loc,
236 extra,
237 tracing_span: self.tracing_span,
238 }
239 }
240 }
241
242 impl<'mir, 'tcx, Tag: Provenance, Extra> Frame<'mir, 'tcx, Tag, Extra> {
243 /// Get the current location within the Frame.
244 ///
245 /// If this is `Err`, we are not currently executing any particular statement in
246 /// this frame (can happen e.g. during frame initialization, and during unwinding on
247 /// frames without cleanup code).
248 /// We basically abuse `Result` as `Either`.
249 ///
250 /// Used by priroda.
251 pub fn current_loc(&self) -> Result<mir::Location, Span> {
252 self.loc
253 }
254
255 /// Return the `SourceInfo` of the current instruction.
256 pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
257 self.loc.ok().map(|loc| self.body.source_info(loc))
258 }
259
260 pub fn current_span(&self) -> Span {
261 match self.loc {
262 Ok(loc) => self.body.source_info(loc).span,
263 Err(span) => span,
264 }
265 }
266 }
267
268 impl<'tcx> fmt::Display for FrameInfo<'tcx> {
269 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
270 ty::tls::with(|tcx| {
271 if tcx.def_key(self.instance.def_id()).disambiguated_data.data
272 == DefPathData::ClosureExpr
273 {
274 write!(f, "inside closure")?;
275 } else {
276 write!(f, "inside `{}`", self.instance)?;
277 }
278 if !self.span.is_dummy() {
279 let sm = tcx.sess.source_map();
280 let lo = sm.lookup_char_pos(self.span.lo());
281 write!(
282 f,
283 " at {}:{}:{}",
284 sm.filename_for_diagnostics(&lo.file.name),
285 lo.line,
286 lo.col.to_usize() + 1
287 )?;
288 }
289 Ok(())
290 })
291 }
292 }
293
294 impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for InterpCx<'mir, 'tcx, M> {
295 #[inline]
296 fn data_layout(&self) -> &TargetDataLayout {
297 &self.tcx.data_layout
298 }
299 }
300
301 impl<'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpCx<'mir, 'tcx, M>
302 where
303 M: Machine<'mir, 'tcx>,
304 {
305 #[inline]
306 fn tcx(&self) -> TyCtxt<'tcx> {
307 *self.tcx
308 }
309 }
310
311 impl<'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpCx<'mir, 'tcx, M>
312 where
313 M: Machine<'mir, 'tcx>,
314 {
315 fn param_env(&self) -> ty::ParamEnv<'tcx> {
316 self.param_env
317 }
318 }
319
320 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> LayoutOfHelpers<'tcx> for InterpCx<'mir, 'tcx, M> {
321 type LayoutOfResult = InterpResult<'tcx, TyAndLayout<'tcx>>;
322
323 #[inline]
324 fn layout_tcx_at_span(&self) -> Span {
325 // Using the cheap root span for performance.
326 self.tcx.span
327 }
328
329 #[inline]
330 fn handle_layout_err(
331 &self,
332 err: LayoutError<'tcx>,
333 _: Span,
334 _: Ty<'tcx>,
335 ) -> InterpErrorInfo<'tcx> {
336 err_inval!(Layout(err)).into()
337 }
338 }
339
340 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> FnAbiOfHelpers<'tcx> for InterpCx<'mir, 'tcx, M> {
341 type FnAbiOfResult = InterpResult<'tcx, &'tcx FnAbi<'tcx, Ty<'tcx>>>;
342
343 fn handle_fn_abi_err(
344 &self,
345 err: FnAbiError<'tcx>,
346 _span: Span,
347 _fn_abi_request: FnAbiRequest<'tcx>,
348 ) -> InterpErrorInfo<'tcx> {
349 match err {
350 FnAbiError::Layout(err) => err_inval!(Layout(err)).into(),
351 FnAbiError::AdjustForForeignAbi(err) => {
352 err_inval!(FnAbiAdjustForForeignAbi(err)).into()
353 }
354 }
355 }
356 }
357
358 /// Test if it is valid for a MIR assignment to assign `src`-typed place to `dest`-typed value.
359 /// This test should be symmetric, as it is primarily about layout compatibility.
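/// For example, due to subtyping, a place of type `fn(&u32)` can be assigned a value of type
/// `for<'a> fn(&'a u32)`; the two types only differ in their (late-bound) lifetimes.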
360 pub(super) fn mir_assign_valid_types<'tcx>(
361 tcx: TyCtxt<'tcx>,
362 param_env: ParamEnv<'tcx>,
363 src: TyAndLayout<'tcx>,
364 dest: TyAndLayout<'tcx>,
365 ) -> bool {
366 // Type-changing assignments can happen when subtyping is used. While
367 // all normal lifetimes are erased, higher-ranked types with their
368 // late-bound lifetimes are still around and can lead to type
369 // differences. So we compare ignoring lifetimes.
370 if equal_up_to_regions(tcx, param_env, src.ty, dest.ty) {
371 // Make sure the layout is equal, too -- just to be safe. Miri really
372 // needs layout equality. For performance reasons we skip this check when
373 // the types are equal. Equal types *can* have different layouts when
374 // enum downcast is involved (as enum variants carry the type of the
375 // enum), but those should never occur in assignments.
376 if cfg!(debug_assertions) || src.ty != dest.ty {
377 assert_eq!(src.layout, dest.layout);
378 }
379 true
380 } else {
381 false
382 }
383 }
384
385 /// Use the already known layout if given (but sanity check in debug mode),
386 /// or compute the layout.
387 #[cfg_attr(not(debug_assertions), inline(always))]
388 pub(super) fn from_known_layout<'tcx>(
389 tcx: TyCtxtAt<'tcx>,
390 param_env: ParamEnv<'tcx>,
391 known_layout: Option<TyAndLayout<'tcx>>,
392 compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>,
393 ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
394 match known_layout {
395 None => compute(),
396 Some(known_layout) => {
397 if cfg!(debug_assertions) {
398 let check_layout = compute()?;
399 if !mir_assign_valid_types(tcx.tcx, param_env, check_layout, known_layout) {
400 span_bug!(
401 tcx.span,
402 "expected type differs from actual type.\nexpected: {:?}\nactual: {:?}",
403 known_layout.ty,
404 check_layout.ty,
405 );
406 }
407 }
408 Ok(known_layout)
409 }
410 }
411 }
412
413 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
414 pub fn new(
415 tcx: TyCtxt<'tcx>,
416 root_span: Span,
417 param_env: ty::ParamEnv<'tcx>,
418 machine: M,
419 ) -> Self {
420 InterpCx {
421 machine,
422 tcx: tcx.at(root_span),
423 param_env,
424 memory: Memory::new(),
425 recursion_limit: tcx.recursion_limit(),
426 }
427 }
428
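/// The span of the innermost frame that does not require caller location (i.e., skipping
/// `#[track_caller]` frames), falling back to the root span of the evaluation.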
429 #[inline(always)]
430 pub fn cur_span(&self) -> Span {
431 self.stack()
432 .iter()
433 .rev()
434 .find(|frame| !frame.instance.def.requires_caller_location(*self.tcx))
435 .map_or(self.tcx.span, |f| f.current_span())
436 }
437
438 #[inline(always)]
439 pub(crate) fn stack(&self) -> &[Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>] {
440 M::stack(self)
441 }
442
443 #[inline(always)]
444 pub(crate) fn stack_mut(
445 &mut self,
446 ) -> &mut Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>> {
447 M::stack_mut(self)
448 }
449
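/// The index of the topmost (currently executing) stack frame.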
450 #[inline(always)]
451 pub fn frame_idx(&self) -> usize {
452 let stack = self.stack();
453 assert!(!stack.is_empty());
454 stack.len() - 1
455 }
456
457 #[inline(always)]
458 pub fn frame(&self) -> &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
459 self.stack().last().expect("no call frames exist")
460 }
461
462 #[inline(always)]
463 pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
464 self.stack_mut().last_mut().expect("no call frames exist")
465 }
466
467 #[inline(always)]
468 pub(super) fn body(&self) -> &'mir mir::Body<'tcx> {
469 self.frame().body
470 }
471
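/// Sign-extend `value` from the size of the given (signed integer) layout to the full width of
/// `u128`.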
472 #[inline(always)]
473 pub fn sign_extend(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
474 assert!(ty.abi.is_signed());
475 ty.size.sign_extend(value)
476 }
477
478 #[inline(always)]
479 pub fn truncate(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
480 ty.size.truncate(value)
481 }
482
483 #[inline]
484 pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
485 ty.is_freeze(self.tcx, self.param_env)
486 }
487
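/// Fetch the MIR body (or one of its promoteds) for the given instance, refusing to continue if
/// type-checking of that body reported errors.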
488 pub fn load_mir(
489 &self,
490 instance: ty::InstanceDef<'tcx>,
491 promoted: Option<mir::Promoted>,
492 ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
493 let def = instance.with_opt_param();
494 trace!("load mir(instance={:?}, promoted={:?})", instance, promoted);
495 let body = if let Some(promoted) = promoted {
496 &self.tcx.promoted_mir_opt_const_arg(def)[promoted]
497 } else {
498 M::load_mir(self, instance)?
499 };
500 // do not continue if typeck errors occurred (can only occur in local crate)
501 if let Some(err) = body.tainted_by_errors {
502 throw_inval!(AlreadyReported(err));
503 }
504 Ok(body)
505 }
506
507 /// Call this on things you got out of the MIR (so it is as generic as the current
508 /// stack frame), to bring it into the proper environment for this interpreter.
509 pub(super) fn subst_from_current_frame_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
510 &self,
511 value: T,
512 ) -> Result<T, InterpError<'tcx>> {
513 self.subst_from_frame_and_normalize_erasing_regions(self.frame(), value)
514 }
515
516 /// Call this on things you got out of the MIR (so it is as generic as the provided
517 /// stack frame), to bring it into the proper environment for this interpreter.
518 pub(super) fn subst_from_frame_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
519 &self,
520 frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
521 value: T,
522 ) -> Result<T, InterpError<'tcx>> {
523 frame
524 .instance
525 .try_subst_mir_and_normalize_erasing_regions(*self.tcx, self.param_env, value)
526 .map_err(|e| {
527 self.tcx.sess.delay_span_bug(
528 self.cur_span(),
529 format!("failed to normalize {}", e.get_type_for_failure()).as_str(),
530 );
531
532 InterpError::InvalidProgram(InvalidProgramInfo::TooGeneric)
533 })
534 }
535
536 /// The `substs` are assumed to already be in our interpreter "universe" (param_env).
537 pub(super) fn resolve(
538 &self,
539 def: ty::WithOptConstParam<DefId>,
540 substs: SubstsRef<'tcx>,
541 ) -> InterpResult<'tcx, ty::Instance<'tcx>> {
542 trace!("resolve: {:?}, {:#?}", def, substs);
543 trace!("param_env: {:#?}", self.param_env);
544 trace!("substs: {:#?}", substs);
545 match ty::Instance::resolve_opt_const_arg(*self.tcx, self.param_env, def, substs) {
546 Ok(Some(instance)) => Ok(instance),
547 Ok(None) => throw_inval!(TooGeneric),
548
549 // FIXME(eddyb) this could be a bit more specific than `AlreadyReported`.
550 Err(error_reported) => throw_inval!(AlreadyReported(error_reported)),
551 }
552 }
553
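/// Return the layout of the given local in the given frame, using the caller-provided `layout`
/// if there is one and caching the result in the frame.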
554 #[inline(always)]
555 pub fn layout_of_local(
556 &self,
557 frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
558 local: mir::Local,
559 layout: Option<TyAndLayout<'tcx>>,
560 ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
561 // `const_prop` runs into this with an invalid (empty) frame, so we
562 // have to support that case (mostly by skipping all caching).
563 match frame.locals.get(local).and_then(|state| state.layout.get()) {
564 None => {
565 let layout = from_known_layout(self.tcx, self.param_env, layout, || {
566 let local_ty = frame.body.local_decls[local].ty;
567 let local_ty =
568 self.subst_from_frame_and_normalize_erasing_regions(frame, local_ty)?;
569 self.layout_of(local_ty)
570 })?;
571 if let Some(state) = frame.locals.get(local) {
572 // Layouts of locals are requested a lot, so we cache them.
573 state.layout.set(Some(layout));
574 }
575 Ok(layout)
576 }
577 Some(layout) => Ok(layout),
578 }
579 }
580
581 /// Returns the actual dynamic size and alignment of the place at the given type.
582 /// Only the "meta" (metadata) part of the place matters.
583 /// This can fail to provide an answer for extern types.
584 pub(super) fn size_and_align_of(
585 &self,
586 metadata: &MemPlaceMeta<M::PointerTag>,
587 layout: &TyAndLayout<'tcx>,
588 ) -> InterpResult<'tcx, Option<(Size, Align)>> {
589 if !layout.is_unsized() {
590 return Ok(Some((layout.size, layout.align.abi)));
591 }
592 match layout.ty.kind() {
593 ty::Adt(..) | ty::Tuple(..) => {
594 // First get the size of all statically known fields.
595 // Don't use type_of::sizing_type_of because that expects t to be sized,
596 // and it also rounds up to alignment, which we want to avoid,
597 // as the unsized field's alignment could be smaller.
598 assert!(!layout.ty.is_simd());
599 assert!(layout.fields.count() > 0);
600 trace!("DST layout: {:?}", layout);
601
602 let sized_size = layout.fields.offset(layout.fields.count() - 1);
603 let sized_align = layout.align.abi;
604 trace!(
605 "DST {} statically sized prefix size: {:?} align: {:?}",
606 layout.ty,
607 sized_size,
608 sized_align
609 );
610
611 // Recurse to get the size of the dynamically sized field (must be
612 // the last field). Can't have foreign types here, how would we
613 // adjust alignment and size for them?
614 let field = layout.field(self, layout.fields.count() - 1);
615 let Some((unsized_size, unsized_align)) = self.size_and_align_of(metadata, &field)? else {
616 // A field with an extern type. We don't know the actual dynamic size
617 // or the alignment.
618 return Ok(None);
619 };
620
621 // FIXME (#26403, #27023): We should be adding padding
622 // to `sized_size` (to accommodate the `unsized_align`
623 // required of the unsized field that follows) before
624 // summing it with `sized_size`. (Note that since #26403
625 // is unfixed, we do not yet add the necessary padding
626 // here. But this is where the add would go.)
627
628 // Return the sum of sizes and max of aligns.
629 let size = sized_size + unsized_size; // `Size` addition
630
631 // Choose max of two known alignments (combined value must
632 // be aligned according to more restrictive of the two).
633 let align = sized_align.max(unsized_align);
634
635 // Issue #27023: must add any necessary padding to `size`
636 // (to make it a multiple of `align`) before returning it.
637 let size = size.align_to(align);
638
639 // Check if this brought us over the size limit.
640 if size > self.max_size_of_val() {
641 throw_ub!(InvalidMeta("total size is bigger than largest supported object"));
642 }
643 Ok(Some((size, align)))
644 }
645 ty::Dynamic(..) => {
646 let vtable = self.scalar_to_ptr(metadata.unwrap_meta())?;
647 // Read size and align from vtable (already checks size).
648 Ok(Some(self.read_size_and_align_from_vtable(vtable)?))
649 }
650
651 ty::Slice(_) | ty::Str => {
652 let len = metadata.unwrap_meta().to_machine_usize(self)?;
653 let elem = layout.field(self, 0);
654
655 // Make sure the slice is not too big.
656 let size = elem.size.bytes().saturating_mul(len); // we rely on `max_size_of_val` being smaller than `u64::MAX`.
657 let size = Size::from_bytes(size);
658 if size > self.max_size_of_val() {
659 throw_ub!(InvalidMeta("slice is bigger than largest supported object"));
660 }
661 Ok(Some((size, elem.align.abi)))
662 }
663
664 ty::Foreign(_) => Ok(None),
665
666 _ => span_bug!(self.cur_span(), "size_and_align_of::<{:?}> not supported", layout.ty),
667 }
668 }
669 #[inline]
670 pub fn size_and_align_of_mplace(
671 &self,
672 mplace: &MPlaceTy<'tcx, M::PointerTag>,
673 ) -> InterpResult<'tcx, Option<(Size, Align)>> {
674 self.size_and_align_of(&mplace.meta, &mplace.layout)
675 }
676
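/// Push a new stack frame for `instance`, check that all of its `required_consts` evaluate
/// successfully, and set up its locals (always-live locals start out unallocated, all others
/// start out dead).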
677 #[instrument(skip(self, body, return_place, return_to_block), level = "debug")]
678 pub fn push_stack_frame(
679 &mut self,
680 instance: ty::Instance<'tcx>,
681 body: &'mir mir::Body<'tcx>,
682 return_place: &PlaceTy<'tcx, M::PointerTag>,
683 return_to_block: StackPopCleanup,
684 ) -> InterpResult<'tcx> {
685 trace!("body: {:#?}", body);
686 // first push a stack frame so we have access to the local substs
687 let pre_frame = Frame {
688 body,
689 loc: Err(body.span), // Span used for errors caused during preamble.
690 return_to_block,
691 return_place: *return_place,
692 // empty local array, we fill it in below, after we are inside the stack frame and
693 // all methods actually know about the frame
694 locals: IndexVec::new(),
695 instance,
696 tracing_span: SpanGuard::new(),
697 extra: (),
698 };
699 let frame = M::init_frame_extra(self, pre_frame)?;
700 self.stack_mut().push(frame);
701
702 // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
703 for const_ in &body.required_consts {
704 let span = const_.span;
705 let const_ =
706 self.subst_from_current_frame_and_normalize_erasing_regions(const_.literal)?;
707 self.mir_const_to_op(&const_, None).map_err(|err| {
708 // If there was an error, set the span of the current frame to this constant.
709 // We avoid doing this when evaluation succeeds.
710 self.frame_mut().loc = Err(span);
711 err
712 })?;
713 }
714
715 // Locals are initially unallocated.
716 let dummy = LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) };
717 let mut locals = IndexVec::from_elem(dummy, &body.local_decls);
718
719 // Now mark those locals as dead that we do not want to initialize:
720 // locals that have `Storage*` annotations are dead on function entry.
721 let always_live = always_live_locals(self.body());
722 for local in locals.indices() {
723 if !always_live.contains(local) {
724 locals[local].value = LocalValue::Dead;
725 }
726 }
727 // done
728 self.frame_mut().locals = locals;
729 M::after_stack_push(self)?;
730 self.frame_mut().loc = Ok(mir::Location::START);
731
732 let span = info_span!("frame", "{}", instance);
733 self.frame_mut().tracing_span.enter(span);
734
735 Ok(())
736 }
737
738 /// Jump to the given block.
739 #[inline]
740 pub fn go_to_block(&mut self, target: mir::BasicBlock) {
741 self.frame_mut().loc = Ok(mir::Location { block: target, statement_index: 0 });
742 }
743
744 /// *Return* to the given `target` basic block.
745 /// Do *not* use for unwinding! Use `unwind_to_block` instead.
746 ///
747 /// If `target` is `None`, that indicates the function cannot return, so we raise UB.
748 pub fn return_to_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
749 if let Some(target) = target {
750 self.go_to_block(target);
751 Ok(())
752 } else {
753 throw_ub!(Unreachable)
754 }
755 }
756
757 /// *Unwind* to the given `target` basic block.
758 /// Do *not* use for returning! Use `return_to_block` instead.
759 ///
760 /// If `target` is `StackPopUnwind::Skip`, that indicates the function does not need cleanup
761 /// during unwinding, and we will just keep propagating that upwards.
762 ///
763 /// If `target` is `StackPopUnwind::NotAllowed`, that indicates the function does not allow
764 /// unwinding, and doing so is UB.
765 pub fn unwind_to_block(&mut self, target: StackPopUnwind) -> InterpResult<'tcx> {
766 self.frame_mut().loc = match target {
767 StackPopUnwind::Cleanup(block) => Ok(mir::Location { block, statement_index: 0 }),
768 StackPopUnwind::Skip => Err(self.frame_mut().body.span),
769 StackPopUnwind::NotAllowed => {
770 throw_ub_format!("unwinding past a stack frame that does not allow unwinding")
771 }
772 };
773 Ok(())
774 }
775
776 /// Pops the current frame from the stack, deallocating the
777 /// memory for allocated locals.
778 ///
779 /// If `unwinding` is `false`, then we are performing a normal return
780 /// from a function. In this case, we jump back into the frame of the caller,
781 /// and continue execution as normal.
782 ///
783 /// If `unwinding` is `true`, then we are in the middle of a panic,
784 /// and need to unwind this frame. In this case, we jump to the
785 /// `cleanup` block for the function, which is responsible for running
786 /// `Drop` impls for any locals that have been initialized at this point.
787 /// The cleanup block ends with a special `Resume` terminator, which will
788 /// cause us to continue unwinding.
789 #[instrument(skip(self), level = "debug")]
790 pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx> {
791 info!(
792 "popping stack frame ({})",
793 if unwinding { "during unwinding" } else { "returning from function" }
794 );
795
796 // Sanity check `unwinding`.
797 assert_eq!(
798 unwinding,
799 match self.frame().loc {
800 Ok(loc) => self.body().basic_blocks()[loc.block].is_cleanup,
801 Err(_) => true,
802 }
803 );
804
805 if unwinding && self.frame_idx() == 0 {
806 throw_ub_format!("unwinding past the topmost frame of the stack");
807 }
808
809 let frame =
810 self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");
811
812 if !unwinding {
813 let op = self.access_local(&frame, mir::RETURN_PLACE, None)?;
814 self.copy_op_transmute(&op, &frame.return_place)?;
815 trace!("{:?}", self.dump_place(*frame.return_place));
816 }
817
818 let return_to_block = frame.return_to_block;
819
820 // Now where do we jump next?
821
822 // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
823 // In that case, we return early. We also avoid validation in that case,
824 // because this is CTFE and the final value will be thoroughly validated anyway.
825 let cleanup = match return_to_block {
826 StackPopCleanup::Goto { .. } => true,
827 StackPopCleanup::Root { cleanup, .. } => cleanup,
828 };
829
830 if !cleanup {
831 assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
832 assert!(!unwinding, "tried to skip cleanup during unwinding");
833 // Leak the locals, skip validation, skip machine hook.
834 return Ok(());
835 }
836
837 trace!("locals: {:#?}", frame.locals);
838
839 // Cleanup: deallocate all locals that are backed by an allocation.
840 for local in &frame.locals {
841 self.deallocate_local(local.value)?;
842 }
843
844 if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump {
845 // The hook already did everything.
846 // We want to skip the `info!` below, hence early return.
847 return Ok(());
848 }
849 // Normal return, figure out where to jump.
850 if unwinding {
851 // Follow the unwind edge.
852 let unwind = match return_to_block {
853 StackPopCleanup::Goto { unwind, .. } => unwind,
854 StackPopCleanup::Root { .. } => {
855 panic!("encountered StackPopCleanup::Root when unwinding!")
856 }
857 };
858 self.unwind_to_block(unwind)
859 } else {
860 // Follow the normal return edge.
861 match return_to_block {
862 StackPopCleanup::Goto { ret, .. } => self.return_to_block(ret),
863 StackPopCleanup::Root { .. } => {
864 assert!(
865 self.stack().is_empty(),
866 "only the topmost frame can have StackPopCleanup::Root"
867 );
868 Ok(())
869 }
870 }
871 }
872 }
873
874 /// Mark a local's storage as live, killing the previous content.
875 pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
876 assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
877 trace!("{:?} is now live", local);
878
879 let local_val = LocalValue::Unallocated;
880 // StorageLive expects the local to be dead, and marks it live.
881 let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
882 if !matches!(old, LocalValue::Dead) {
883 throw_ub_format!("StorageLive on a local that was already live");
884 }
885 Ok(())
886 }
887
888 pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> {
889 assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
890 trace!("{:?} is now dead", local);
891
892 // It is entirely okay for this local to be already dead (at least that's how we currently generate MIR)
893 let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead);
894 self.deallocate_local(old)?;
895 Ok(())
896 }
897
898 #[instrument(skip(self), level = "debug")]
899 fn deallocate_local(&mut self, local: LocalValue<M::PointerTag>) -> InterpResult<'tcx> {
900 if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
901 // All locals have a backing allocation, even if the allocation is empty
902 // due to the local having ZST type. Hence we can `unwrap`.
903 trace!(
904 "deallocating local {:?}: {:?}",
905 local,
906 // Locals always have an `alloc_id` (they are never the result of an int2ptr).
907 self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap())
908 );
909 self.deallocate_ptr(ptr, None, MemoryKind::Stack)?;
910 };
911 Ok(())
912 }
913
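/// Evaluate a global (a constant or a static) to its backing allocation via the query system,
/// and return a place pointing at the result.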
914 pub fn eval_to_allocation(
915 &self,
916 gid: GlobalId<'tcx>,
917 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
918 // For statics we pick `ParamEnv::reveal_all`, because statics don't have generics
919 // and thus don't care about the parameter environment. While we could just use
920 // `self.param_env`, that would mean we invoke the query to evaluate the static
921 // with different parameter environments, thus causing the static to be evaluated
922 // multiple times.
923 let param_env = if self.tcx.is_static(gid.instance.def_id()) {
924 ty::ParamEnv::reveal_all()
925 } else {
926 self.param_env
927 };
928 let param_env = param_env.with_const();
929 // Use a precise span for better cycle errors.
930 let val = self.tcx.at(self.cur_span()).eval_to_allocation_raw(param_env.and(gid))?;
931 self.raw_const_to_mplace(val)
932 }
933
934 #[must_use]
935 pub fn dump_place(&self, place: Place<M::PointerTag>) -> PlacePrinter<'_, 'mir, 'tcx, M> {
936 PlacePrinter { ecx: self, place }
937 }
938
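/// Capture a stacktrace of the current call stack (innermost frame first), skipping any
/// `#[track_caller]` frames at the top.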
939 #[must_use]
940 pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> {
941 let mut frames = Vec::new();
942 for frame in self
943 .stack()
944 .iter()
945 .rev()
946 .skip_while(|frame| frame.instance.def.requires_caller_location(*self.tcx))
947 {
948 let lint_root = frame.current_source_info().and_then(|source_info| {
949 match &frame.body.source_scopes[source_info.scope].local_data {
950 mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
951 mir::ClearCrossCrate::Clear => None,
952 }
953 });
954 let span = frame.current_span();
955
956 frames.push(FrameInfo { span, instance: frame.instance, lint_root });
957 }
958 trace!("generate stacktrace: {:#?}", frames);
959 frames
960 }
961 }
962
963 #[doc(hidden)]
964 /// Helper struct for the `dump_place` function.
965 pub struct PlacePrinter<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
966 ecx: &'a InterpCx<'mir, 'tcx, M>,
967 place: Place<M::PointerTag>,
968 }
969
970 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
971 for PlacePrinter<'a, 'mir, 'tcx, M>
972 {
973 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
974 match self.place {
975 Place::Local { frame, local } => {
976 let mut allocs = Vec::new();
977 write!(fmt, "{:?}", local)?;
978 if frame != self.ecx.frame_idx() {
979 write!(fmt, " ({} frames up)", self.ecx.frame_idx() - frame)?;
980 }
981 write!(fmt, ":")?;
982
983 match self.ecx.stack()[frame].locals[local].value {
984 LocalValue::Dead => write!(fmt, " is dead")?,
985 LocalValue::Unallocated => write!(fmt, " is unallocated")?,
986 LocalValue::Live(Operand::Indirect(mplace)) => {
987 write!(
988 fmt,
989 " by align({}){} ref {:?}:",
990 mplace.align.bytes(),
991 match mplace.meta {
992 MemPlaceMeta::Meta(meta) => format!(" meta({:?})", meta),
993 MemPlaceMeta::Poison | MemPlaceMeta::None => String::new(),
994 },
995 mplace.ptr,
996 )?;
997 allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
998 }
999 LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
1000 write!(fmt, " {:?}", val)?;
1001 if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val {
1002 allocs.push(ptr.provenance.get_alloc_id());
1003 }
1004 }
1005 LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
1006 write!(fmt, " ({:?}, {:?})", val1, val2)?;
1007 if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val1 {
1008 allocs.push(ptr.provenance.get_alloc_id());
1009 }
1010 if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val2 {
1011 allocs.push(ptr.provenance.get_alloc_id());
1012 }
1013 }
1014 }
1015
1016 write!(fmt, ": {:?}", self.ecx.dump_allocs(allocs.into_iter().flatten().collect()))
1017 }
1018 Place::Ptr(mplace) => match mplace.ptr.provenance.and_then(Provenance::get_alloc_id) {
1019 Some(alloc_id) => write!(
1020 fmt,
1021 "by align({}) ref {:?}: {:?}",
1022 mplace.align.bytes(),
1023 mplace.ptr,
1024 self.ecx.dump_alloc(alloc_id)
1025 ),
1026 ptr => write!(fmt, " integral by ref: {:?}", ptr),
1027 },
1028 }
1029 }
1030 }
1031
1032 impl<'ctx, 'mir, 'tcx, Tag: Provenance, Extra> HashStable<StableHashingContext<'ctx>>
1033 for Frame<'mir, 'tcx, Tag, Extra>
1034 where
1035 Extra: HashStable<StableHashingContext<'ctx>>,
1036 Tag: HashStable<StableHashingContext<'ctx>>,
1037 {
1038 fn hash_stable(&self, hcx: &mut StableHashingContext<'ctx>, hasher: &mut StableHasher) {
1039 // Exhaustive match on fields to make sure we forget no field.
1040 let Frame {
1041 body,
1042 instance,
1043 return_to_block,
1044 return_place,
1045 locals,
1046 loc,
1047 extra,
1048 tracing_span: _,
1049 } = self;
1050 body.hash_stable(hcx, hasher);
1051 instance.hash_stable(hcx, hasher);
1052 return_to_block.hash_stable(hcx, hasher);
1053 return_place.hash_stable(hcx, hasher);
1054 locals.hash_stable(hcx, hasher);
1055 loc.hash_stable(hcx, hasher);
1056 extra.hash_stable(hcx, hasher);
1057 }
1058 }