]>
Commit | Line | Data |
---|---|---|
9fa01778 | 1 | use std::cell::Cell; |
3dfed10e | 2 | use std::fmt; |
8faf50e0 | 3 | use std::mem; |
ff7c6d11 | 4 | |
487cf647 FG |
5 | use either::{Either, Left, Right}; |
6 | ||
fc512014 | 7 | use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData}; |
dfeec247 | 8 | use rustc_index::vec::IndexVec; |
ba9703b0 | 9 | use rustc_middle::mir; |
353b0b11 | 10 | use rustc_middle::mir::interpret::{ErrorHandled, InterpError}; |
a2a8927a XL |
11 | use rustc_middle::ty::layout::{ |
12 | self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers, | |
13 | TyAndLayout, | |
14 | }; | |
ba9703b0 | 15 | use rustc_middle::ty::{ |
f035d41b | 16 | self, query::TyCtxtAt, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable, |
ba9703b0 | 17 | }; |
064997fb | 18 | use rustc_mir_dataflow::storage::always_storage_live_locals; |
136023e0 | 19 | use rustc_session::Limit; |
487cf647 | 20 | use rustc_span::Span; |
a2a8927a | 21 | use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayout}; |
ff7c6d11 | 22 | |
b7449926 | 23 | use super::{ |
04454e1e FG |
24 | AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace, |
25 | MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, PointerArithmetic, Provenance, | |
f2b60f7d | 26 | Scalar, StackPopJump, |
b7449926 | 27 | }; |
487cf647 | 28 | use crate::util; |
ff7c6d11 | 29 | |
/// The interpreter's evaluation context: everything needed to interpret MIR
/// (the machine, the type context, the parameter environment, and memory).
pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    ///
    /// Note: the stack is provided by the machine.
    pub machine: M,

    /// The results of the type checker, from rustc.
    /// The span in this is the "root" of the evaluation, i.e., the const
    /// we are evaluating (if this is CTFE).
    pub tcx: TyCtxtAt<'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub(crate) param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'mir, 'tcx, M>,

    /// The recursion limit (cached from `tcx.recursion_limit(())`)
    pub recursion_limit: Limit,
}
50 | ||
// The PhantomData exists to prevent this type from being `Send`. If it were sent across a thread
// boundary and dropped in the other thread, it would exit the span in the other thread.
struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);
54 | ||
impl SpanGuard {
    /// By default a `SpanGuard` does nothing.
    fn new() -> Self {
        Self(tracing::Span::none(), std::marker::PhantomData)
    }

    /// If a span is entered, we exit the previous span (if any, normally none) and enter the
    /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
    /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
    /// been pushed.
    fn enter(&mut self, span: tracing::Span) {
        // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
        // we never enter or exit more spans than vice versa. Unless you `mem::leak`, then we
        // can't protect the tracing stack, but that'll just lead to weird logging, no actual
        // problems.
        *self = Self(span, std::marker::PhantomData);
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.enter(id);
        });
    }
}
76 | ||
impl Drop for SpanGuard {
    fn drop(&mut self) {
        // Exit the span when the guard (and thus its frame) goes away, keeping
        // enters and exits balanced on the tracing stack.
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.exit(id);
        });
    }
}
84 | ||
/// A stack frame.
pub struct Frame<'mir, 'tcx, Prov: Provenance = AllocId, Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub body: &'mir mir::Body<'tcx>,

    /// The def_id and substs of the current function.
    pub instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Work to perform when returning from this function.
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller.
    pub return_place: PlaceTy<'tcx, Prov>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `Option<Value>`s.
    /// `None` represents a local that is currently dead, while a live local
    /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
    ///
    /// Do *not* access this directly; always go through the machine hook!
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Prov>>,

    /// The span of the `tracing` crate is stored here.
    /// When the guard is dropped, the span is exited. This gives us
    /// a full stack trace on all tracing statements.
    tracing_span: SpanGuard,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    ///
    /// Needs to be public because ConstProp does unspeakable things to it.
    pub loc: Either<mir::Location, Span>,
}
133 | ||
/// What we store about a frame in an interpreter backtrace.
#[derive(Debug)]
pub struct FrameInfo<'tcx> {
    pub instance: ty::Instance<'tcx>,
    pub span: Span,
    /// `HirId` to attach lints to, if available (local crate only).
    pub lint_root: Option<hir::HirId>,
}
141 | ||
#[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these
pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if None (that's a function
    /// that may never return). Also store layout of return place so
    /// we can validate it at that layout.
    /// `ret` stores the block we jump to on a normal return, while `unwind`
    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: mir::UnwindAction },
    /// The root frame of the stack: nowhere else to jump to.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
    Root { cleanup: bool },
}
156 | ||
/// State of a local variable including a memoized layout
#[derive(Clone, Debug)]
pub struct LocalState<'tcx, Prov: Provenance = AllocId> {
    pub value: LocalValue<Prov>,
    /// Don't modify if `Some`, this is only used to prevent computing the layout twice
    pub layout: Cell<Option<TyAndLayout<'tcx>>>,
}
164 | ||
/// Current value of a local variable
#[derive(Copy, Clone, Debug)] // Miri debug-prints these
pub enum LocalValue<Prov: Provenance = AllocId> {
    /// This local is not currently alive, and cannot be used at all.
    Dead,
    /// A normal, live local.
    /// Mostly for convenience, we re-use the `Operand` type here.
    /// This is an optimization over just always having a pointer here;
    /// we can thus avoid doing an allocation when the local just stores
    /// immediate values *and* never has its address taken.
    Live(Operand<Prov>),
}
8faf50e0 | 177 | |
impl<'tcx, Prov: Provenance + 'static> LocalState<'tcx, Prov> {
    /// Read the local's value or error if the local is not yet live or not live anymore.
    #[inline]
    pub fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
        match &self.value {
            LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
            LocalValue::Live(val) => Ok(val),
        }
    }

    /// Overwrite the local. If the local can be overwritten in place, return a reference
    /// to do so; otherwise return the `MemPlace` to consult instead.
    ///
    /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
    /// anywhere else. You may be invalidating machine invariants if you do!
    #[inline]
    pub fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
        match &mut self.value {
            LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
            LocalValue::Live(val) => Ok(val),
        }
    }
}
201 | ||
064997fb FG |
202 | impl<'mir, 'tcx, Prov: Provenance> Frame<'mir, 'tcx, Prov> { |
203 | pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'mir, 'tcx, Prov, Extra> { | |
ba9703b0 XL |
204 | Frame { |
205 | body: self.body, | |
206 | instance: self.instance, | |
207 | return_to_block: self.return_to_block, | |
208 | return_place: self.return_place, | |
209 | locals: self.locals, | |
f9f354fc | 210 | loc: self.loc, |
ba9703b0 | 211 | extra, |
29967ef6 | 212 | tracing_span: self.tracing_span, |
8faf50e0 | 213 | } |
8faf50e0 XL |
214 | } |
215 | } | |
216 | ||
impl<'mir, 'tcx, Prov: Provenance, Extra> Frame<'mir, 'tcx, Prov, Extra> {
    /// Get the current location within the Frame.
    ///
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    ///
    /// Used by priroda.
    pub fn current_loc(&self) -> Either<mir::Location, Span> {
        self.loc
    }

    /// Return the `SourceInfo` of the current instruction.
    /// `None` when not currently executing a statement (`loc` is `Right`).
    pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
        self.loc.left().map(|loc| self.body.source_info(loc))
    }

    /// The span of the current instruction, or the stored fallback span when we
    /// are not executing any particular statement.
    pub fn current_span(&self) -> Span {
        match self.loc {
            Left(loc) => self.body.source_info(loc).span,
            Right(span) => span,
        }
    }

    /// The `HirId` to attach lints to at the current position, if the scope's
    /// lint data is available (it is `Clear` for bodies from other crates).
    pub fn lint_root(&self) -> Option<hir::HirId> {
        self.current_source_info().and_then(|source_info| {
            match &self.body.source_scopes[source_info.scope].local_data {
                mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
                mir::ClearCrossCrate::Clear => None,
            }
        })
    }
}
250 | ||
impl<'tcx> fmt::Display for FrameInfo<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ty::tls::with(|tcx| {
            // Closures get a generic "inside closure" label; everything else is
            // named by its instance.
            if tcx.def_key(self.instance.def_id()).disambiguated_data.data
                == DefPathData::ClosureExpr
            {
                write!(f, "inside closure")
            } else {
                // Note: this triggers a `good_path_bug` state, which means that if we ever get here
                // we must emit a diagnostic. We should never display a `FrameInfo` unless we
                // actually want to emit a warning or error to the user.
                write!(f, "inside `{}`", self.instance)
            }
        })
    }
}
267 | ||
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for InterpCx<'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        // Delegate to the target's data layout stored in the `TyCtxt`.
        &self.tcx.data_layout
    }
}
274 | ||
impl<'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        // `self.tcx` is a `TyCtxtAt`; deref to drop the span.
        *self.tcx
    }
}
284 | ||
impl<'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.param_env
    }
}
293 | ||
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> LayoutOfHelpers<'tcx> for InterpCx<'mir, 'tcx, M> {
    /// Layout queries during interpretation are fallible and surface as interp errors.
    type LayoutOfResult = InterpResult<'tcx, TyAndLayout<'tcx>>;

    #[inline]
    fn layout_tcx_at_span(&self) -> Span {
        // Using the cheap root span for performance.
        self.tcx.span
    }

    #[inline]
    fn handle_layout_err(
        &self,
        err: LayoutError<'tcx>,
        _: Span,
        _: Ty<'tcx>,
    ) -> InterpErrorInfo<'tcx> {
        // Layout errors become "invalid program" interp errors.
        err_inval!(Layout(err)).into()
    }
}
313 | ||
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> FnAbiOfHelpers<'tcx> for InterpCx<'mir, 'tcx, M> {
    /// Fn-ABI queries during interpretation are fallible and surface as interp errors.
    type FnAbiOfResult = InterpResult<'tcx, &'tcx FnAbi<'tcx, Ty<'tcx>>>;

    fn handle_fn_abi_err(
        &self,
        err: FnAbiError<'tcx>,
        _span: Span,
        _fn_abi_request: FnAbiRequest<'tcx>,
    ) -> InterpErrorInfo<'tcx> {
        // Map each ABI error variant to the corresponding "invalid program" error.
        match err {
            FnAbiError::Layout(err) => err_inval!(Layout(err)).into(),
            FnAbiError::AdjustForForeignAbi(err) => {
                err_inval!(FnAbiAdjustForForeignAbi(err)).into()
            }
        }
    }
}
331 | ||
ba9703b0 XL |
332 | /// Test if it is valid for a MIR assignment to assign `src`-typed place to `dest`-typed value. |
333 | /// This test should be symmetric, as it is primarily about layout compatibility. | |
334 | pub(super) fn mir_assign_valid_types<'tcx>( | |
335 | tcx: TyCtxt<'tcx>, | |
f035d41b | 336 | param_env: ParamEnv<'tcx>, |
ba9703b0 XL |
337 | src: TyAndLayout<'tcx>, |
338 | dest: TyAndLayout<'tcx>, | |
339 | ) -> bool { | |
f035d41b XL |
340 | // Type-changing assignments can happen when subtyping is used. While |
341 | // all normal lifetimes are erased, higher-ranked types with their | |
342 | // late-bound lifetimes are still around and can lead to type | |
487cf647 FG |
343 | // differences. |
344 | if util::is_subtype(tcx, param_env, src.ty, dest.ty) { | |
f035d41b XL |
345 | // Make sure the layout is equal, too -- just to be safe. Miri really |
346 | // needs layout equality. For performance reason we skip this check when | |
347 | // the types are equal. Equal types *can* have different layouts when | |
348 | // enum downcast is involved (as enum variants carry the type of the | |
349 | // enum), but those should never occur in assignments. | |
350 | if cfg!(debug_assertions) || src.ty != dest.ty { | |
351 | assert_eq!(src.layout, dest.layout); | |
352 | } | |
353 | true | |
354 | } else { | |
355 | false | |
356 | } | |
ba9703b0 XL |
357 | } |
358 | ||
359 | /// Use the already known layout if given (but sanity check in debug mode), | |
360 | /// or compute the layout. | |
361 | #[cfg_attr(not(debug_assertions), inline(always))] | |
362 | pub(super) fn from_known_layout<'tcx>( | |
363 | tcx: TyCtxtAt<'tcx>, | |
f035d41b | 364 | param_env: ParamEnv<'tcx>, |
ba9703b0 XL |
365 | known_layout: Option<TyAndLayout<'tcx>>, |
366 | compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>, | |
367 | ) -> InterpResult<'tcx, TyAndLayout<'tcx>> { | |
368 | match known_layout { | |
369 | None => compute(), | |
370 | Some(known_layout) => { | |
371 | if cfg!(debug_assertions) { | |
372 | let check_layout = compute()?; | |
f035d41b | 373 | if !mir_assign_valid_types(tcx.tcx, param_env, check_layout, known_layout) { |
ba9703b0 XL |
374 | span_bug!( |
375 | tcx.span, | |
376 | "expected type differs from actual type.\nexpected: {:?}\nactual: {:?}", | |
377 | known_layout.ty, | |
378 | check_layout.ty, | |
379 | ); | |
380 | } | |
381 | } | |
382 | Ok(known_layout) | |
383 | } | |
384 | } | |
385 | } | |
386 | ||
387 | impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { | |
    /// Create a fresh evaluation context rooted at `root_span`, with an empty
    /// memory and the machine-provided (empty) stack.
    pub fn new(
        tcx: TyCtxt<'tcx>,
        root_span: Span,
        param_env: ty::ParamEnv<'tcx>,
        machine: M,
    ) -> Self {
        InterpCx {
            machine,
            tcx: tcx.at(root_span),
            param_env,
            memory: Memory::new(),
            // Cached so we don't query the limit on every function call.
            recursion_limit: tcx.recursion_limit(),
        }
    }
402 | ||
ba9703b0 | 403 | #[inline(always)] |
f035d41b | 404 | pub fn cur_span(&self) -> Span { |
064997fb FG |
405 | // This deliberately does *not* honor `requires_caller_location` since it is used for much |
406 | // more than just panics. | |
407 | self.stack().last().map_or(self.tcx.span, |f| f.current_span()) | |
ba9703b0 XL |
408 | } |
409 | ||
    /// The current call stack. The stack storage is provided by the machine.
    #[inline(always)]
    pub(crate) fn stack(&self) -> &[Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>] {
        M::stack(self)
    }
414 | ||
    /// Mutable access to the machine-provided call stack.
    #[inline(always)]
    pub(crate) fn stack_mut(
        &mut self,
    ) -> &mut Vec<Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>> {
        M::stack_mut(self)
    }
421 | ||
    /// Index of the topmost (currently executing) frame.
    /// Panics if the stack is empty.
    #[inline(always)]
    pub fn frame_idx(&self) -> usize {
        let stack = self.stack();
        assert!(!stack.is_empty());
        stack.len() - 1
    }
428 | ||
    /// The topmost (currently executing) stack frame. Panics if the stack is empty.
    #[inline(always)]
    pub fn frame(&self) -> &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra> {
        self.stack().last().expect("no call frames exist")
    }
433 | ||
    /// Mutable access to the topmost stack frame. Panics if the stack is empty.
    #[inline(always)]
    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::Provenance, M::FrameExtra> {
        self.stack_mut().last_mut().expect("no call frames exist")
    }
b7449926 | 438 | |
    /// The MIR body of the function currently being executed.
    #[inline(always)]
    pub(super) fn body(&self) -> &'mir mir::Body<'tcx> {
        self.frame().body
    }
443 | ||
    /// Sign-extend `value` from the size of `ty` to the full `u128`.
    /// `ty` must have a signed ABI.
    #[inline(always)]
    pub fn sign_extend(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        assert!(ty.abi.is_signed());
        ty.size.sign_extend(value)
    }
449 | ||
    /// Truncate `value` to the size of the given layout.
    #[inline(always)]
    pub fn truncate(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        ty.size.truncate(value)
    }
454 | ||
    /// Whether `ty` is `Freeze` (contains no `UnsafeCell`) in our param env.
    #[inline]
    pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
        ty.is_freeze(*self.tcx, self.param_env)
    }
459 | ||
    /// Load the MIR for `instance` (or the given `promoted` MIR inside it),
    /// refusing to continue if typeck recorded any errors for the body.
    pub fn load_mir(
        &self,
        instance: ty::InstanceDef<'tcx>,
        promoted: Option<mir::Promoted>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        let def = instance.with_opt_param();
        trace!("load mir(instance={:?}, promoted={:?})", instance, promoted);
        let body = if let Some(promoted) = promoted {
            &self.tcx.promoted_mir_opt_const_arg(def)[promoted]
        } else {
            // Let the machine pick which MIR to use (e.g. optimized vs. CTFE MIR).
            M::load_mir(self, instance)?
        };
        // do not continue if typeck errors occurred (can only occur in local crate)
        if let Some(err) = body.tainted_by_errors {
            throw_inval!(AlreadyReported(err));
        }
        Ok(body)
    }
478 | ||
    /// Call this on things you got out of the MIR (so it is as generic as the current
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_current_frame_and_normalize_erasing_regions<
        T: TypeFoldable<TyCtxt<'tcx>>,
    >(
        &self,
        value: T,
    ) -> Result<T, InterpError<'tcx>> {
        // Delegate to the general helper, using the currently executing frame.
        self.subst_from_frame_and_normalize_erasing_regions(self.frame(), value)
    }
489 | ||
    /// Call this on things you got out of the MIR (so it is as generic as the provided
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_frame_and_normalize_erasing_regions<T: TypeFoldable<TyCtxt<'tcx>>>(
        &self,
        frame: &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>,
        value: T,
    ) -> Result<T, InterpError<'tcx>> {
        frame
            .instance
            .try_subst_mir_and_normalize_erasing_regions(*self.tcx, self.param_env, value)
            // Substitution can fail if `value` mentions params the frame does not bind.
            .map_err(|_| err_inval!(TooGeneric))
    }
502 | ||
    /// The `substs` are assumed to already be in our interpreter "universe" (param_env).
    pub(super) fn resolve(
        &self,
        def: ty::WithOptConstParam<DefId>,
        substs: SubstsRef<'tcx>,
    ) -> InterpResult<'tcx, ty::Instance<'tcx>> {
        trace!("resolve: {:?}, {:#?}", def, substs);
        trace!("param_env: {:#?}", self.param_env);
        trace!("substs: {:#?}", substs);
        match ty::Instance::resolve_opt_const_arg(*self.tcx, self.param_env, def, substs) {
            Ok(Some(instance)) => Ok(instance),
            // Resolution succeeded but the instance is still too generic to run.
            Ok(None) => throw_inval!(TooGeneric),

            // FIXME(eddyb) this could be a bit more specific than `AlreadyReported`.
            Err(error_reported) => throw_inval!(AlreadyReported(error_reported)),
        }
    }
520 | ||
    /// Compute (and memoize in the frame's `LocalState`) the layout of `local`,
    /// using `layout` as an already-known layout when provided.
    #[inline(always)]
    pub fn layout_of_local(
        &self,
        frame: &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        let state = &frame.locals[local];
        // Fast path: layout was computed before.
        if let Some(layout) = state.layout.get() {
            return Ok(layout);
        }

        let layout = from_known_layout(self.tcx, self.param_env, layout, || {
            let local_ty = frame.body.local_decls[local].ty;
            let local_ty = self.subst_from_frame_and_normalize_erasing_regions(frame, local_ty)?;
            self.layout_of(local_ty)
        })?;

        // Layouts of locals are requested a lot, so we cache them.
        state.layout.set(Some(layout));
        Ok(layout)
    }
543 | ||
    /// Returns the actual dynamic size and alignment of the place at the given type.
    /// Only the "meta" (metadata) part of the place matters.
    /// This can fail to provide an answer for extern types.
    pub(super) fn size_and_align_of(
        &self,
        metadata: &MemPlaceMeta<M::Provenance>,
        layout: &TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        // Sized types have their size and align statically in the layout.
        if layout.is_sized() {
            return Ok(Some((layout.size, layout.align.abi)));
        }
        match layout.ty.kind() {
            ty::Adt(..) | ty::Tuple(..) => {
                // First get the size of all statically known fields.
                // Don't use type_of::sizing_type_of because that expects t to be sized,
                // and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!layout.ty.is_simd());
                assert!(layout.fields.count() > 0);
                trace!("DST layout: {:?}", layout);

                let sized_size = layout.fields.offset(layout.fields.count() - 1);
                let sized_align = layout.align.abi;
                trace!(
                    "DST {} statically sized prefix size: {:?} align: {:?}",
                    layout.ty,
                    sized_size,
                    sized_align
                );

                // Recurse to get the size of the dynamically sized field (must be
                // the last field). Can't have foreign types here, how would we
                // adjust alignment and size for them?
                let field = layout.field(self, layout.fields.count() - 1);
                let Some((unsized_size, mut unsized_align)) = self.size_and_align_of(metadata, &field)? else {
                    // A field with an extern type. We don't know the actual dynamic size
                    // or the alignment.
                    return Ok(None);
                };

                // FIXME (#26403, #27023): We should be adding padding
                // to `sized_size` (to accommodate the `unsized_align`
                // required of the unsized field that follows) before
                // summing it with `sized_size`. (Note that since #26403
                // is unfixed, we do not yet add the necessary padding
                // here. But this is where the add would go.)

                // Return the sum of sizes and max of aligns.
                let size = sized_size + unsized_size; // `Size` addition

                // Packed types ignore the alignment of their fields.
                if let ty::Adt(def, _) = layout.ty.kind() {
                    if def.repr().packed() {
                        unsized_align = sized_align;
                    }
                }

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let align = sized_align.max(unsized_align);

                // Issue #27023: must add any necessary padding to `size`
                // (to make it a multiple of `align`) before returning it.
                let size = size.align_to(align);

                // Check if this brought us over the size limit.
                if size > self.max_size_of_val() {
                    throw_ub!(InvalidMeta("total size is bigger than largest supported object"));
                }
                Ok(Some((size, align)))
            }
            ty::Dynamic(_, _, ty::Dyn) => {
                let vtable = metadata.unwrap_meta().to_pointer(self)?;
                // Read size and align from vtable (already checks size).
                Ok(Some(self.get_vtable_size_and_align(vtable)?))
            }

            ty::Slice(_) | ty::Str => {
                let len = metadata.unwrap_meta().to_target_usize(self)?;
                let elem = layout.field(self, 0);

                // Make sure the slice is not too big.
                let size = elem.size.bytes().saturating_mul(len); // we rely on `max_size_of_val` being smaller than `u64::MAX`.
                let size = Size::from_bytes(size);
                if size > self.max_size_of_val() {
                    throw_ub!(InvalidMeta("slice is bigger than largest supported object"));
                }
                Ok(Some((size, elem.align.abi)))
            }

            // Extern types: dynamic size/align genuinely unknown.
            ty::Foreign(_) => Ok(None),

            _ => span_bug!(self.cur_span(), "size_and_align_of::<{:?}> not supported", layout.ty),
        }
    }
    /// Convenience wrapper: dynamic size and alignment of an `MPlaceTy`,
    /// determined from its own metadata and layout.
    #[inline]
    pub fn size_and_align_of_mplace(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        self.size_and_align_of(&mplace.meta, &mplace.layout)
    }
ff7c6d11 | 646 | |
    /// Push a new stack frame for `instance`: clobber the return place, push the
    /// frame, evaluate the body's required consts, initialize the locals, and set
    /// the location to `mir::Location::START`.
    #[instrument(skip(self, body, return_place, return_to_block), level = "debug")]
    pub fn push_stack_frame(
        &mut self,
        instance: ty::Instance<'tcx>,
        body: &'mir mir::Body<'tcx>,
        return_place: &PlaceTy<'tcx, M::Provenance>,
        return_to_block: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        trace!("body: {:#?}", body);
        // Clobber previous return place contents, nobody is supposed to be able to see them any more
        // This also checks dereferenceable, but not align. We rely on all constructed places being
        // sufficiently aligned (in particular we rely on `deref_operand` checking alignment).
        self.write_uninit(return_place)?;
        // first push a stack frame so we have access to the local substs
        let pre_frame = Frame {
            body,
            loc: Right(body.span), // Span used for errors caused during preamble.
            return_to_block,
            return_place: return_place.clone(),
            // empty local array, we fill it in below, after we are inside the stack frame and
            // all methods actually know about the frame
            locals: IndexVec::new(),
            instance,
            tracing_span: SpanGuard::new(),
            extra: (),
        };
        let frame = M::init_frame_extra(self, pre_frame)?;
        self.stack_mut().push(frame);

        // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
        for ct in &body.required_consts {
            let span = ct.span;
            let ct = self.subst_from_current_frame_and_normalize_erasing_regions(ct.literal)?;
            self.eval_mir_constant(&ct, Some(span), None)?;
        }

        // Most locals are initially dead.
        let dummy = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
        let mut locals = IndexVec::from_elem(dummy, &body.local_decls);

        // Now mark those locals as live that have no `Storage*` annotations.
        let always_live = always_storage_live_locals(self.body());
        for local in locals.indices() {
            if always_live.contains(local) {
                locals[local].value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
            }
        }
        // done
        self.frame_mut().locals = locals;
        M::after_stack_push(self)?;
        // Preamble finished; execution starts at the first statement.
        self.frame_mut().loc = Left(mir::Location::START);

        let span = info_span!("frame", "{}", instance);
        self.frame_mut().tracing_span.enter(span);

        Ok(())
    }
704 | ||
60c5eb7d XL |
705 | /// Jump to the given block. |
706 | #[inline] | |
707 | pub fn go_to_block(&mut self, target: mir::BasicBlock) { | |
487cf647 | 708 | self.frame_mut().loc = Left(mir::Location { block: target, statement_index: 0 }); |
60c5eb7d XL |
709 | } |
710 | ||
711 | /// *Return* to the given `target` basic block. | |
712 | /// Do *not* use for unwinding! Use `unwind_to_block` instead. | |
713 | /// | |
714 | /// If `target` is `None`, that indicates the function cannot return, so we raise UB. | |
715 | pub fn return_to_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> { | |
716 | if let Some(target) = target { | |
ba9703b0 XL |
717 | self.go_to_block(target); |
718 | Ok(()) | |
60c5eb7d XL |
719 | } else { |
720 | throw_ub!(Unreachable) | |
721 | } | |
722 | } | |
723 | ||
    /// *Unwind* to the given `target` basic block.
    /// Do *not* use for returning! Use `return_to_block` instead.
    ///
    /// If `target` is `UnwindAction::Continue`, that indicates the function does not need cleanup
    /// during unwinding, and we will just keep propagating that upwards.
    ///
    /// If `target` is `UnwindAction::Unreachable`, that indicates the function does not allow
    /// unwinding, and doing so is UB.
    pub fn unwind_to_block(&mut self, target: mir::UnwindAction) -> InterpResult<'tcx> {
        self.frame_mut().loc = match target {
            // Jump into the cleanup block and keep executing there.
            mir::UnwindAction::Cleanup(block) => Left(mir::Location { block, statement_index: 0 }),
            // No cleanup needed; use the function's span for any errors during further unwinding.
            mir::UnwindAction::Continue => Right(self.frame_mut().body.span),
            mir::UnwindAction::Unreachable => {
                throw_ub_format!("unwinding past a stack frame that does not allow unwinding")
            }
            mir::UnwindAction::Terminate => {
                // Point any diagnostics at the whole function, then let the machine abort.
                // `M::abort` diverges, so this arm never produces a value.
                self.frame_mut().loc = Right(self.frame_mut().body.span);
                M::abort(self, "panic in a function that cannot unwind".to_owned())?;
            }
        };
        Ok(())
    }
746 | ||
    /// Pops the current frame from the stack, deallocating the
    /// memory for allocated locals.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
    #[instrument(skip(self), level = "debug")]
    pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Check `unwinding`: the caller's claim must match whether we are currently
        // inside a cleanup block (or already unwinding with a span-only location).
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
                Right(_) => true,
            }
        );
        if unwinding && self.frame_idx() == 0 {
            throw_ub_format!("unwinding past the topmost frame of the stack");
        }

        // Copy return value. Must of course happen *before* we deallocate the locals.
        let copy_ret_result = if !unwinding {
            let op = self
                .local_to_op(self.frame(), mir::RETURN_PLACE, None)
                .expect("return place should always be live");
            let dest = self.frame().return_place.clone();
            let err = self.copy_op(&op, &dest, /*allow_transmute*/ true);
            trace!("return value: {:?}", self.dump_place(*dest));
            // We delay actually short-circuiting on this error until *after* the stack frame is
            // popped, since we want this error to be attributed to the caller, whose type defines
            // this transmute.
            err
        } else {
            Ok(())
        };

        // Cleanup: deallocate locals.
        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
        // We do this while the frame is still on the stack, so errors point to the callee.
        let return_to_block = self.frame().return_to_block;
        let cleanup = match return_to_block {
            StackPopCleanup::Goto { .. } => true,
            StackPopCleanup::Root { cleanup, .. } => cleanup,
        };
        if cleanup {
            // We need to take the locals out, since we need to mutate while iterating.
            let locals = mem::take(&mut self.frame_mut().locals);
            for local in &locals {
                self.deallocate_local(local.value)?;
            }
        }

        // All right, now it is time to actually pop the frame.
        // Note that its locals are gone already, but that's fine.
        let frame =
            self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");
        // Report error from return value copy, if any.
        copy_ret_result?;

        // If we are not doing cleanup, also skip everything else.
        if !cleanup {
            assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
            assert!(!unwinding, "tried to skip cleanup during unwinding");
            // Skip machine hook.
            return Ok(());
        }
        if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump {
            // The hook already did everything.
            return Ok(());
        }

        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            let unwind = match return_to_block {
                StackPopCleanup::Goto { unwind, .. } => unwind,
                StackPopCleanup::Root { .. } => {
                    panic!("encountered StackPopCleanup::Root when unwinding!")
                }
            };
            self.unwind_to_block(unwind)
        } else {
            // Follow the normal return edge.
            match return_to_block {
                StackPopCleanup::Goto { ret, .. } => self.return_to_block(ret),
                StackPopCleanup::Root { .. } => {
                    assert!(
                        self.stack().is_empty(),
                        "only the topmost frame can have StackPopCleanup::Root"
                    );
                    Ok(())
                }
            }
        }
    }
854 | ||
fc512014 XL |
855 | /// Mark a storage as live, killing the previous content. |
856 | pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> { | |
0bf4aa26 XL |
857 | assert!(local != mir::RETURN_PLACE, "Cannot make return place live"); |
858 | trace!("{:?} is now live", local); | |
859 | ||
064997fb | 860 | let local_val = LocalValue::Live(Operand::Immediate(Immediate::Uninit)); |
fc512014 XL |
861 | // StorageLive expects the local to be dead, and marks it live. |
862 | let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val); | |
863 | if !matches!(old, LocalValue::Dead) { | |
864 | throw_ub_format!("StorageLive on a local that was already live"); | |
865 | } | |
866 | Ok(()) | |
0bf4aa26 XL |
867 | } |
868 | ||
fc512014 | 869 | pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> { |
0bf4aa26 XL |
870 | assert!(local != mir::RETURN_PLACE, "Cannot make return place dead"); |
871 | trace!("{:?} is now dead", local); | |
872 | ||
fc512014 XL |
873 | // It is entirely okay for this local to be already dead (at least that's how we currently generate MIR) |
874 | let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead); | |
875 | self.deallocate_local(old)?; | |
876 | Ok(()) | |
0bf4aa26 XL |
877 | } |
878 | ||
5e7ed085 | 879 | #[instrument(skip(self), level = "debug")] |
064997fb | 880 | fn deallocate_local(&mut self, local: LocalValue<M::Provenance>) -> InterpResult<'tcx> { |
b7449926 | 881 | if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local { |
dfeec247 | 882 | // All locals have a backing allocation, even if the allocation is empty |
136023e0 XL |
883 | // due to the local having ZST type. Hence we can `unwrap`. |
884 | trace!( | |
885 | "deallocating local {:?}: {:?}", | |
886 | local, | |
04454e1e FG |
887 | // Locals always have a `alloc_id` (they are never the result of a int2ptr). |
888 | self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap()) | |
136023e0 | 889 | ); |
04454e1e | 890 | self.deallocate_ptr(ptr, None, MemoryKind::Stack)?; |
ff7c6d11 XL |
891 | }; |
892 | Ok(()) | |
893 | } | |
894 | ||
487cf647 FG |
895 | /// Call a query that can return `ErrorHandled`. If `span` is `Some`, point to that span when an error occurs. |
896 | pub fn ctfe_query<T>( | |
897 | &self, | |
898 | span: Option<Span>, | |
899 | query: impl FnOnce(TyCtxtAt<'tcx>) -> Result<T, ErrorHandled>, | |
900 | ) -> InterpResult<'tcx, T> { | |
901 | // Use a precise span for better cycle errors. | |
902 | query(self.tcx.at(span.unwrap_or_else(|| self.cur_span()))).map_err(|err| { | |
903 | match err { | |
904 | ErrorHandled::Reported(err) => { | |
905 | if let Some(span) = span { | |
906 | // To make it easier to figure out where this error comes from, also add a note at the current location. | |
907 | self.tcx.sess.span_note_without_error(span, "erroneous constant used"); | |
908 | } | |
909 | err_inval!(AlreadyReported(err)) | |
910 | } | |
911 | ErrorHandled::TooGeneric => err_inval!(TooGeneric), | |
912 | } | |
913 | .into() | |
914 | }) | |
915 | } | |
916 | ||
917 | pub fn eval_global( | |
a1dfa0c6 XL |
918 | &self, |
919 | gid: GlobalId<'tcx>, | |
487cf647 | 920 | span: Option<Span>, |
064997fb | 921 | ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> { |
dfeec247 XL |
922 | // For statics we pick `ParamEnv::reveal_all`, because statics don't have generics |
923 | // and thus don't care about the parameter environment. While we could just use | |
924 | // `self.param_env`, that would mean we invoke the query to evaluate the static | |
925 | // with different parameter environments, thus causing the static to be evaluated | |
926 | // multiple times. | |
48663c56 | 927 | let param_env = if self.tcx.is_static(gid.instance.def_id()) { |
0531ce1d XL |
928 | ty::ParamEnv::reveal_all() |
929 | } else { | |
930 | self.param_env | |
931 | }; | |
a2a8927a | 932 | let param_env = param_env.with_const(); |
487cf647 | 933 | let val = self.ctfe_query(span, |tcx| tcx.eval_to_allocation_raw(param_env.and(gid)))?; |
a1dfa0c6 | 934 | self.raw_const_to_mplace(val) |
ff7c6d11 XL |
935 | } |
936 | ||
3dfed10e | 937 | #[must_use] |
064997fb | 938 | pub fn dump_place(&self, place: Place<M::Provenance>) -> PlacePrinter<'_, 'mir, 'tcx, M> { |
3dfed10e XL |
939 | PlacePrinter { ecx: self, place } |
940 | } | |
941 | ||
942 | #[must_use] | |
2b03887a FG |
943 | pub fn generate_stacktrace_from_stack( |
944 | stack: &[Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>], | |
945 | ) -> Vec<FrameInfo<'tcx>> { | |
3dfed10e | 946 | let mut frames = Vec::new(); |
064997fb FG |
947 | // This deliberately does *not* honor `requires_caller_location` since it is used for much |
948 | // more than just panics. | |
2b03887a | 949 | for frame in stack.iter().rev() { |
9c376795 | 950 | let lint_root = frame.lint_root(); |
3dfed10e XL |
951 | let span = frame.current_span(); |
952 | ||
953 | frames.push(FrameInfo { span, instance: frame.instance, lint_root }); | |
83c7162d | 954 | } |
3dfed10e XL |
955 | trace!("generate stacktrace: {:#?}", frames); |
956 | frames | |
957 | } | |
2b03887a FG |
958 | |
959 | #[must_use] | |
960 | pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> { | |
961 | Self::generate_stacktrace_from_stack(self.stack()) | |
962 | } | |
3dfed10e XL |
963 | } |
964 | ||
#[doc(hidden)]
/// Helper struct for the `dump_place` function.
pub struct PlacePrinter<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    // Interpreter whose stack and allocations are consulted while printing.
    ecx: &'a InterpCx<'mir, 'tcx, M>,
    // The place whose current contents should be rendered.
    place: Place<M::Provenance>,
}
971 | ||
impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
    for PlacePrinter<'a, 'mir, 'tcx, M>
{
    /// Renders the place's current value, then dumps any allocations its pointers refer to.
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.place {
            Place::Local { frame, local } => {
                // Allocation ids encountered while printing; dumped together at the end.
                let mut allocs = Vec::new();
                write!(fmt, "{:?}", local)?;
                if frame != self.ecx.frame_idx() {
                    write!(fmt, " ({} frames up)", self.ecx.frame_idx() - frame)?;
                }
                write!(fmt, ":")?;

                match self.ecx.stack()[frame].locals[local].value {
                    LocalValue::Dead => write!(fmt, " is dead")?,
                    LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => {
                        write!(fmt, " is uninitialized")?
                    }
                    LocalValue::Live(Operand::Indirect(mplace)) => {
                        write!(
                            fmt,
                            " by {} ref {:?}:",
                            match mplace.meta {
                                MemPlaceMeta::Meta(meta) => format!(" meta({:?})", meta),
                                MemPlaceMeta::None => String::new(),
                            },
                            mplace.ptr,
                        )?;
                        // Only pointers with provenance refer to a dumpable allocation.
                        allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
                        write!(fmt, " {:?}", val)?;
                        if let Scalar::Ptr(ptr, _size) = val {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
                        write!(fmt, " ({:?}, {:?})", val1, val2)?;
                        if let Scalar::Ptr(ptr, _size) = val1 {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                        if let Scalar::Ptr(ptr, _size) = val2 {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                    }
                }

                write!(fmt, ": {:?}", self.ecx.dump_allocs(allocs.into_iter().flatten().collect()))
            }
            Place::Ptr(mplace) => match mplace.ptr.provenance.and_then(Provenance::get_alloc_id) {
                Some(alloc_id) => {
                    write!(fmt, "by ref {:?}: {:?}", mplace.ptr, self.ecx.dump_alloc(alloc_id))
                }
                // No provenance: the pointer is a plain integer, nothing to dump.
                ptr => write!(fmt, " integral by ref: {:?}", ptr),
            },
        }
    }
}