//! Functions concerning immediate values and operands, and reading from operands.
//! All high-level functions to read from memory work on operands as sources.

use std::assert_matches::assert_matches;

use either::{Either, Left, Right};
use rustc_hir::def::Namespace;
use rustc_middle::mir::interpret::ScalarSizeMismatch;
use rustc_middle::ty::layout::{HasParamEnv, HasTyCtxt, LayoutOf, TyAndLayout};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter};
use rustc_middle::ty::{ConstInt, ScalarInt, Ty, TyCtxt};
use rustc_middle::{bug, mir, span_bug, ty};
use rustc_target::abi::{self, Abi, HasDataLayout, Size};
use tracing::trace;

use super::{
    alloc_range, err_ub, from_known_layout, mir_assign_valid_types, throw_ub, CtfeProvenance,
    InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, OffsetMode, PlaceTy,
    Pointer, Projectable, Provenance, Scalar,
};

/// An `Immediate` represents a single immediate self-contained Rust value.
///
/// For optimization of a few very common cases, there is also a representation for a pair of
/// primitive values (`ScalarPair`). It allows Miri to avoid making allocations for checked binary
/// operations and wide pointers. This idea was taken from rustc's codegen.
/// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
/// defined on `Immediate`, and do not have to work with a `Place`.
#[derive(Copy, Clone, Debug)]
pub enum Immediate<Prov: Provenance = CtfeProvenance> {
    /// A single scalar value (must have *initialized* `Scalar` ABI).
    Scalar(Scalar<Prov>),
    /// A pair of two scalar values (must have `ScalarPair` ABI where both fields are
    /// `Scalar::Initialized`).
    ScalarPair(Scalar<Prov>, Scalar<Prov>),
    /// A value of fully uninitialized memory. Can have arbitrary size and layout, but must be sized.
    Uninit,
}
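
// Illustrative sketch (not part of the interpreter API): a `u32` value is a single
// `Immediate::Scalar`, while a wide pointer such as `&[u8]` is an `Immediate::ScalarPair`
// holding the data pointer and the length. Assuming a `ptr` and a `cx: &impl HasDataLayout`
// are at hand, the constructor below builds exactly that pair:
//
//     let slice_imm = Immediate::new_slice(ptr, /*len*/ 3, cx);
//     assert!(matches!(slice_imm, Immediate::ScalarPair(..)));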

impl<Prov: Provenance> From<Scalar<Prov>> for Immediate<Prov> {
    #[inline(always)]
    fn from(val: Scalar<Prov>) -> Self {
        Immediate::Scalar(val)
    }
}

impl<Prov: Provenance> Immediate<Prov> {
    pub fn new_pointer_with_meta(
        ptr: Pointer<Option<Prov>>,
        meta: MemPlaceMeta<Prov>,
        cx: &impl HasDataLayout,
    ) -> Self {
        let ptr = Scalar::from_maybe_pointer(ptr, cx);
        match meta {
            MemPlaceMeta::None => Immediate::from(ptr),
            MemPlaceMeta::Meta(meta) => Immediate::ScalarPair(ptr, meta),
        }
    }

    pub fn new_slice(ptr: Pointer<Option<Prov>>, len: u64, cx: &impl HasDataLayout) -> Self {
        Immediate::ScalarPair(
            Scalar::from_maybe_pointer(ptr, cx),
            Scalar::from_target_usize(len, cx),
        )
    }

    pub fn new_dyn_trait(
        val: Pointer<Option<Prov>>,
        vtable: Pointer<Option<Prov>>,
        cx: &impl HasDataLayout,
    ) -> Self {
        Immediate::ScalarPair(
            Scalar::from_maybe_pointer(val, cx),
            Scalar::from_maybe_pointer(vtable, cx),
        )
    }

    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
    pub fn to_scalar(self) -> Scalar<Prov> {
        match self {
            Immediate::Scalar(val) => val,
            Immediate::ScalarPair(..) => bug!("Got a scalar pair where a scalar was expected"),
            Immediate::Uninit => bug!("Got uninit where a scalar was expected"),
        }
    }

    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
    pub fn to_scalar_int(self) -> ScalarInt {
        self.to_scalar().try_to_scalar_int().unwrap()
    }

    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
    pub fn to_scalar_pair(self) -> (Scalar<Prov>, Scalar<Prov>) {
        match self {
            Immediate::ScalarPair(val1, val2) => (val1, val2),
            Immediate::Scalar(..) => bug!("Got a scalar where a scalar pair was expected"),
            Immediate::Uninit => bug!("Got uninit where a scalar pair was expected"),
        }
    }

    /// Returns the scalar from the first component and optionally the 2nd component as metadata.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
    pub fn to_scalar_and_meta(self) -> (Scalar<Prov>, MemPlaceMeta<Prov>) {
        match self {
            Immediate::ScalarPair(val1, val2) => (val1, MemPlaceMeta::Meta(val2)),
            Immediate::Scalar(val) => (val, MemPlaceMeta::None),
            Immediate::Uninit => bug!("Got uninit where a scalar or scalar pair was expected"),
        }
    }
}

// ScalarPair needs a type to interpret, so we often have an immediate and a type together
// as input for binary and cast operations.
#[derive(Clone)]
pub struct ImmTy<'tcx, Prov: Provenance = CtfeProvenance> {
    imm: Immediate<Prov>,
    pub layout: TyAndLayout<'tcx>,
}

impl<Prov: Provenance> std::fmt::Display for ImmTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        /// Helper function for printing a scalar to a FmtPrinter
        fn p<'a, 'tcx, Prov: Provenance>(
            cx: &mut FmtPrinter<'a, 'tcx>,
            s: Scalar<Prov>,
            ty: Ty<'tcx>,
        ) -> Result<(), std::fmt::Error> {
            match s {
                Scalar::Int(int) => cx.pretty_print_const_scalar_int(int, ty, true),
                Scalar::Ptr(ptr, _sz) => {
                    // Just print the ptr value. `pretty_print_const_scalar_ptr` would also try to
                    // print what it points to, which would fail since it has no access to the local
                    // memory.
                    cx.pretty_print_const_pointer(ptr, ty)
                }
            }
        }
        ty::tls::with(|tcx| {
            match self.imm {
                Immediate::Scalar(s) => {
                    if let Some(ty) = tcx.lift(self.layout.ty) {
                        let s =
                            FmtPrinter::print_string(tcx, Namespace::ValueNS, |cx| p(cx, s, ty))?;
                        f.write_str(&s)?;
                        return Ok(());
                    }
                    write!(f, "{:x}: {}", s, self.layout.ty)
                }
                Immediate::ScalarPair(a, b) => {
                    // FIXME(oli-obk): at least print tuples and slices nicely
                    write!(f, "({:x}, {:x}): {}", a, b, self.layout.ty)
                }
                Immediate::Uninit => {
                    write!(f, "uninit: {}", self.layout.ty)
                }
            }
        })
    }
}

impl<Prov: Provenance> std::fmt::Debug for ImmTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Printing `layout` results in too much noise; just print a nice version of the type.
        f.debug_struct("ImmTy")
            .field("imm", &self.imm)
            .field("ty", &format_args!("{}", self.layout.ty))
            .finish()
    }
}

impl<'tcx, Prov: Provenance> std::ops::Deref for ImmTy<'tcx, Prov> {
    type Target = Immediate<Prov>;
    #[inline(always)]
    fn deref(&self) -> &Immediate<Prov> {
        &self.imm
    }
}

impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
    #[inline]
    pub fn from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
        debug_assert!(layout.abi.is_scalar(), "`ImmTy::from_scalar` on non-scalar layout");
        debug_assert_eq!(val.size(), layout.size);
        ImmTy { imm: val.into(), layout }
    }

    #[inline]
    pub fn from_scalar_pair(a: Scalar<Prov>, b: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
        debug_assert!(
            matches!(layout.abi, Abi::ScalarPair(..)),
            "`ImmTy::from_scalar_pair` on non-scalar-pair layout"
        );
        let imm = Immediate::ScalarPair(a, b);
        ImmTy { imm, layout }
    }

    #[inline(always)]
    pub fn from_immediate(imm: Immediate<Prov>, layout: TyAndLayout<'tcx>) -> Self {
        debug_assert!(
            match (imm, layout.abi) {
                (Immediate::Scalar(..), Abi::Scalar(..)) => true,
                (Immediate::ScalarPair(..), Abi::ScalarPair(..)) => true,
                (Immediate::Uninit, _) if layout.is_sized() => true,
                _ => false,
            },
            "immediate {imm:?} does not fit to layout {layout:?}",
        );
        ImmTy { imm, layout }
    }

    #[inline]
    pub fn uninit(layout: TyAndLayout<'tcx>) -> Self {
        debug_assert!(layout.is_sized(), "immediates must be sized");
        ImmTy { imm: Immediate::Uninit, layout }
    }

    #[inline]
    pub fn from_scalar_int(s: ScalarInt, layout: TyAndLayout<'tcx>) -> Self {
        assert_eq!(s.size(), layout.size);
        Self::from_scalar(Scalar::from(s), layout)
    }

    #[inline]
    pub fn from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from_uint(i, layout.size), layout)
    }

    #[inline]
    pub fn from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from_int(i, layout.size), layout)
    }

    #[inline]
    pub fn from_bool(b: bool, tcx: TyCtxt<'tcx>) -> Self {
        let layout = tcx.layout_of(ty::ParamEnv::reveal_all().and(tcx.types.bool)).unwrap();
        Self::from_scalar(Scalar::from_bool(b), layout)
    }

    #[inline]
    pub fn from_ordering(c: std::cmp::Ordering, tcx: TyCtxt<'tcx>) -> Self {
        let ty = tcx.ty_ordering_enum(None);
        let layout = tcx.layout_of(ty::ParamEnv::reveal_all().and(ty)).unwrap();
        Self::from_scalar(Scalar::from_i8(c as i8), layout)
    }

    pub fn from_pair(a: Self, b: Self, tcx: TyCtxt<'tcx>) -> Self {
        let layout = tcx
            .layout_of(
                ty::ParamEnv::reveal_all().and(Ty::new_tup(tcx, &[a.layout.ty, b.layout.ty])),
            )
            .unwrap();
        Self::from_scalar_pair(a.to_scalar(), b.to_scalar(), layout)
    }
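
    // Typical usage sketch (illustrative only; `tcx` and a `u32_layout` are assumed to be at
    // hand): code that wants to feed the constant `42u32` or the boolean `true` into the
    // interpreter builds an `ImmTy` via these constructors,
    //
    //     let flag = ImmTy::from_bool(true, tcx);
    //     let answer = ImmTy::from_uint(42u32, u32_layout);
    //
    // and later converts it into an operand via `OpTy::from(...)`.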

    /// Return the immediate as a `ScalarInt`. Ensures that it has the size that the layout of the
    /// immediate indicates.
    #[inline]
    pub fn to_scalar_int(&self) -> InterpResult<'tcx, ScalarInt> {
        let s = self.to_scalar().to_scalar_int()?;
        if s.size() != self.layout.size {
            throw_ub!(ScalarSizeMismatch(ScalarSizeMismatch {
                target_size: self.layout.size.bytes(),
                data_size: s.size().bytes(),
            }));
        }
        Ok(s)
    }

    #[inline]
    pub fn to_const_int(self) -> ConstInt {
        assert!(self.layout.ty.is_integral());
        let int = self.imm.to_scalar_int();
        assert_eq!(int.size(), self.layout.size);
        ConstInt::new(int, self.layout.ty.is_signed(), self.layout.ty.is_ptr_sized_integral())
    }

    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
    pub fn to_pair(self, cx: &(impl HasTyCtxt<'tcx> + HasParamEnv<'tcx>)) -> (Self, Self) {
        let layout = self.layout;
        let (val0, val1) = self.to_scalar_pair();
        (
            ImmTy::from_scalar(val0, layout.field(cx, 0)),
            ImmTy::from_scalar(val1, layout.field(cx, 1)),
        )
    }

    /// Compute the "sub-immediate" that is located within the `base` at the given offset with the
    /// given layout.
    // Not called `offset` to avoid confusion with the trait method.
    fn offset_(&self, offset: Size, layout: TyAndLayout<'tcx>, cx: &impl HasDataLayout) -> Self {
        debug_assert!(layout.is_sized(), "unsized immediates are not a thing");
        // `ImmTy` has already been checked to be in-bounds, so we can just check directly if this
        // remains in-bounds. This cannot actually be violated since projections are type-checked
        // and bounds-checked.
        assert!(
            offset + layout.size <= self.layout.size,
            "attempting to project to field at offset {} with size {} into immediate with layout {:#?}",
            offset.bytes(),
            layout.size.bytes(),
            self.layout,
        );
        // This makes several assumptions about what layouts we will encounter; we match what
        // codegen does as well as we can (see `extract_field` in `rustc_codegen_ssa/src/mir/operand.rs`).
        let inner_val: Immediate<_> = match (**self, self.layout.abi) {
            // If the entire value is uninit, then so is the field (can happen in ConstProp).
            (Immediate::Uninit, _) => Immediate::Uninit,
            // If the field is uninhabited, we can forget the data (can happen in ConstProp).
            // `enum S { A(!), B, C }` is an example of an enum with Scalar layout that
            // has an `Uninhabited` variant, which means this case is possible.
            _ if layout.abi.is_uninhabited() => Immediate::Uninit,
            // the field contains no information, can be left uninit
            // (Scalar/ScalarPair can contain even aligned ZST, not just 1-ZST)
            _ if layout.is_zst() => Immediate::Uninit,
            // some fieldless enum variants can have non-zero size but still `Aggregate` ABI... try
            // to detect those here and also give them no data
            _ if matches!(layout.abi, Abi::Aggregate { .. })
                && matches!(layout.variants, abi::Variants::Single { .. })
                && matches!(&layout.fields, abi::FieldsShape::Arbitrary { offsets, .. } if offsets.len() == 0) =>
            {
                Immediate::Uninit
            }
            // the field covers the entire type
            _ if layout.size == self.layout.size => {
                assert_eq!(offset.bytes(), 0);
                assert!(
                    match (self.layout.abi, layout.abi) {
                        (Abi::Scalar(l), Abi::Scalar(r)) => l.size(cx) == r.size(cx),
                        (Abi::ScalarPair(l1, l2), Abi::ScalarPair(r1, r2)) =>
                            l1.size(cx) == r1.size(cx) && l2.size(cx) == r2.size(cx),
                        _ => false,
                    },
                    "cannot project into {} immediate with equally-sized field {}\nouter ABI: {:#?}\nfield ABI: {:#?}",
                    self.layout.ty,
                    layout.ty,
                    self.layout.abi,
                    layout.abi,
                );
                **self
            }
            // extract fields from types with `ScalarPair` ABI
            (Immediate::ScalarPair(a_val, b_val), Abi::ScalarPair(a, b)) => {
                assert_matches!(layout.abi, Abi::Scalar(..));
                Immediate::from(if offset.bytes() == 0 {
                    // It is "okay" to transmute from `usize` to a pointer (GVN relies on that).
                    // So only compare the size.
                    assert_eq!(layout.size, a.size(cx));
                    a_val
                } else {
                    assert_eq!(offset, a.size(cx).align_to(b.align(cx).abi));
                    assert_eq!(layout.size, b.size(cx));
                    b_val
                })
            }
            // everything else is a bug
            _ => bug!(
                "invalid field access on immediate {} at offset {}, original layout {:#?}",
                self,
                offset.bytes(),
                self.layout
            ),
        };

        ImmTy::from_immediate(inner_val, layout)
    }
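
    // Illustrative example of the `ScalarPair` arm above: projecting the length field out of a
    // `&[u8]` immediate (a `usize` that starts at `a.size(cx).align_to(b.align(cx).abi)`) yields
    // the second scalar of the pair, while projecting the data pointer (offset 0) yields the
    // first; every other offset is rejected by the assertions.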
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for ImmTy<'tcx, Prov> {
    #[inline(always)]
    fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    #[inline(always)]
    fn meta(&self) -> MemPlaceMeta<Prov> {
        debug_assert!(self.layout.is_sized()); // unsized ImmTy can only exist temporarily and should never reach this here
        MemPlaceMeta::None
    }

    fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        offset: Size,
        _mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        assert_matches!(meta, MemPlaceMeta::None); // we can't store this anywhere anyway
        Ok(self.offset_(offset, layout, ecx))
    }

    fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        _ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        Ok(self.clone().into())
    }
}

/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug)]
pub(super) enum Operand<Prov: Provenance = CtfeProvenance> {
    Immediate(Immediate<Prov>),
    Indirect(MemPlace<Prov>),
}

#[derive(Clone)]
pub struct OpTy<'tcx, Prov: Provenance = CtfeProvenance> {
    op: Operand<Prov>, // Keep this private; it helps enforce invariants.
    pub layout: TyAndLayout<'tcx>,
}

impl<Prov: Provenance> std::fmt::Debug for OpTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Printing `layout` results in too much noise; just print a nice version of the type.
        f.debug_struct("OpTy")
            .field("op", &self.op)
            .field("ty", &format_args!("{}", self.layout.ty))
            .finish()
    }
}

impl<'tcx, Prov: Provenance> From<ImmTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
    #[inline(always)]
    fn from(val: ImmTy<'tcx, Prov>) -> Self {
        OpTy { op: Operand::Immediate(val.imm), layout: val.layout }
    }
}

impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
    #[inline(always)]
    fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
        OpTy { op: Operand::Indirect(*mplace.mplace()), layout: mplace.layout }
    }
}

impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
    #[inline(always)]
    pub(super) fn op(&self) -> &Operand<Prov> {
        &self.op
    }
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for OpTy<'tcx, Prov> {
    #[inline(always)]
    fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    #[inline]
    fn meta(&self) -> MemPlaceMeta<Prov> {
        match self.as_mplace_or_imm() {
            Left(mplace) => mplace.meta(),
            Right(_) => {
                debug_assert!(self.layout.is_sized(), "unsized immediates are not a thing");
                MemPlaceMeta::None
            }
        }
    }

    fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        offset: Size,
        mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        match self.as_mplace_or_imm() {
            Left(mplace) => Ok(mplace.offset_with_meta(offset, mode, meta, layout, ecx)?.into()),
            Right(imm) => {
                assert_matches!(meta, MemPlaceMeta::None); // no place to store metadata here
                // Every part of an uninit is uninit.
                Ok(imm.offset_(offset, layout, ecx).into())
            }
        }
    }

    fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        _ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        Ok(self.clone())
    }
}

/// The `Readable` trait describes interpreter values that one can read from.
pub trait Readable<'tcx, Prov: Provenance>: Projectable<'tcx, Prov> {
    fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>>;
}

impl<'tcx, Prov: Provenance> Readable<'tcx, Prov> for OpTy<'tcx, Prov> {
    #[inline(always)]
    fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
        self.as_mplace_or_imm()
    }
}

impl<'tcx, Prov: Provenance> Readable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
        Left(self.clone())
    }
}

impl<'tcx, Prov: Provenance> Readable<'tcx, Prov> for ImmTy<'tcx, Prov> {
    #[inline(always)]
    fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
        Right(self.clone())
    }
}
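
// Because `OpTy`, `MPlaceTy`, and `ImmTy` all implement `Readable`, the `read_*` helpers below
// accept any of them. A caller could therefore write, e.g.
//
//     let imm = ecx.read_immediate(&op_ty)?;   // from an operand
//     let val = ecx.read_scalar(&mplace_ty)?;  // or directly from a memory place
//
// (illustrative only; `ecx`, `op_ty`, and `mplace_ty` are assumed to be in scope).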

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Try reading an immediate in memory; this is interesting particularly for `ScalarPair`.
    /// Returns `None` if the layout does not permit loading this as a value.
    ///
    /// This is an internal function; call `read_immediate` instead.
    fn read_immediate_from_mplace_raw(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, M::Provenance>>> {
        if mplace.layout.is_unsized() {
            // Don't touch unsized
            return Ok(None);
        }

        let Some(alloc) = self.get_place_alloc(mplace)? else {
            // zero-sized type can be left uninit
            return Ok(Some(ImmTy::uninit(mplace.layout)));
        };

        // It may seem like all types with `Scalar` or `ScalarPair` ABI are fair game at this point.
        // However, `MaybeUninit<u64>` is considered a `Scalar` as far as its layout is concerned --
        // and yet cannot be represented by an interpreter `Scalar`, since we have to handle the
        // case where some of the bytes are initialized and others are not. So, we need an extra
        // check that walks over the type of `mplace` to make sure it is truly correct to treat this
        // like a `Scalar` (or `ScalarPair`).
        Ok(match mplace.layout.abi {
            Abi::Scalar(abi::Scalar::Initialized { value: s, .. }) => {
                let size = s.size(self);
                assert_eq!(size, mplace.layout.size, "abi::Scalar size does not match layout size");
                let scalar = alloc.read_scalar(
                    alloc_range(Size::ZERO, size),
                    /*read_provenance*/ matches!(s, abi::Pointer(_)),
                )?;
                Some(ImmTy::from_scalar(scalar, mplace.layout))
            }
            Abi::ScalarPair(
                abi::Scalar::Initialized { value: a, .. },
                abi::Scalar::Initialized { value: b, .. },
            ) => {
                // We checked `ptr_align` above, so all fields will have the alignment they need.
                // We would anyway check against `ptr_align.restrict_for_offset(b_offset)`,
                // which `ptr.offset(b_offset)` cannot possibly fail to satisfy.
                let (a_size, b_size) = (a.size(self), b.size(self));
                let b_offset = a_size.align_to(b.align(self).abi);
                assert!(b_offset.bytes() > 0); // in `operand_field` we use the offset to tell apart the fields
                let a_val = alloc.read_scalar(
                    alloc_range(Size::ZERO, a_size),
                    /*read_provenance*/ matches!(a, abi::Pointer(_)),
                )?;
                let b_val = alloc.read_scalar(
                    alloc_range(b_offset, b_size),
                    /*read_provenance*/ matches!(b, abi::Pointer(_)),
                )?;
                Some(ImmTy::from_immediate(Immediate::ScalarPair(a_val, b_val), mplace.layout))
            }
            _ => {
                // Neither a scalar nor scalar pair.
                None
            }
        })
    }

    /// Try returning an immediate for the operand. If the layout does not permit loading this as an
    /// immediate, return where in memory we can find the data.
    /// Note that for a given layout, this operation will either always return Left or always
    /// return Right! Whether it returns Left depends on whether the layout can be represented
    /// in an `Immediate`, not on which data is stored there currently.
    ///
    /// This is an internal function that should not usually be used; call `read_immediate` instead.
    /// ConstProp needs it, though.
    pub fn read_immediate_raw(
        &self,
        src: &impl Readable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Either<MPlaceTy<'tcx, M::Provenance>, ImmTy<'tcx, M::Provenance>>> {
        Ok(match src.as_mplace_or_imm() {
            Left(ref mplace) => {
                if let Some(val) = self.read_immediate_from_mplace_raw(mplace)? {
                    Right(val)
                } else {
                    Left(mplace.clone())
                }
            }
            Right(val) => Right(val),
        })
    }
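
    // For instance, reading from a place of type `u64` or `(u32, u32)` (both `Scalar`/`ScalarPair`
    // ABI) always yields `Right(imm)`, while a place of type `[u8; 16]` (aggregate ABI) always
    // yields `Left(mplace)`, regardless of which bytes are currently stored there.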

    /// Read an immediate from a place, asserting that this is possible with the given layout.
    ///
    /// If this succeeds, the `ImmTy` is never `Uninit`.
    #[inline(always)]
    pub fn read_immediate(
        &self,
        op: &impl Readable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        if !matches!(
            op.layout().abi,
            Abi::Scalar(abi::Scalar::Initialized { .. })
                | Abi::ScalarPair(abi::Scalar::Initialized { .. }, abi::Scalar::Initialized { .. })
        ) {
            span_bug!(self.cur_span(), "primitive read not possible for type: {}", op.layout().ty);
        }
        let imm = self.read_immediate_raw(op)?.right().unwrap();
        if matches!(*imm, Immediate::Uninit) {
            throw_ub!(InvalidUninitBytes(None));
        }
        Ok(imm)
    }

    /// Read a scalar from a place.
    pub fn read_scalar(
        &self,
        op: &impl Readable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        Ok(self.read_immediate(op)?.to_scalar())
    }

    // Pointer-sized reads are fairly common and need target layout access, so we wrap them in
    // convenience functions.

    /// Read a pointer from a place.
    pub fn read_pointer(
        &self,
        op: &impl Readable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Pointer<Option<M::Provenance>>> {
        self.read_scalar(op)?.to_pointer(self)
    }
    /// Read a pointer-sized unsigned integer from a place.
    pub fn read_target_usize(
        &self,
        op: &impl Readable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, u64> {
        self.read_scalar(op)?.to_target_usize(self)
    }
    /// Read a pointer-sized signed integer from a place.
    pub fn read_target_isize(
        &self,
        op: &impl Readable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, i64> {
        self.read_scalar(op)?.to_target_isize(self)
    }
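
    // Illustrative sketch of how these helpers are typically combined, e.g. in an intrinsic or
    // shim implementation (`args` is a hypothetical slice of `OpTy`s, not part of this file):
    //
    //     let ptr = self.read_pointer(&args[0])?;
    //     let len = self.read_target_usize(&args[1])?;
    //     let flag = self.read_scalar(&args[2])?.to_bool()?;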

    /// Turn the wide MPlace into a string (must already be dereferenced!)
    pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, &str> {
        let len = mplace.len(self)?;
        let bytes = self.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len))?;
        let str = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?;
        Ok(str)
    }

    /// Converts a repr(simd) operand into an operand where `place_index` accesses the SIMD elements.
    /// Also returns the number of elements.
    ///
    /// Can (but does not always) trigger UB if `op` is uninitialized.
    pub fn operand_to_simd(
        &self,
        op: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::Provenance>, u64)> {
        // Basically we just transmute this place into an array following simd_size_and_type.
        // This only works in memory, but repr(simd) types should never be immediates anyway.
        assert!(op.layout.ty.is_simd());
        match op.as_mplace_or_imm() {
            Left(mplace) => self.mplace_to_simd(&mplace),
            Right(imm) => match *imm {
                Immediate::Uninit => {
                    throw_ub!(InvalidUninitBytes(None))
                }
                Immediate::Scalar(..) | Immediate::ScalarPair(..) => {
                    bug!("arrays/slices can never have Scalar/ScalarPair layout")
                }
            },
        }
    }

    /// Read from a local of the current frame.
    /// Will not access memory, instead an indirect `Operand` is returned.
    ///
    /// This is public because it is used by [priroda](https://github.com/oli-obk/priroda) to get an
    /// OpTy from a local.
    pub fn local_to_op(
        &self,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        let frame = self.frame();
        let layout = self.layout_of_local(frame, local, layout)?;
        let op = *frame.locals[local].access()?;
        if matches!(op, Operand::Immediate(_)) {
            assert!(!layout.is_unsized());
        }
        Ok(OpTy { op, layout })
    }

    /// Every place can be read from, so we can turn them into an operand.
    /// This will definitely return `Indirect` if the place is a `Ptr`, i.e., this
    /// will never actually read from memory.
    pub fn place_to_op(
        &self,
        place: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        match place.as_mplace_or_local() {
            Left(mplace) => Ok(mplace.into()),
            Right((local, offset, locals_addr)) => {
                debug_assert!(place.layout.is_sized()); // only sized locals can ever be `Place::Local`.
                debug_assert_eq!(locals_addr, self.frame().locals_addr());
                let base = self.local_to_op(local, None)?;
                Ok(match offset {
                    Some(offset) => base.offset(offset, place.layout, self)?,
                    None => {
                        // In the common case this hasn't been projected.
                        debug_assert_eq!(place.layout, base.layout);
                        base
                    }
                })
            }
        }
    }

    /// Evaluate a place with the goal of reading from it. This lets us sometimes
    /// avoid allocations.
    pub fn eval_place_to_op(
        &self,
        mir_place: mir::Place<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        // Do not use the layout passed in as argument if the base we are looking at
        // here is not the entire place.
        let layout = if mir_place.projection.is_empty() { layout } else { None };

        let mut op = self.local_to_op(mir_place.local, layout)?;
        // Using `try_fold` turned out to be bad for performance, hence the loop.
        for elem in mir_place.projection.iter() {
            op = self.project(&op, elem)?
        }

        trace!("eval_place_to_op: got {:?}", op);
        // Sanity-check the type we ended up with.
        if cfg!(debug_assertions) {
            let normalized_place_ty = self
                .instantiate_from_current_frame_and_normalize_erasing_regions(
                    mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty,
                )?;
            if !mir_assign_valid_types(
                *self.tcx,
                self.param_env,
                self.layout_of(normalized_place_ty)?,
                op.layout,
            ) {
                span_bug!(
                    self.cur_span(),
                    "eval_place of a MIR place with type {} produced an interpreter operand with type {}",
                    normalized_place_ty,
                    op.layout.ty,
                )
            }
        }
        Ok(op)
    }

    /// Evaluate the operand, returning a place where you can then find the data.
    /// If you already know the layout, you can save two table lookups
    /// by passing it in here.
    #[inline]
    pub fn eval_operand(
        &self,
        mir_op: &mir::Operand<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        use rustc_middle::mir::Operand::*;
        let op = match mir_op {
            // FIXME: do some more logic on `move` to invalidate the old location
            &Copy(place) | &Move(place) => self.eval_place_to_op(place, layout)?,

            Constant(constant) => {
                let c = self.instantiate_from_current_frame_and_normalize_erasing_regions(
                    constant.const_,
                )?;

                // This can still fail:
                // * During ConstProp, with `TooGeneric` or since the `required_consts` were not all
                //   checked yet.
                // * During CTFE, since promoteds in `const`/`static` initializer bodies can fail.
                self.eval_mir_constant(&c, constant.span, layout)?
            }
        };
        trace!("{:?}: {:?}", mir_op, op);
        Ok(op)
    }
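
    // Putting the pieces together (illustrative only): to execute a MIR statement such as
    // `_3 = Add(_1, _2)`, the interpreter would evaluate the two `mir::Operand`s and then read
    // them as immediates, roughly
    //
    //     let left = self.read_immediate(&self.eval_operand(&left_mir_op, None)?)?;
    //     let right = self.read_immediate(&self.eval_operand(&right_mir_op, None)?)?;
    //
    // (`left_mir_op`/`right_mir_op` are hypothetical `mir::Operand`s; the real binary-op handling
    // lives elsewhere in this crate).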

    pub(crate) fn const_val_to_op(
        &self,
        val_val: mir::ConstValue<'tcx>,
        ty: Ty<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        // Other cases need layout.
        let adjust_scalar = |scalar| -> InterpResult<'tcx, _> {
            Ok(match scalar {
                Scalar::Ptr(ptr, size) => Scalar::Ptr(self.global_root_pointer(ptr)?, size),
                Scalar::Int(int) => Scalar::Int(int),
            })
        };
        let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?;
        let imm = match val_val {
            mir::ConstValue::Indirect { alloc_id, offset } => {
                // This is const data, no mutation allowed.
                let ptr = self.global_root_pointer(Pointer::new(
                    CtfeProvenance::from(alloc_id).as_immutable(),
                    offset,
                ))?;
                return Ok(self.ptr_to_mplace(ptr.into(), layout).into());
            }
            mir::ConstValue::Scalar(x) => adjust_scalar(x)?.into(),
            mir::ConstValue::ZeroSized => Immediate::Uninit,
            mir::ConstValue::Slice { data, meta } => {
                // This is const data, no mutation allowed.
                let alloc_id = self.tcx.reserve_and_set_memory_alloc(data);
                let ptr = Pointer::new(CtfeProvenance::from(alloc_id).as_immutable(), Size::ZERO);
                Immediate::new_slice(self.global_root_pointer(ptr)?.into(), meta, self)
            }
        };
        Ok(OpTy { op: Operand::Immediate(imm), layout })
    }
}

// Some nodes are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(target_pointer_width = "64")]
mod size_asserts {
    use rustc_data_structures::static_assert_size;

    use super::*;
    // tidy-alphabetical-start
    static_assert_size!(Immediate, 48);
    static_assert_size!(ImmTy<'_>, 64);
    static_assert_size!(Operand, 56);
    static_assert_size!(OpTy<'_>, 72);
    // tidy-alphabetical-end
}